hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1d6716c6302b90b55715b903cc01e19cfa43a54b
| 61,620
|
py
|
Python
|
src/foreign_if/python/main/python/frovedis/mllib/linear_model.py
|
XpressAI/frovedis
|
bda0f2c688fb832671c5b542dd8df1c9657642ff
|
[
"BSD-2-Clause"
] | null | null | null |
src/foreign_if/python/main/python/frovedis/mllib/linear_model.py
|
XpressAI/frovedis
|
bda0f2c688fb832671c5b542dd8df1c9657642ff
|
[
"BSD-2-Clause"
] | null | null | null |
src/foreign_if/python/main/python/frovedis/mllib/linear_model.py
|
XpressAI/frovedis
|
bda0f2c688fb832671c5b542dd8df1c9657642ff
|
[
"BSD-2-Clause"
] | null | null | null |
"""
linear_model.py: wrapper of frovedis Logistic Regression, Linear Regression,
Lasso, Ridge Regression, SGDClassifier and SGDRegressor
"""
import os.path
import pickle
import warnings
import numpy as np
from .model_util import *
from .metrics import *
from ..base import BaseEstimator
from ..exrpc import rpclib
from ..exrpc.server import FrovedisServer, set_association, \
check_association, do_if_active_association
from ..matrix.ml_data import FrovedisLabeledPoint
from ..matrix.dtype import TypeUtil
from ..utils import check_sample_weight
class LogisticRegression(BaseEstimator):
    """
    A python wrapper of Frovedis Logistic Regression.

    Defaults follow Frovedis rather than sklearn:
      C: Frovedis: 100, Sklearn: 1
      max_iter: Frovedis: 1000, Sklearn: 100
      lr_rate: Frovedis: 0.01 (added)
      use_shrink: Frovedis: false (added)
    """
    def __init__(self, penalty='l2', dual=False, tol=1e-4, C=100.0,
                 fit_intercept=True, intercept_scaling=1, class_weight=None,
                 random_state=None, solver='lbfgs', max_iter=1000,
                 multi_class='auto', verbose=0, warm_start=False,
                 n_jobs=1, l1_ratio=None, lr_rate=0.01, use_shrink=False):
        self.penalty = penalty
        self.dual = dual
        self.tol = tol
        self.C = C
        self.fit_intercept = fit_intercept
        self.intercept_scaling = intercept_scaling
        self.class_weight = class_weight
        self.random_state = random_state
        self.solver = solver
        self.max_iter = max_iter
        self.multi_class = multi_class
        self.verbose = verbose
        self.warm_start = warm_start
        self.n_jobs = n_jobs
        self.l1_ratio = l1_ratio
        # extra (frovedis-specific) parameters
        self.lr_rate = lr_rate
        self.use_shrink = use_shrink
        # server-side model handle and after-fit metadata
        self.__mid = ModelID.get()
        self.__mdtype = None
        self.__mkind = None
        self.label_map = None
        self.n_classes = None
        self._classes = None
        self._intercept = None
        self._coef = None
        self._n_iter = None
        self.n_samples = None
        self.n_features = None
        self.isMult = None
        self.isFitted = False
    def check_input(self, X, y, F):
        """
        Validates and converts the input feature matrix X and label
        vector y; returns frovedis-side data handles along with
        dtype/itype and whether the matrix is dense.
        F is the caller name used in error messages.
        """
        # for binary case: frovedis supports -1 and 1
        inp_data = FrovedisLabeledPoint(X, y, \
                   caller = "[" + self.__class__.__name__ + "] " + F + ": ",\
                   encode_label = True, binary_encoder=[-1, 1], \
                   dense_kind = 'colmajor', densify=False)
        X, y, logic = inp_data.get()
        self._classes = inp_data.get_distinct_labels()
        self.n_classes = len(self._classes)
        self.n_samples = inp_data.numRows()
        self.n_features = inp_data.numCols()
        self.label_map = logic
        dtype = inp_data.get_dtype()
        itype = inp_data.get_itype()
        dense = inp_data.is_dense()
        return X, y, dtype, itype, dense
    @set_association
    def fit(self, X, y, sample_weight=None):
        """
        Fits a logistic regression model on the given training data
        at the frovedis server. Returns self.
        """
        # C <= 0 must be rejected: rparam = 1.0 / self.C below would
        # otherwise divide by zero (C == 0) or flip the sign of the
        # regularization strength (C < 0)
        if self.C <= 0:
            raise ValueError("fit: parameter C must be strictly positive!")
        self.reset_metadata()
        X, y, dtype, itype, dense = self.check_input(X, y, "fit")
        self.__mdtype = dtype
        if dense and self.use_shrink:
            raise ValueError("fit: use_shrink is applicable only for " \
                             + "sparse data!")
        if self.use_shrink:
            if self.solver == "lbfgs":
                raise ValueError("fit: use_shrink is applicable only for " \
                                 + "sgd solver!")
        self.__mkind = M_KIND.LR
        if self.multi_class == 'auto' or self.multi_class == 'ovr':
            if self.n_classes == 2:
                isMult = False
            else:
                isMult = True
        elif self.multi_class == 'multinomial':
            isMult = True # even for binary data
        else:
            raise ValueError("Unknown multi_class: %s!" % self.multi_class)
        if isMult and self.solver != 'sag':
            self.solver = 'sag' #only sag solver supports multinomial currently
            warnings.warn("fit: multinomial classification problem is " +
                          "detected... switching solver to 'sag'.\n")
        if self.penalty == 'l1':
            regTyp = 1
        elif self.penalty == 'l2':
            regTyp = 2
        elif self.penalty == 'none':
            regTyp = 0
        else:
            raise ValueError("Unsupported penalty is provided: ", self.penalty)
        solver = self.solver
        if solver == 'sag':
            solver = 'sgd'
        elif solver == 'lbfgs':
            regTyp = 2 # l2 is supported for lbfgs
        else:
            raise ValueError( \
                "Unknown solver %s for Logistic Regression." % solver)
        # sklearn's default C (1.0) is mapped to frovedis' default
        # regularization strength (0.01 == 1.0 / 100.0)
        if self.C == 1.0:
            rparam = 0.01 # 1.0 / 100.0
        else:
            rparam = 1.0 / self.C
        sample_weight = check_sample_weight(self, sample_weight)
        (host, port) = FrovedisServer.getServerInstance()
        n_iter = rpclib.lr(host, port, X.get(), y.get(), \
                           sample_weight, len(sample_weight), self.max_iter, \
                           self.lr_rate, regTyp, rparam, isMult, \
                           self.fit_intercept, self.tol, self.verbose, \
                           self.__mid, dtype, itype, dense, \
                           solver.encode('ascii'), \
                           self.use_shrink, self.warm_start)
        excpt = rpclib.check_server_exception()
        if excpt["status"]:
            raise RuntimeError(excpt["info"])
        # weights are fetched lazily from the server (see coef_)
        self._coef = None
        self._intercept = None
        self._n_iter = np.asarray([n_iter], dtype = np.int32)
        self.isMult = isMult
        self.isFitted = True
        return self
    @property
    @check_association
    def coef_(self):
        """coef_ getter: lazily fetches the weight vector from the
        server and reshapes it to (1, n_features) for binary or
        (n_classes, n_features) for multinomial models"""
        if self._coef is None:
            (host, port) = FrovedisServer.getServerInstance()
            wgt = rpclib.get_weight_vector(host, port, self.__mid, \
                    self.__mkind, self.__mdtype)
            excpt = rpclib.check_server_exception()
            if excpt["status"]:
                raise RuntimeError(excpt["info"])
            if not self.isMult:
                n_features = len(wgt)
                shape = (1, n_features)
            else:
                n_features = len(wgt) // self.n_classes
                shape = (self.n_classes, n_features)
            self._coef = np.asarray(wgt).reshape(shape)
        return self._coef
    @coef_.setter
    def coef_(self, val):
        """coef_ setter"""
        raise AttributeError(\
        "attribute 'coef_' of LogisticRegression object is not writable")
    @property
    @check_association
    def intercept_(self):
        """intercept_ getter: lazily fetches the intercept vector
        from the server"""
        if self._intercept is None:
            (host, port) = FrovedisServer.getServerInstance()
            icpt = rpclib.get_intercept_vector(host, port, self.__mid, \
                    self.__mkind, self.__mdtype)
            excpt = rpclib.check_server_exception()
            if excpt["status"]:
                raise RuntimeError(excpt["info"])
            self._intercept = np.asarray(icpt)
        return self._intercept
    @intercept_.setter
    def intercept_(self, val):
        """ intercept_ setter"""
        raise AttributeError(\
        "attribute 'intercept_' of LogisticRegression object is not writable")
    @property
    def classes_(self):
        """classes_ getter: distinct labels seen during fit"""
        if not self.is_fitted():
            raise AttributeError("attribute 'classes_'" \
               "might have been released or called before fit")
        if self._classes is None:
            self._classes = np.sort(list(self.label_map.values()))
        return self._classes
    @classes_.setter
    def classes_(self, val):
        """classes_ setter"""
        raise AttributeError(\
            "attribute 'classes_' of LogisticRegression object is not writable")
    @property
    def n_iter_(self):
        """n_iter_ getter: number of iterations run by the solver"""
        if not self.is_fitted():
            raise AttributeError("attribute 'n_iter_'" \
               "might have been released or called before fit")
        return self._n_iter
    @n_iter_.setter
    def n_iter_(self, val):
        """n_iter_ setter"""
        raise AttributeError(\
            "attribute 'n_iter_' of LogisticRegression object is not writable")
    @check_association
    def predict(self, X):
        """
        Predicts class labels for X; frovedis-side encoded labels are
        mapped back to the original labels via label_map.
        """
        frov_pred = GLM.predict(X, self.__mid, self.__mkind, \
                                self.__mdtype, False)
        return np.asarray([self.label_map[frov_pred[i]] \
                          for i in range(0, len(frov_pred))])
    @check_association
    def predict_proba(self, X):
        """
        Predicts class probabilities for X; returns an array of shape
        (n_samples, n_classes).
        """
        proba = GLM.predict(X, self.__mid, self.__mkind, \
                            self.__mdtype, True, self.n_classes)
        n_samples = len(proba) // self.n_classes
        shape = (n_samples, self.n_classes)
        return np.asarray(proba, dtype=np.float64).reshape(shape)
    # calculate the mean accuracy on the given test data and labels.
    def score(self, X, y, sample_weight=None):
        """
        Returns the mean accuracy on the given test data and labels.
        """
        return accuracy_score(y, self.predict(X), sample_weight=sample_weight)
    @set_association
    def load(self, fname, dtype=None):
        """
        Loads a previously saved model (label_map, metadata and model
        weights) from directory fname. Returns self.
        """
        if not os.path.exists(fname):
            raise ValueError(\
                "the model with name %s does not exist!" % fname)
        self.reset_metadata()
        target = open(fname + "/label_map", "rb")
        self.label_map = pickle.load(target)
        target.close()
        self._classes = np.sort(list(self.label_map.values()))
        metadata = open(fname + "/metadata", "rb")
        self.n_classes, self.__mkind, self.__mdtype = pickle.load(metadata)
        metadata.close()
        if dtype is not None:
            mdt = TypeUtil.to_numpy_dtype(self.__mdtype)
            if dtype != mdt:
                raise ValueError("load: type mismatches detected! " + \
                                 "expected type: " + str(mdt) + \
                                 "; given type: " + str(dtype))
        GLM.load(self.__mid, self.__mkind, self.__mdtype, fname+"/model")
        self.isFitted = True
        return self
    @check_association
    def save(self, fname):
        """
        Saves the fitted model (label_map, metadata and model weights)
        into a new directory fname.
        """
        if os.path.exists(fname):
            raise ValueError(\
                "another model with %s name already exists!" % fname)
        os.makedirs(fname)
        GLM.save(self.__mid, self.__mkind, self.__mdtype, fname + "/model")
        target = open(fname + "/label_map", "wb")
        pickle.dump(self.label_map, target)
        target.close()
        metadata = open(fname + "/metadata", "wb")
        pickle.dump((self.n_classes, self.__mkind, self.__mdtype), metadata)
        metadata.close()
    @check_association
    def debug_print(self):
        """
        Prints server-side model contents (for debugging).
        """
        GLM.debug_print(self.__mid, self.__mkind, self.__mdtype)
    def reset_metadata(self):
        """
        resets after-fit populated attributes to None
        """
        self._coef = None
        self._intercept = None
        self._classes = None
        self._n_iter = None
        self.n_samples = None
        self.isMult = None
        self.isFitted = False
        self.n_features = None
        self.n_classes = None # check if release is merged
        self.label_map = None
    def release(self):
        """
        resets after-fit populated attributes to None
        along with releasing server side memory
        """
        self.__release_server_heap()
        self.reset_metadata()
    @do_if_active_association
    def __release_server_heap(self):
        """
        to release model pointer from server heap
        """
        GLM.release(self.__mid, self.__mkind, self.__mdtype)
    def __del__(self):
        """
        destructs the python object
        """
        self.reset_metadata()
    def is_fitted(self):
        """ function to confirm if the model is already fitted """
        return self.isFitted
class LinearRegression(BaseEstimator):
    """A python wrapper of Frovedis Linear Regression.

    Parameters added w.r.t. sklearn:
      max_iter: Frovedis: 1000 (added)
      solver: Frovedis: None (default value is chosen
              based on training matrix type) (added)
      lr_rate: Frovedis: 0.01 (added)
      tol: Frovedis: 0.0001 (added)
    """
    def __init__(self, fit_intercept=True, normalize=False, copy_X=True,
                 n_jobs=None, max_iter=None, tol=0.0001, lr_rate=1e-8,
                 solver=None, verbose=0, warm_start = False):
        self.fit_intercept = fit_intercept
        self.normalize = normalize
        self.copy_X = copy_X
        self.n_jobs = n_jobs
        self.warm_start = warm_start
        # extra (frovedis-specific) parameters
        self.max_iter = max_iter
        self.tol = tol
        self.lr_rate = lr_rate
        self.solver = solver
        self.verbose = verbose
        # server-side model handle and after-fit metadata
        self.__mid = ModelID.get()
        self.__mdtype = None
        self.__mkind = M_KIND.LNRM
        self._intercept = None
        self._coef = None
        self.n_samples = None
        self.n_features = None
        self._n_iter = None
        self.singular_ = None
        self.rank_ = None
        self.isFitted = None
    def check_input(self, X, y, F):
        """
        Validates and converts the input feature matrix X and label
        vector y; returns frovedis-side data handles along with
        dtype/itype and whether the matrix is dense.
        F is the caller name used in error messages.
        """
        inp_data = FrovedisLabeledPoint(X, y, \
                   caller = "[" + self.__class__.__name__ + "] " + F + ": ",\
                   dense_kind = 'colmajor', densify=False)
        X, y = inp_data.get()
        self.n_samples = inp_data.numRows()
        self.n_features = inp_data.numCols()
        itype = inp_data.get_itype()
        dense = inp_data.is_dense()
        dtype = inp_data.get_dtype()
        return X, y, dtype, itype, dense
    @set_association
    def fit(self, X, y, sample_weight=None):
        """
        Fits a linear regression model on the given training data at
        the frovedis server. Returns self.
        """
        self.reset_metadata()
        X, y, dtype, itype, dense = self.check_input(X, y, "fit")
        self.__mdtype = dtype
        # select default solver, when None is given
        if self.solver is None:
            if dense:
                self.solver = 'lapack' # ?gelsd for dense X
            else:
                self.solver = 'sparse_lsqr' # sparse_lsqr for sparse X
        if self.solver in ('lapack', 'scalapack'):
            if not dense:
                raise TypeError("%s solver supports only dense feature data!" \
                                % (self.solver))
            if self.warm_start:
                raise TypeError("%s solver does not support warm_start!" \
                                % (self.solver))
        # NOTE: equality test (not "in ('sparse_lsqr')", which is a
        # substring check on a plain string and would wrongly accept
        # e.g. 'lsqr' or 'sparse')
        elif self.solver == 'sparse_lsqr':
            if dense:
                raise TypeError("%s solver supports only sparse feature data!" \
                                % (self.solver))
            if self.warm_start:
                raise TypeError("%s solver does not support warm_start!" \
                                % (self.solver))
        elif self.solver not in ('sag', 'sgd', 'lbfgs'):
            raise ValueError( \
                "Unknown solver %s for Linear Regression." % self.solver)
        if self.max_iter is None:
            if self.solver == 'sparse_lsqr':
                niter = 2 * X.numCols()
            else:
                niter = 1000 # default for sag and lbfgs
        else:
            niter = self.max_iter
        sample_weight = check_sample_weight(self, sample_weight)
        (host, port) = FrovedisServer.getServerInstance()
        solver = self.solver
        if solver == 'sag':
            solver = 'sgd'
        res = rpclib.lnr(host, port, X.get(), y.get(), \
                sample_weight, len(sample_weight), \
                niter, self.lr_rate, \
                self.fit_intercept, self.tol, self.verbose, self.__mid, \
                dtype, itype, dense, solver.encode('ascii'), self.warm_start)
        excpt = rpclib.check_server_exception()
        if excpt["status"]:
            raise RuntimeError(excpt["info"])
        if solver == 'lapack': #singular_ and rank_ available only for lapack solver
            sval = res['singular']
            self.singular_ = np.asarray(sval, TypeUtil.to_numpy_dtype(dtype))
            self.rank_ = int(res['rank'])
        if solver not in ('lapack', 'scalapack'):
            self._n_iter = res['n_iter']
        # weights are fetched lazily from the server (see coef_)
        self._coef = None
        self._intercept = None
        self.isFitted = True
        return self
    @property
    @check_association
    def coef_(self):
        """coef_ getter: lazily fetches the weight vector from the
        server"""
        if self._coef is None:
            (host, port) = FrovedisServer.getServerInstance()
            wgt = rpclib.get_weight_vector(host, port, self.__mid, \
                    self.__mkind, self.__mdtype)
            excpt = rpclib.check_server_exception()
            if excpt["status"]:
                raise RuntimeError(excpt["info"])
            self._coef = np.asarray(wgt)
        return self._coef
    @coef_.setter
    def coef_(self, val):
        """coef_ setter"""
        raise AttributeError("attribute 'coef_' \
            of LinearRegression object is not writable")
    @property
    @check_association
    def intercept_(self):
        """intercept_ getter: lazily fetches the intercept from the
        server"""
        if self._intercept is None:
            (host, port) = FrovedisServer.getServerInstance()
            icpt = rpclib.get_intercept_vector(host, port, self.__mid, \
                    self.__mkind, self.__mdtype)
            excpt = rpclib.check_server_exception()
            if excpt["status"]:
                raise RuntimeError(excpt["info"])
            self._intercept = icpt
        return self._intercept
    @intercept_.setter
    def intercept_(self, val):
        """intercept_ setter"""
        raise AttributeError(\
        "attribute 'intercept_' of LinearRegression object is not writable")
    @property
    def n_iter_(self):
        """n_iter_ getter: number of iterations run by the iterative
        solvers (not available for lapack/scalapack)"""
        if not self.is_fitted():
            raise AttributeError("attribute 'n_iter_'" \
               "might have been released or called before fit")
        return self._n_iter
    @n_iter_.setter
    def n_iter_(self, val):
        """n_iter_ setter"""
        raise AttributeError(\
            "attribute 'n_iter_' of LinearRegression object is not writable")
    @check_association
    def predict(self, X):
        """
        Predicts target values for X.
        """
        ret = GLM.predict(X, self.__mid, self.__mkind, \
                          self.__mdtype, False)
        return np.asarray(ret, dtype = np.float64)
    # calculate the root mean square value on the given test data and labels.
    def score(self, X, y, sample_weight=None):
        """
        Returns the R2 score on the given test data and labels.
        """
        return r2_score(y, self.predict(X), sample_weight=sample_weight)
    @set_association
    def load(self, fname, dtype=None):
        """
        Loads a previously saved model (metadata and model weights)
        from directory fname. Returns self.
        """
        if not os.path.exists(fname):
            raise ValueError(\
                "the model with name %s does not exist!" % fname)
        self.reset_metadata()
        metadata = open(fname + "/metadata", "rb")
        self.__mkind, self.__mdtype = pickle.load(metadata)
        metadata.close()
        if dtype is not None:
            mdt = TypeUtil.to_numpy_dtype(self.__mdtype)
            if dtype != mdt:
                raise ValueError("load: type mismatches detected! " + \
                                 "expected type: " + str(mdt) + \
                                 "; given type: " + str(dtype))
        GLM.load(self.__mid, self.__mkind, self.__mdtype, fname+"/model")
        self.isFitted = True
        return self
    @check_association
    def save(self, fname):
        """
        Saves the fitted model (metadata and model weights) into a new
        directory fname.
        """
        if os.path.exists(fname):
            raise ValueError(\
                "another model with %s name already exists!" % fname)
        os.makedirs(fname)
        GLM.save(self.__mid, self.__mkind, self.__mdtype, fname + "/model")
        metadata = open(fname + "/metadata", "wb")
        pickle.dump((self.__mkind, self.__mdtype), metadata)
        metadata.close()
    @check_association
    def debug_print(self):
        """
        Prints server-side model contents (for debugging).
        """
        GLM.debug_print(self.__mid, self.__mkind, self.__mdtype)
    def reset_metadata(self):
        """
        resets after-fit populated attributes to None
        """
        self._coef = None
        self._intercept = None
        self.n_samples = None
        self._n_iter = None
        self.isFitted = None
        self.n_features = None
    def release(self):
        """
        resets after-fit populated attributes to None
        along with releasing server side memory
        """
        self.__release_server_heap()
        self.reset_metadata()
    @do_if_active_association
    def __release_server_heap(self):
        """
        to release model pointer from server heap
        """
        GLM.release(self.__mid, self.__mkind, self.__mdtype)
    def __del__(self):
        """
        destructs the python object
        """
        self.reset_metadata()
    def is_fitted(self):
        """ function to confirm if the model is already fitted """
        return self.isFitted
class Lasso(BaseEstimator):
    """A python wrapper of Frovedis Lasso (L1-regularized) Regression"""
    # defaults are as per Frovedis
    # lr_rate: Frovedis: 0.01 (added)
    # solver: Frovedis: sag (SGD) (added)
    def __init__(self, alpha=0.01, fit_intercept=True, normalize=False,
                 precompute=False, copy_X=True, max_iter=1000,
                 tol=1e-4, warm_start=False, positive=False,
                 random_state=None, selection='cyclic',
                 lr_rate=1e-8, verbose=0, solver='sag'):
        self.alpha = alpha
        self.fit_intercept = fit_intercept
        self.normalize = normalize
        self.precompute = precompute
        self.copy_X = copy_X
        self.max_iter = max_iter
        self.tol = tol
        self.warm_start = warm_start
        self.positive = positive
        self.random_state = random_state
        self.selection = selection
        # extra (frovedis-specific) parameters
        self.lr_rate = lr_rate
        self.verbose = verbose
        self.solver = solver
        # server-side model handle and after-fit metadata
        self.__mid = ModelID.get()
        self.__mdtype = None
        self.__mkind = M_KIND.LSR
        self._coef = None
        self._intercept = None
        self.n_samples = None
        self.n_features = None
        self._n_iter = None
        self.isFitted = None
    def check_input(self, X, y, F):
        """Validates and converts input X, y; returns frovedis-side data
        handles along with dtype/itype and denseness of the matrix.
        F is the caller name used in error messages."""
        inp_data = FrovedisLabeledPoint(X, y, \
                   caller = "[" + self.__class__.__name__ + "] " + F + ": ",\
                   dense_kind = 'colmajor', densify=False)
        X, y = inp_data.get()
        self.n_samples = inp_data.numRows()
        self.n_features = inp_data.numCols()
        dtype = inp_data.get_dtype()
        itype = inp_data.get_itype()
        dense = inp_data.is_dense()
        return X, y, dtype, itype, dense
    @set_association
    def fit(self, X, y, sample_weight=None):
        """
        Fits a lasso regression model on the given training data at
        the frovedis server. Returns self.
        """
        self.reset_metadata()
        X, y, dtype, itype, dense = self.check_input(X, y, "fit")
        self.__mdtype = dtype
        if self.max_iter is None:
            self.max_iter = 1000
        sample_weight = check_sample_weight(self, sample_weight)
        (host, port) = FrovedisServer.getServerInstance()
        supported_solver = ['sgd', 'lbfgs']
        solver = self.solver
        # sklearn-style 'sag' is mapped to frovedis' 'sgd' solver
        if solver == 'sag':
            solver = 'sgd'
        if solver not in supported_solver:
            raise ValueError( \
              "Unknown solver %s for Lasso Regression." % solver)
        n_iter = rpclib.lasso(host, port, X.get(), y.get(), \
                              sample_weight, len(sample_weight), \
                              self.max_iter, self.lr_rate, \
                              self.alpha, self.fit_intercept, self.tol, \
                              self.verbose, self.__mid, dtype, itype, dense, \
                              solver.encode('ascii'), self.warm_start)
        excpt = rpclib.check_server_exception()
        if excpt["status"]:
            raise RuntimeError(excpt["info"])
        # weights are fetched lazily from the server (see coef_)
        self._coef = None
        self._intercept = None
        self._n_iter = n_iter
        self.isFitted = True
        return self
    @property
    @check_association
    def coef_(self):
        """coef_ getter: lazily fetches the weight vector from the
        server"""
        if self._coef is None:
            (host, port) = FrovedisServer.getServerInstance()
            wgt = rpclib.get_weight_vector(host, port, self.__mid, \
                    self.__mkind, self.__mdtype)
            excpt = rpclib.check_server_exception()
            if excpt["status"]:
                raise RuntimeError(excpt["info"])
            self._coef = np.asarray(wgt)
        return self._coef
    @coef_.setter
    def coef_(self, val):
        """coef_ setter"""
        raise AttributeError(\
        "attribute 'coef_' of LassoRegression object is not writable")
    @property
    @check_association
    def intercept_(self):
        """intercept_ getter: lazily fetches the intercept from the
        server"""
        if self._intercept is None:
            (host, port) = FrovedisServer.getServerInstance()
            icpt = rpclib.get_intercept_vector(host, port, self.__mid, \
                    self.__mkind, self.__mdtype)
            excpt = rpclib.check_server_exception()
            if excpt["status"]:
                raise RuntimeError(excpt["info"])
            self._intercept = icpt
        return self._intercept
    @intercept_.setter
    def intercept_(self, val):
        """intercept_ setter"""
        raise AttributeError(\
        "attribute 'intercept_' of LassoRegression object is not writable")
    @property
    def n_iter_(self):
        """n_iter_ getter: number of iterations run by the solver"""
        if not self.is_fitted():
            raise AttributeError("attribute 'n_iter_'" \
               "might have been released or called before fit")
        return self._n_iter
    @n_iter_.setter
    def n_iter_(self, val):
        """n_iter_ setter"""
        raise AttributeError(\
            "attribute 'n_iter_' of Lasso Regression object is not writable")
    @check_association
    def predict(self, X):
        """
        Predicts target values for X.
        """
        ret = GLM.predict(X, self.__mid, self.__mkind, \
                          self.__mdtype, False)
        return np.asarray(ret, dtype = np.float64)
    # calculate the root mean square value on the given test data and labels.
    def score(self, X, y, sample_weight=None):
        """
        Returns the R2 score on the given test data and labels.
        """
        return r2_score(y, self.predict(X), sample_weight=sample_weight)
    @set_association
    def load(self, fname, dtype=None):
        """
        Loads a previously saved model (metadata and model weights)
        from directory fname. Returns self.
        """
        if not os.path.exists(fname):
            raise ValueError(\
                "the model with name %s does not exist!" % fname)
        self.reset_metadata()
        metadata = open(fname + "/metadata", "rb")
        self.__mkind, self.__mdtype = pickle.load(metadata)
        metadata.close()
        if dtype is not None:
            mdt = TypeUtil.to_numpy_dtype(self.__mdtype)
            if dtype != mdt:
                raise ValueError("load: type mismatches detected! " + \
                                 "expected type: " + str(mdt) + \
                                 "; given type: " + str(dtype))
        GLM.load(self.__mid, self.__mkind, self.__mdtype, fname+"/model")
        self.isFitted = True
        return self
    @check_association
    def save(self, fname):
        """
        Saves the fitted model (metadata and model weights) into a new
        directory fname.
        """
        if os.path.exists(fname):
            raise ValueError(\
                "another model with %s name already exists!" % fname)
        os.makedirs(fname)
        GLM.save(self.__mid, self.__mkind, self.__mdtype, fname + "/model")
        metadata = open(fname + "/metadata", "wb")
        pickle.dump((self.__mkind, self.__mdtype), metadata)
        metadata.close()
    @check_association
    def debug_print(self):
        """
        Prints server-side model contents (for debugging).
        """
        GLM.debug_print(self.__mid, self.__mkind, self.__mdtype)
    def reset_metadata(self):
        """
        resets after-fit populated attributes to None
        """
        self._coef = None
        self._intercept = None
        self._n_iter = None
        self.n_samples = None
        self.isFitted = None
        self.n_features = None
    def release(self):
        """
        resets after-fit populated attributes to None
        along with releasing server side memory
        """
        self.__release_server_heap()
        self.reset_metadata()
    @do_if_active_association
    def __release_server_heap(self):
        """
        to release model pointer from server heap
        """
        GLM.release(self.__mid, self.__mkind, self.__mdtype)
    def __del__(self):
        """
        destructs the python object
        """
        self.reset_metadata()
    def is_fitted(self):
        """ function to confirm if the model is already fitted """
        return self.isFitted
class Ridge(BaseEstimator):
"""A python wrapper of Frovedis Ridge Regression"""
# defaults are as per Frovedis
# lr_rate: Frovedis: 0.01 (added)
def __init__(self, alpha=0.01, fit_intercept=True, normalize=False,
copy_X=True, max_iter=None, tol=1e-3, solver='auto',
random_state=None, lr_rate=1e-8, verbose=0,
warm_start = False):
self.alpha = alpha
self.fit_intercept = fit_intercept
self.normalize = normalize
self.copy_X = copy_X
self.max_iter = max_iter
self.tol = tol
self.solver = solver
self.random_state = random_state
self.warm_start = warm_start
# extra
self.lr_rate = lr_rate
self.verbose = verbose
self.__mid = ModelID.get()
self.__mdtype = None
self.__mkind = M_KIND.RR
self._coef = None
self._intercept = None
self.n_samples = None
self.n_features = None
self._n_iter = None
self.isFitted = False
def check_input(self, X, y, F):
"""checks input X"""
inp_data = FrovedisLabeledPoint(X, y, \
caller = "[" + self.__class__.__name__ + "] " + F + ": ",\
dense_kind = 'colmajor', densify=False)
X, y = inp_data.get()
self.n_samples = inp_data.numRows()
self.n_features = inp_data.numCols()
dtype = inp_data.get_dtype()
itype = inp_data.get_itype()
dense = inp_data.is_dense()
return X, y, dtype, itype, dense
@set_association
def fit(self, X, y, sample_weight=None):
"""
NAME: fit
"""
self.reset_metadata()
X, y, dtype, itype, dense = self.check_input(X, y, "fit")
self.__mdtype = dtype
sv = ['svd', 'cholesky', 'lsqr', 'sparse_cg']
if self.solver in sv:
raise ValueError( \
"Frovedis doesn't support solver %s for Ridge "\
"Regression currently." % self.solver)
if self.max_iter is None:
self.max_iter = 1000
sample_weight = check_sample_weight(self, sample_weight)
(host, port) = FrovedisServer.getServerInstance()
supported_solver = ['sgd', 'lbfgs']
solver = self.solver
if solver in ['sag', 'auto']:
solver = 'sgd'
if solver not in supported_solver:
raise ValueError( \
"Unknown solver %s for Ridge Regression." % solver)
n_iter = rpclib.ridge(host, port, X.get(), y.get(), \
sample_weight, len(sample_weight), \
self.max_iter, self.lr_rate, \
self.alpha, self.fit_intercept, self.tol, \
self.verbose, self.__mid, \
dtype, itype, dense, solver.encode('ascii'),
self.warm_start)
excpt = rpclib.check_server_exception()
if excpt["status"]:
raise RuntimeError(excpt["info"])
self._coef = None
self._intercept = None
self._n_iter = np.asarray([n_iter], dtype = np.int32)
self.isFitted = True
return self
@property
@check_association
def coef_(self):
"""coef_ getter"""
if self._coef is None:
(host, port) = FrovedisServer.getServerInstance()
wgt = rpclib.get_weight_vector(host, port, self.__mid, \
self.__mkind, self.__mdtype)
excpt = rpclib.check_server_exception()
if excpt["status"]:
raise RuntimeError(excpt["info"])
self._coef = np.asarray(wgt)
return self._coef
@coef_.setter
def coef_(self, val):
"""coef_ setter"""
raise AttributeError(\
"attribute 'coef_' of Ridge regression object is not writable")
@property
@check_association
def intercept_(self):
"""intercept_ getter"""
if self._intercept is None:
(host, port) = FrovedisServer.getServerInstance()
icpt = rpclib.get_intercept_vector(host, port, self.__mid, \
self.__mkind, self.__mdtype)
excpt = rpclib.check_server_exception()
if excpt["status"]:
raise RuntimeError(excpt["info"])
self._intercept = icpt
return self._intercept
@intercept_.setter
def intercept_(self, val):
"""intercept_ setter"""
raise AttributeError(\
"attribute 'intercept_' of Ridge regression object is not writable")
@property
def n_iter_(self):
"""n_iter_ getter"""
if not self.is_fitted():
raise AttributeError("attribute 'n_iter_'" \
"might have been released or called before fit")
return self._n_iter
@n_iter_.setter
def n_iter_(self, val):
"""n_iter_ setter"""
raise AttributeError(\
"attribute 'n_iter_' of Ridge Regression object is not writable")
@check_association
def predict(self, X):
"""
NAME: predict
"""
ret = GLM.predict(X, self.__mid, self.__mkind, \
self.__mdtype, False)
return np.asarray(ret, dtype = np.float64)
# computes the R^2 (coefficient of determination) on the given test data
def score(self, X, y, sample_weight=None):
    """Return the R^2 score of predict(X) against y."""
    y_pred = self.predict(X)
    return r2_score(y, y_pred, sample_weight=sample_weight)
@set_association
def load(self, fname, dtype=None):
    """Load a previously saved model from the directory *fname*.

    Reads the pickled (kind, dtype) metadata, optionally validates the
    requested dtype against the stored one, then loads the server-side
    model. Raises ValueError if the path does not exist or the dtypes
    mismatch.
    """
    if not os.path.exists(fname):
        raise ValueError(\
            "the model with name %s does not exist!" % fname)
    self.reset_metadata()
    # 'with' guarantees the metadata file is closed even if
    # pickle.load raises (the original open/close leaked on error)
    with open(fname + "/metadata", "rb") as metadata:
        self.__mkind, self.__mdtype = pickle.load(metadata)
    if dtype is not None:
        mdt = TypeUtil.to_numpy_dtype(self.__mdtype)
        if dtype != mdt:
            raise ValueError("load: type mismatches detected! " + \
                             "expected type: " + str(mdt) + \
                             "; given type: " + str(dtype))
    GLM.load(self.__mid, self.__mkind, self.__mdtype, fname+"/model")
    self.isFitted = True
    return self
@check_association
def save(self, fname):
    """Save the fitted model into a new directory *fname*.

    Raises ValueError if *fname* already exists. Writes the server-side
    model plus a pickled (kind, dtype) metadata file.
    """
    if os.path.exists(fname):
        raise ValueError(\
            "another model with %s name already exists!" % fname)
    os.makedirs(fname)
    GLM.save(self.__mid, self.__mkind, self.__mdtype, fname + "/model")
    # 'with' guarantees the metadata file is closed even if
    # pickle.dump raises (the original open/close leaked on error)
    with open(fname + "/metadata", "wb") as metadata:
        pickle.dump((self.__mkind, self.__mdtype), metadata)
@check_association
def debug_print(self):
    """
    NAME: debug_print

    Debugging aid: delegates to the generic GLM helper with this
    model's server-side handle (mid/kind/dtype). Returns nothing.
    """
    GLM.debug_print(self.__mid, self.__mkind, self.__mdtype)
def reset_metadata(self):
    """Clear every attribute that fit() populates, marking the model unfitted."""
    self._coef = None
    self._intercept = None
    self._n_iter = None
    self.n_samples = None
    self.n_features = None
    self.isFitted = False
def release(self):
    """Free the server-side model memory, then reset local metadata."""
    self.__release_server_heap()
    self.reset_metadata()
@do_if_active_association
def __release_server_heap(self):
    """
    Release the model pointer from the server heap.

    Per the decorator, this presumably runs only while an active
    server association exists — confirm against the decorator's
    definition.
    """
    GLM.release(self.__mid, self.__mkind, self.__mdtype)
def __del__(self):
    """Destructor: drop locally cached fit results."""
    self.reset_metadata()
def is_fitted(self):
    """Return a truthy value when fit() has completed on this model."""
    return self.isFitted
class SGDClassifier(BaseEstimator):
    """
    A python wrapper for SGD classifier.

    Supported losses are "log" (logistic regression), "hinge" (linear
    SVM, binary only) and "squared_loss" (linear regression trained
    with SGD). Several scikit-learn parameters (shuffle, n_jobs,
    random_state, l1_ratio, ...) are accepted for API compatibility;
    only a subset is forwarded to the Frovedis server (see fit()).
    """
    def __init__(self, loss="hinge", penalty='l2', alpha=0.0001, l1_ratio=0.15,
                 fit_intercept=True, max_iter=1000, tol=1e-3, shuffle=True,
                 verbose=0, epsilon=0.1, n_jobs=None,
                 random_state=None, learning_rate="invscaling", eta0=1.0,
                 power_t=0.5, early_stopping=False, validation_fraction=0.1,
                 n_iter_no_change=5, class_weight=None, warm_start=False,
                 average=False):
        self.loss = loss
        self.penalty = penalty
        self.alpha = alpha
        self.l1_ratio = l1_ratio
        self.fit_intercept = fit_intercept
        self.max_iter = max_iter
        self.tol = tol
        self.shuffle = shuffle
        self.verbose = verbose
        self.epsilon = epsilon
        self.n_jobs = n_jobs
        self.random_state = random_state
        self.learning_rate = learning_rate
        self.eta0 = eta0
        self.power_t = power_t
        self.early_stopping = early_stopping
        self.validation_fraction = validation_fraction
        self.n_iter_no_change = n_iter_no_change
        self.class_weight = class_weight
        self.warm_start = warm_start
        self.average = average
        # extra: Frovedis server-side model bookkeeping
        self.__mid = ModelID.get()
        self.__mdtype = None
        self.__mkind = None
        self.label_map = None
        self.n_classes = None
        self._classes = None
        self._intercept = None
        self._coef = None
        self._n_iter = None
        self.n_samples = None
        self.n_features = None
        self.isFitted = None
        self.is_mult = None

    def validate(self):
        """Validate hyper parameters; warn about silently-overridden ones."""
        if self.power_t != 0.5:
            # trailing space after "to": without it the value ran into
            # the preceding word in the warning text
            warnings.warn(\
                " Parameter power_t has been set to " + str(self.power_t) + \
                " However, power_t will be set to 0.5 internally")
        if self.learning_rate != 'invscaling':
            warnings.warn(" Parameter learning_rate has been set to " + \
                str(self.learning_rate) + \
                " However, learning_rate will be set to invscaling internally")
        if self.alpha < 0:
            raise ValueError("alpha must be >= 0")

    def check_input(self, X, y, F):
        """Validate/convert X and y into Frovedis data structures.

        For classification losses the labels are also encoded and the
        class metadata (n_classes, label_map) is recorded on self.
        Returns (X, y, dtype, itype, dense).
        """
        if self.loss == "squared_loss":
            inp_data = FrovedisLabeledPoint(X, y, \
                caller = "[" + self.__class__.__name__ + "] " + F + ": ",\
                dense_kind = 'colmajor', densify=False)
            X, y = inp_data.get()
        else:
            # for binary case: frovedis supports -1 and 1
            inp_data = FrovedisLabeledPoint(X, y, \
                caller = "[" + self.__class__.__name__ + "] " + F + ": ",\
                encode_label = True, binary_encoder=[-1, 1], \
                dense_kind = 'colmajor', densify=False)
            X, y, logic = inp_data.get()
            self._classes = inp_data.get_distinct_labels()
            self.n_classes = len(self._classes)
            self.label_map = logic
        self.n_samples = inp_data.numRows()
        self.n_features = inp_data.numCols()
        dtype = inp_data.get_dtype()
        itype = inp_data.get_itype()
        dense = inp_data.is_dense()
        return X, y, dtype, itype, dense

    @set_association
    def fit(self, X, y, coef_init=None, intercept_init=None, \
            sample_weight=None):
        """
        Fit method for SGDclassifier.

        Dispatches on self.loss to the matching Frovedis trainer.
        coef_init/intercept_init are accepted for scikit-learn API
        compatibility but are not forwarded to the server.
        """
        self.reset_metadata()
        self.validate()
        X, y, dtype, itype, dense = self.check_input(X, y, "fit")
        self.__mdtype = dtype
        rparam = self.alpha
        if self.penalty == 'l1':
            regTyp = 1
        elif self.penalty == 'l2':
            regTyp = 2
        elif self.penalty == 'none':
            regTyp = 0
        else:
            raise ValueError( \
                "Unsupported penalty is provided: ", self.penalty)
        sample_weight = check_sample_weight(self, sample_weight)
        (host, port) = FrovedisServer.getServerInstance()
        if self.loss == "log":
            self.__mkind = M_KIND.LR
            # multinomial logistic regression whenever > 2 classes
            self.is_mult = self.n_classes != 2
            n_iter = rpclib.lr(host, port, X.get(), y.get(),
                               sample_weight, len(sample_weight), \
                               self.max_iter, self.eta0, \
                               regTyp, rparam, self.is_mult, \
                               self.fit_intercept, self.tol, self.verbose, \
                               self.__mid, dtype, itype, dense, \
                               "sgd".encode('ascii'), False, \
                               self.warm_start)
        elif self.loss == "hinge":
            if self.n_classes != 2:
                raise ValueError("SGDClassifier: loss = 'hinge' supports" + \
                                 " only binary classification!")
            self.__mkind = M_KIND.SVM
            n_iter = rpclib.svm(host, port, X.get(), y.get(), \
                                sample_weight, len(sample_weight), \
                                self.max_iter, self.eta0, \
                                regTyp, rparam, self.fit_intercept, self.tol, \
                                self.verbose, self.__mid, dtype, itype, dense, \
                                'sgd'.encode('ascii'), self.warm_start)
        elif self.loss == "squared_loss":
            # model kind follows the regularization:
            # none -> LNRM, l1 -> LSR (lasso), l2 -> RR (ridge)
            model_kind = [M_KIND.LNRM, M_KIND.LSR, M_KIND.RR]
            self.__mkind = model_kind[regTyp]
            # NOTE(review): the solver argument was missing here, while
            # SGDRegressor.fit passes it to the same rpclib.lnr2_sgd —
            # aligned with that (convention-matching) call
            n_iter = rpclib.lnr2_sgd(host, port, X.get(), y.get(), \
                                sample_weight, len(sample_weight), \
                                self.max_iter, self.eta0, \
                                regTyp, rparam, self.fit_intercept, self.tol, \
                                self.verbose, self.__mid, dtype, itype, dense, \
                                "sgd".encode('ascii'), self.warm_start)
        else:
            raise ValueError("SGDClassifier: supported losses are log, " + \
                             "hinge and squared_loss only!")
        excpt = rpclib.check_server_exception()
        if excpt["status"]:
            raise RuntimeError(excpt["info"])
        self._coef = None
        self._intercept = None
        self._n_iter = n_iter
        self.isFitted = True
        return self

    @property
    @check_association
    def coef_(self):
        """coef_ getter: fetch weights lazily from the server and cache."""
        if self._coef is None:
            (host, port) = FrovedisServer.getServerInstance()
            wgt = rpclib.get_weight_vector(host, port, self.__mid, \
                                           self.__mkind, self.__mdtype)
            excpt = rpclib.check_server_exception()
            if excpt["status"]:
                raise RuntimeError(excpt["info"])
            if not self.is_mult:
                n_features = len(wgt)
                shape = (1, n_features)
            else: # MLR case: one weight row per class
                n_features = len(wgt) // self.n_classes
                shape = (self.n_classes, n_features)
            self._coef = np.asarray(wgt).reshape(shape)
        return self._coef

    @coef_.setter
    def coef_(self, val):
        """coef_ setter"""
        raise AttributeError(\
            "attribute 'coef_' of SGDClassifier object is not writable")

    @property
    @check_association
    def intercept_(self):
        """intercept_ getter: fetch the intercept lazily from the server and cache."""
        if self._intercept is None:
            (host, port) = FrovedisServer.getServerInstance()
            icpt = rpclib.get_intercept_vector(host, port, self.__mid, \
                                               self.__mkind, self.__mdtype)
            excpt = rpclib.check_server_exception()
            if excpt["status"]:
                raise RuntimeError(excpt["info"])
            self._intercept = np.asarray(icpt)
        return self._intercept

    @intercept_.setter
    def intercept_(self, val):
        """intercept_ setter"""
        raise AttributeError(\
            "attribute 'intercept_' of SGDClassifier object is not writable")

    @property
    def classes_(self):
        """classes_ getter: distinct labels seen during fit (classification only)."""
        if not self.is_fitted():
            raise AttributeError("attribute 'classes_' might have been " \
                                 "released or called before fit")
        if self.__mkind in [M_KIND.LNRM, M_KIND.LSR, M_KIND.RR]:
            raise AttributeError(\
                "attribute 'classes_' is not available for squared_loss")
        if self._classes is None:
            self._classes = np.sort(list(self.label_map.values()))
        return self._classes

    @classes_.setter
    def classes_(self, val):
        """classes_ setter"""
        raise AttributeError(\
            "attribute 'classes_' of SGDClassifier object is not writable")

    @property
    def n_iter_(self):
        """n_iter_ getter: iteration count recorded by the last fit()."""
        # use is_fitted() like the sibling wrappers: self.__mid is set in
        # __init__ and never cleared, so the old "self.__mid is None"
        # test could never trigger
        if not self.is_fitted():
            # space after 'n_iter_' so the two literals don't run together
            raise AttributeError("attribute 'n_iter_' " \
                "might have been released or called before fit")
        return self._n_iter

    @n_iter_.setter
    def n_iter_(self, val):
        """n_iter_ setter"""
        raise AttributeError(\
            "attribute 'n_iter_' of SGDClassifier object is not writable")

    def predict(self, X):
        """Predict labels (classification) or values (squared_loss) for X."""
        frov_pred = GLM.predict(X, self.__mid, self.__mkind, \
                                self.__mdtype, False)
        if self.__mkind in [M_KIND.LNRM, M_KIND.LSR, M_KIND.RR]:
            return np.asarray(frov_pred, dtype=np.float64)
        # map server-side encoded labels back to the caller's labels
        return np.asarray([self.label_map[pred] for pred in frov_pred])

    @check_association
    def predict_proba(self, X):
        """Per-class probability estimates; available only for loss='log'."""
        if self.__mkind in [M_KIND.LNRM, M_KIND.LSR, M_KIND.RR, M_KIND.SVM]:
            raise AttributeError("attribute 'predict_proba' is not " \
                                 "available for %s loss" % (self.loss))
        proba = GLM.predict(X, self.__mid, self.__mkind, \
                            self.__mdtype, True, self.n_classes)
        n_samples = len(proba) // self.n_classes
        shape = (n_samples, self.n_classes)
        return np.asarray(proba, dtype=np.float64).reshape(shape)

    # R^2 for regression kinds, mean accuracy for classification kinds
    def score(self, X, y, sample_weight=None):
        """Score predict(X) against y (R^2 or accuracy depending on loss)."""
        if self.__mkind in [M_KIND.LNRM, M_KIND.LSR, M_KIND.RR]:
            return r2_score(y, self.predict(X), sample_weight=sample_weight)
        return accuracy_score(y, self.predict(X), sample_weight=sample_weight)

    @set_association
    def load(self, fname, dtype=None):
        """Load a previously saved model from the directory *fname*.

        Metadata is read first so that the *stored* model kind decides
        whether a label_map file is expected; the original code
        consulted self.__mkind before it was loaded (stale or None),
        so regression models were asked for a label_map they never
        saved. 'with' guarantees files are closed even on error.
        """
        if not os.path.exists(fname):
            raise ValueError(\
                "the model with name %s does not exist!" % fname)
        self.reset_metadata()
        with open(fname + "/metadata", "rb") as metadata:
            self.loss, self.__mkind, self.__mdtype = \
                pickle.load(metadata)
        if self.__mkind not in [M_KIND.LNRM, M_KIND.LSR, M_KIND.RR]:
            with open(fname + "/label_map", "rb") as target:
                self.label_map = pickle.load(target)
            self._classes = np.sort(list(self.label_map.values()))
            self.n_classes = len(self._classes)
        if dtype is not None:
            mdt = TypeUtil.to_numpy_dtype(self.__mdtype)
            if dtype != mdt:
                raise ValueError("load: type mismatches detected! " + \
                                 "expected type: " + str(mdt) + \
                                 "; given type: " + str(dtype))
        GLM.load(self.__mid, self.__mkind, self.__mdtype, fname+"/model")
        self.isFitted = True
        return self

    @check_association
    def save(self, fname):
        """Save the fitted model into a new directory *fname*."""
        if os.path.exists(fname):
            raise ValueError(\
                "another model with %s name already exists!" % fname)
        os.makedirs(fname)
        GLM.save(self.__mid, self.__mkind, self.__mdtype, fname + "/model")
        if self.__mkind not in [M_KIND.LNRM, M_KIND.LSR, M_KIND.RR]:
            with open(fname + "/label_map", "wb") as target:
                pickle.dump(self.label_map, target)
        with open(fname + "/metadata", "wb") as metadata:
            pickle.dump(\
                (self.loss, self.__mkind, self.__mdtype), metadata)

    @check_association
    def debug_print(self):
        """Dump server-side model internals (debugging aid)."""
        GLM.debug_print(self.__mid, self.__mkind, self.__mdtype)

    def reset_metadata(self):
        """Reset after-fit populated attributes, marking the model unfitted."""
        self._coef = None
        self._intercept = None
        self._classes = None
        self._n_iter = None
        self.n_samples = None
        self.n_features = None
        self.is_mult = None
        # False (not None) for consistency with the sibling wrappers;
        # both are falsy for is_fitted()
        self.isFitted = False

    def release(self):
        """Reset local metadata along with releasing server-side memory."""
        self.__release_server_heap()
        self.reset_metadata()

    @do_if_active_association
    def __release_server_heap(self):
        """Release the model pointer from the server heap."""
        GLM.release(self.__mid, self.__mkind, self.__mdtype)

    def __del__(self):
        """Destructor: drop locally cached fit results."""
        self.reset_metadata()

    def is_fitted(self):
        """ function to confirm if the model is already fitted """
        return self.isFitted
class SGDRegressor(BaseEstimator):
    """
    A python wrapper for SGD regressor.

    Supported losses: "epsilon_insensitive" / "squared_epsilon_insensitive"
    (SVM regression) and "squared_loss" (linear regression). Some
    scikit-learn parameters (shuffle, random_state, l1_ratio, ...) are
    accepted for API compatibility; only a subset is forwarded to the
    Frovedis server (see fit()).
    """
    def __init__(self, loss="squared_loss", penalty='l2', alpha=0.0001, l1_ratio=0.15,
                 fit_intercept=True, max_iter=1000, tol=1e-3, shuffle=True,
                 verbose=0, epsilon=0.1, random_state=None, learning_rate="invscaling",
                 eta0=0.001, power_t=0.25, early_stopping=False, validation_fraction=0.1,
                 n_iter_no_change=5, warm_start=False, average=False):
        self.loss = loss
        self.penalty = penalty
        self.alpha = alpha
        self.l1_ratio = l1_ratio
        self.fit_intercept = fit_intercept
        self.max_iter = max_iter
        self.tol = tol
        self.shuffle = shuffle
        self.verbose = verbose
        self.epsilon = epsilon
        self.random_state = random_state
        self.learning_rate = learning_rate
        self.eta0 = eta0
        self.power_t = power_t
        self.early_stopping = early_stopping
        self.validation_fraction = validation_fraction
        self.n_iter_no_change = n_iter_no_change
        self.warm_start = warm_start
        self.average = average
        # extra: Frovedis server-side model bookkeeping
        self.__mid = ModelID.get()
        self.__mdtype = None
        self.__mkind = None
        self._intercept = None
        self._coef = None
        self._n_iter = None
        self.n_samples = None
        self.n_features = None
        self.isFitted = None

    def check_input(self, X, y, F):
        """Validate/convert X and y into Frovedis structures; record sizes."""
        inp_data = FrovedisLabeledPoint(X, y, \
            caller = "[" + self.__class__.__name__ + "] " + F + ": ",\
            dense_kind = 'colmajor', densify=False)
        X, y = inp_data.get()
        self.n_samples = inp_data.numRows()
        self.n_features = inp_data.numCols()
        dtype = inp_data.get_dtype()
        itype = inp_data.get_itype()
        dense = inp_data.is_dense()
        return X, y, dtype, itype, dense

    def validate(self):
        """Validate hyper parameters; warn about silently-overridden ones."""
        if self.tol < 0:
            raise ValueError("fit: tol parameter must be zero or positive!")
        if self.max_iter <= 0:
            raise ValueError("fit: max_iter must be a positive value!")
        if self.power_t != 0.25:
            # trailing space after "to": without it the value ran into
            # the preceding word in the warning text
            warnings.warn(\
                " Parameter power_t has been set to " + str(self.power_t) + \
                " However, power_t will be set to 0.25 internally")
        supported_learning_rate = ("invscaling", "optimal")
        if self.learning_rate not in supported_learning_rate:
            raise ValueError("fit: Unsupported learning_rate : " + str(self.learning_rate))
        if self.alpha < 0:
            raise ValueError("fit: alpha must be >= 0")
        if self.eta0 < 0:
            raise ValueError("fit: eta0 parameter must be zero or positive!")

    @set_association
    def fit(self, X, y, coef_init=None, intercept_init=None, \
            sample_weight=None):
        """
        Fit method for SGDRegressor.

        Dispatches on self.loss to the matching Frovedis trainer.
        coef_init/intercept_init are accepted for scikit-learn API
        compatibility but are not forwarded to the server.
        """
        # release old model, if any
        self.reset_metadata()
        self.validate()
        # perform the fit
        X, y, dtype, itype, dense = self.check_input(X, y, "fit")
        self.__mdtype = dtype
        if self.penalty == 'l1':
            regTyp = 1
        elif self.penalty == 'l2':
            regTyp = 2
        elif self.penalty == 'none':
            regTyp = 0
        else:
            raise ValueError( \
                "Unsupported penalty is provided: ", self.penalty)
        svrloss = {'epsilon_insensitive': 1,
                   'squared_epsilon_insensitive': 2}
        lnrloss = {'squared_loss': 1}
        sample_weight = check_sample_weight(self, sample_weight)
        (host, port) = FrovedisServer.getServerInstance()
        if self.loss in svrloss:
            self.__mkind = M_KIND.SVR
            intLoss = svrloss[self.loss]
            if self.epsilon < 0:
                raise ValueError("fit: epsilon parameter must be zero or positive!")
            n_iter = rpclib.svm_regressor(host, port, X.get(), y.get(), \
                                sample_weight, len(sample_weight), \
                                self.max_iter, self.eta0, \
                                self.epsilon, regTyp, self.alpha, \
                                self.fit_intercept, self.tol, \
                                intLoss, self.verbose, \
                                self.__mid, dtype, itype, dense, \
                                "sgd".encode('ascii'), self.warm_start)
        elif self.loss in lnrloss:
            # model kind follows the regularization:
            # none -> LNRM, l1 -> LSR (lasso), l2 -> RR (ridge)
            model_kind = [M_KIND.LNRM, M_KIND.LSR, M_KIND.RR]
            self.__mkind = model_kind[regTyp]
            n_iter = rpclib.lnr2_sgd(host, port, X.get(), y.get(), \
                                sample_weight, len(sample_weight), \
                                self.max_iter, self.eta0, \
                                regTyp, self.alpha, \
                                self.fit_intercept, self.tol, \
                                self.verbose, self.__mid, dtype, itype, dense, \
                                "sgd".encode('ascii'), self.warm_start)
        else:
            raise ValueError(\
                "fit: supported losses are epsilon_insensitive, " \
                + "squared_epsilon_insensitive and squared_loss!")
        excpt = rpclib.check_server_exception()
        if excpt["status"]:
            raise RuntimeError(excpt["info"])
        self._coef = None
        self._intercept = None
        self._n_iter = n_iter
        self.isFitted = True
        return self

    @property
    @check_association
    def coef_(self):
        """coef_ getter: fetch weights lazily from the server and cache."""
        if self._coef is None:
            (host, port) = FrovedisServer.getServerInstance()
            wgt = rpclib.get_weight_vector(host, port, self.__mid, \
                                           self.__mkind, self.__mdtype)
            excpt = rpclib.check_server_exception()
            if excpt["status"]:
                raise RuntimeError(excpt["info"])
            self._coef = np.asarray(wgt)
        return self._coef

    @coef_.setter
    def coef_(self, val):
        """coef_ setter"""
        raise AttributeError(\
            "attribute 'coef_' of SGDRegressor object is not writable")

    @property
    @check_association
    def intercept_(self):
        """intercept_ getter: fetch the intercept lazily from the server and cache."""
        if self._intercept is None:
            (host, port) = FrovedisServer.getServerInstance()
            icpt = rpclib.get_intercept_vector(host, port, self.__mid, \
                                               self.__mkind, self.__mdtype)
            excpt = rpclib.check_server_exception()
            if excpt["status"]:
                raise RuntimeError(excpt["info"])
            self._intercept = np.asarray(icpt)
        return self._intercept

    @intercept_.setter
    def intercept_(self, val):
        """intercept_ setter"""
        raise AttributeError(\
            "attribute 'intercept_' of SGDRegressor object is not writable")

    @property
    def n_iter_(self):
        """n_iter_ getter: iteration count recorded by the last fit()."""
        if not self.is_fitted():
            # space after 'n_iter_' so the two literals don't run together
            raise AttributeError("attribute 'n_iter_' " \
                "might have been released or called before fit")
        return self._n_iter

    @n_iter_.setter
    def n_iter_(self, val):
        """n_iter_ setter"""
        raise AttributeError(\
            "attribute 'n_iter_' of SGDRegressor object is not writable")

    @check_association
    def predict(self, X):
        """Predict target values for X using the server-side model."""
        ret = GLM.predict(X, self.__mid, self.__mkind, \
                          self.__mdtype, False)
        return np.asarray(ret, dtype=np.float64)

    def score(self, X, y, sample_weight=None):
        """Return the R^2 score of predict(X) against y."""
        return r2_score(y, self.predict(X), sample_weight=sample_weight)

    @set_association
    def load(self, fname, dtype=None):
        """Load a previously saved model from the directory *fname*.

        Reads pickled (loss, kind, dtype) metadata, optionally
        validates the requested dtype, then loads the server-side
        model. 'with' guarantees the file is closed even on error.
        """
        if not os.path.exists(fname):
            raise ValueError(\
                "the model with name %s does not exist!" % fname)
        self.reset_metadata()
        with open(fname + "/metadata", "rb") as metadata:
            self.loss, self.__mkind, self.__mdtype = pickle.load(metadata)
        if dtype is not None:
            mdt = TypeUtil.to_numpy_dtype(self.__mdtype)
            if dtype != mdt:
                raise ValueError("load: type mismatches detected! " + \
                                 "expected type: " + str(mdt) + \
                                 "; given type: " + str(dtype))
        GLM.load(self.__mid, self.__mkind, self.__mdtype, fname+"/model")
        self.isFitted = True
        return self

    @check_association
    def save(self, fname):
        """Save the fitted model into a new directory *fname*."""
        if os.path.exists(fname):
            raise ValueError(\
                "another model with %s name already exists!" % fname)
        os.makedirs(fname)
        GLM.save(self.__mid, self.__mkind, self.__mdtype, fname + "/model")
        # 'with' guarantees the file handle is closed even on error
        with open(fname + "/metadata", "wb") as metadata:
            pickle.dump(\
                (self.loss, self.__mkind, self.__mdtype), metadata)

    @check_association
    def debug_print(self):
        """Dump server-side model internals (debugging aid)."""
        GLM.debug_print(self.__mid, self.__mkind, self.__mdtype)

    def reset_metadata(self):
        """Reset after-fit populated attributes, marking the model unfitted."""
        self._coef = None
        self._intercept = None
        self._n_iter = None
        self.n_samples = None
        self.n_features = None
        # False (not None) for consistency with the sibling wrappers;
        # both are falsy for is_fitted()
        self.isFitted = False

    def release(self):
        """Reset local metadata along with releasing server-side memory."""
        self.__release_server_heap()
        self.reset_metadata()

    @do_if_active_association
    def __release_server_heap(self):
        """Release the model pointer from the server heap."""
        GLM.release(self.__mid, self.__mkind, self.__mdtype)

    def __del__(self):
        """Destructor: drop locally cached fit results."""
        self.reset_metadata()

    def is_fitted(self):
        """ function to confirm if the model is already fitted """
        return self.isFitted
| 35.332569
| 91
| 0.556475
| 6,880
| 61,620
| 4.738517
| 0.051744
| 0.025521
| 0.022331
| 0.032637
| 0.872519
| 0.862796
| 0.82841
| 0.813349
| 0.801816
| 0.795927
| 0
| 0.006316
| 0.339662
| 61,620
| 1,743
| 92
| 35.35284
| 0.794888
| 0.074278
| 0
| 0.837587
| 0
| 0
| 0.093033
| 0.000979
| 0
| 0
| 0
| 0
| 0
| 1
| 0.094354
| false
| 0
| 0.009281
| 0
| 0.154679
| 0.009281
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1d8623902773f04596f3c65c6276534b7af34a9f
| 200
|
py
|
Python
|
kerbal_api/__init__.py
|
obi1kenobi/kerbal-api
|
930880ac0bc55e3902e9949053a93a6b8a04efee
|
[
"MIT"
] | 3
|
2020-07-01T23:16:58.000Z
|
2020-07-29T06:30:31.000Z
|
kerbal_api/__init__.py
|
obi1kenobi/kerbal-api
|
930880ac0bc55e3902e9949053a93a6b8a04efee
|
[
"MIT"
] | null | null | null |
kerbal_api/__init__.py
|
obi1kenobi/kerbal-api
|
930880ac0bc55e3902e9949053a93a6b8a04efee
|
[
"MIT"
] | null | null | null |
from .querying import KSP_SCHEMA, KSP_SCHEMA_TEXT, execute_query, get_default_adapter
# Public API of the package: `from kerbal_api import *` exposes exactly
# these names, re-exported from the .querying module.
__all__ = [
    "KSP_SCHEMA",
    "KSP_SCHEMA_TEXT",
    "execute_query",
    "get_default_adapter",
]
| 20
| 86
| 0.695
| 24
| 200
| 5.125
| 0.5
| 0.292683
| 0.195122
| 0.292683
| 0.829268
| 0.829268
| 0.829268
| 0.829268
| 0.829268
| 0.829268
| 0
| 0
| 0.21
| 200
| 9
| 87
| 22.222222
| 0.778481
| 0
| 0
| 0
| 0
| 0
| 0.298429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
d519b6a11830204297846942a9b6101e45082878
| 88
|
py
|
Python
|
Python3/TuplesAndSets/creating_accessing_set.py
|
norbertosanchezdichi/TIL
|
2e9719ddd288022f53b094a42679e849bdbcc625
|
[
"MIT"
] | null | null | null |
Python3/TuplesAndSets/creating_accessing_set.py
|
norbertosanchezdichi/TIL
|
2e9719ddd288022f53b094a42679e849bdbcc625
|
[
"MIT"
] | null | null | null |
Python3/TuplesAndSets/creating_accessing_set.py
|
norbertosanchezdichi/TIL
|
2e9719ddd288022f53b094a42679e849bdbcc625
|
[
"MIT"
] | null | null | null |
# A set literal keeps only unique elements, so the duplicate 4 and 5
# collapse and s holds {1, 2, 3, 4, 5}.
s = {1, 2, 3, 4, 5, 5, 4}
print(f'{s =}')
print(f'{3 in s =}')
print(f'{9 in s =}')
| 22
| 30
| 0.431818
| 22
| 88
| 1.727273
| 0.5
| 0.473684
| 0.368421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128571
| 0.204545
| 88
| 4
| 31
| 22
| 0.414286
| 0
| 0
| 0
| 0
| 0
| 0.280899
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
d58cb5c7570729f77ec3ae424a89df63fd3aea49
| 6,532
|
py
|
Python
|
tests/commands/test_iotserver.py
|
juanmagal/iot-slice-orchestrator
|
9fc0df4f74b2788ea116549c001bcb8c6b663280
|
[
"Apache-2.0"
] | 2
|
2019-08-16T13:05:39.000Z
|
2019-12-24T16:57:29.000Z
|
tests/commands/test_iotserver.py
|
juanmagal/iot-slice-orchestrator
|
9fc0df4f74b2788ea116549c001bcb8c6b663280
|
[
"Apache-2.0"
] | null | null | null |
tests/commands/test_iotserver.py
|
juanmagal/iot-slice-orchestrator
|
9fc0df4f74b2788ea116549c001bcb8c6b663280
|
[
"Apache-2.0"
] | null | null | null |
"""Tests for our `iotorch IoT Server` subcommand."""
from subprocess import PIPE, Popen as popen
from unittest import TestCase
class TestIotServer(TestCase):
    """End-to-end tests for the `iotorch iotserver` subcommand.

    Every test shells out to the real `iotorch` CLI via _run() and
    inspects the raw stdout bytes it produces.
    """

    def _run(self, *args):
        """Invoke `iotorch iotserver <args...>` and return its stdout bytes."""
        command = ['iotorch', 'iotserver'] + list(args)
        return popen(command, stdout=PIPE).communicate()[0]

    def test_returns_iotserver_get(self):
        output = self._run('get', '--name=test',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'test', output)

    def test_returns_iotserver_get_file_does_not_exit(self):
        output = self._run('get', '--name=test',
                           '--configfile=./tests/conf/iotorch_not_exist.toml')
        self.assertIn(b'Nothing to get', output)

    def test_returns_iotserver_create(self):
        output = self._run('create', '--name=server1', '--cluster=test1',
                           '--slice=test1',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'IoT Server server1 created', output)
        output = self._run('get', '--name=server1',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'test1', output)

    def test_returns_iotserver_create_file_does_not_exist(self):
        output = self._run('create', '--name=server2', '--cluster=test1',
                           '--slice=test1',
                           '--configfile=./tests/conf/iotorch_test.toml')
        self.assertIn(b'Cluster does not exist', output)
        output = self._run('get', '--name=server2',
                           '--configfile=./tests/conf/iotorch_test.toml')
        self.assertIn(b'Nothing to get', output)

    def test_returns_iotserver_create_cluster_does_not_exist(self):
        output = self._run('create', '--name=server3', '--cluster=ghost',
                           '--slice=test1',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'Cluster does not exist', output)
        output = self._run('get', '--name=server3',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'Nothing to get', output)

    def test_returns_iotgateway_create_slice_does_not_exist(self):
        output = self._run('create', '--name=gateway3', '--cluster=test1',
                           '--slice=ghost',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'Slice does not exist', output)
        output = self._run('get', '--name=gateway3',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'Nothing to get', output)

    def test_returns_iotserver_set(self):
        output = self._run('set', '--name=test1', '--user=test@user.com',
                           '--password=testpassword',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'IoT Server test1 set', output)
        output = self._run('get', '--name=test1',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'test@user.com', output)

    def test_returns_iotserver_delete(self):
        output = self._run('delete', '--name=server1',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'IoT Server server1 deleted', output)
        output = self._run('get', '--name=server1',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'Nothing to get', output)

    def test_returns_iotserver_delete_device_does_not_exist(self):
        output = self._run('delete', '--name=ghost',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'Nothing to delete', output)

    def test_returns_iotserver_delete_file_does_not_exist(self):
        output = self._run('delete', '--name=server1',
                           '--configfile=./tests/conf/iotorch_not_exist.toml')
        self.assertIn(b'Nothing to delete', output)

    def test_returns_iotserver_list(self):
        output = self._run('list',
                           '--configfile=./tests/conf/iotorch.toml')
        self.assertIn(b'test', output)

    def test_returns_iotserver_list_file_does_not_exist(self):
        output = self._run('list',
                           '--configfile=./tests/conf/iotorch_not_exist.toml')
        self.assertIn(b'Nothing to list', output)
| 48.029412
| 177
| 0.638855
| 726
| 6,532
| 5.650138
| 0.092287
| 0.048269
| 0.078986
| 0.118479
| 0.891516
| 0.849829
| 0.812287
| 0.812287
| 0.800341
| 0.800341
| 0
| 0.009266
| 0.190447
| 6,532
| 135
| 178
| 48.385185
| 0.766452
| 0.007042
| 0
| 0.709402
| 0
| 0
| 0.246334
| 0.051706
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.102564
| false
| 0.017094
| 0.017094
| 0
| 0.128205
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d599f5e6a617d4ef07c74eb5ef4f96c53b9cd66a
| 5,816
|
py
|
Python
|
kikimr/public/api/grpc/draft/ydb_long_tx_v1_pb2_grpc.py
|
yandex-cloud/ydb-python-sdk
|
0df2dce2d77fc41ad3020072740f51dd91630177
|
[
"Apache-2.0"
] | 19
|
2019-07-01T08:25:29.000Z
|
2022-01-26T14:46:51.000Z
|
kikimr/public/api/grpc/draft/ydb_long_tx_v1_pb2_grpc.py
|
yandex-cloud/ydb-python-sdk
|
0df2dce2d77fc41ad3020072740f51dd91630177
|
[
"Apache-2.0"
] | 5
|
2019-07-02T13:36:42.000Z
|
2021-09-14T06:46:48.000Z
|
kikimr/public/api/grpc/draft/ydb_long_tx_v1_pb2_grpc.py
|
yandex-cloud/ydb-python-sdk
|
0df2dce2d77fc41ad3020072740f51dd91630177
|
[
"Apache-2.0"
] | 10
|
2019-06-07T10:36:19.000Z
|
2021-10-15T08:58:11.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from kikimr.public.api.protos.draft import ydb_long_tx_pb2 as kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2
class LongTxServiceStub(object):
    # missing associated documentation comment in .proto file
    # NOTE: generated by the gRPC Python protocol compiler plugin
    # ("DO NOT EDIT") -- only comments are added here.
    pass

    def __init__(self, channel):
        """Constructor.

        Args:
          channel: A grpc.Channel.
        """
        # Each attribute below is a unary-unary callable for one RPC of
        # /Ydb.LongTx.V1.LongTxService, pairing the request serializer
        # with the matching response deserializer from the generated
        # protos module.
        self.BeginTx = channel.unary_unary(
            '/Ydb.LongTx.V1.LongTxService/BeginTx',
            request_serializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.BeginTransactionRequest.SerializeToString,
            response_deserializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.BeginTransactionResponse.FromString,
        )
        self.CommitTx = channel.unary_unary(
            '/Ydb.LongTx.V1.LongTxService/CommitTx',
            request_serializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.CommitTransactionRequest.SerializeToString,
            response_deserializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.CommitTransactionResponse.FromString,
        )
        self.RollbackTx = channel.unary_unary(
            '/Ydb.LongTx.V1.LongTxService/RollbackTx',
            request_serializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.RollbackTransactionRequest.SerializeToString,
            response_deserializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.RollbackTransactionResponse.FromString,
        )
        self.Write = channel.unary_unary(
            '/Ydb.LongTx.V1.LongTxService/Write',
            request_serializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.WriteRequest.SerializeToString,
            response_deserializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.WriteResponse.FromString,
        )
        self.Read = channel.unary_unary(
            '/Ydb.LongTx.V1.LongTxService/Read',
            request_serializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.ReadRequest.SerializeToString,
            response_deserializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.ReadResponse.FromString,
        )
class LongTxServiceServicer(object):
    # missing associated documentation comment in .proto file
    # NOTE: generated servicer skeleton ("DO NOT EDIT"): every handler
    # answers UNIMPLEMENTED until overridden in a subclass.
    pass

    def BeginTx(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def CommitTx(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RollbackTx(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Write(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Read(self, request, context):
        """rpc ResolveNodes(ResolveNodesRequest) returns (stream ResolveNodesResponse);
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_LongTxServiceServicer_to_server(servicer, server):
    """Register a LongTxServiceServicer implementation on a grpc.Server.

    Auto-generated gRPC code: builds one unary-unary method handler per
    RPC (with the matching protobuf request deserializer and response
    serializer) and installs them under the fully-qualified service name
    'Ydb.LongTx.V1.LongTxService'.

    Args:
        servicer: An object implementing the LongTxServiceServicer interface.
        server: The grpc.Server to attach the handlers to.
    """
    rpc_method_handlers = {
        'BeginTx': grpc.unary_unary_rpc_method_handler(
            servicer.BeginTx,
            request_deserializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.BeginTransactionRequest.FromString,
            response_serializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.BeginTransactionResponse.SerializeToString,
        ),
        'CommitTx': grpc.unary_unary_rpc_method_handler(
            servicer.CommitTx,
            request_deserializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.CommitTransactionRequest.FromString,
            response_serializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.CommitTransactionResponse.SerializeToString,
        ),
        'RollbackTx': grpc.unary_unary_rpc_method_handler(
            servicer.RollbackTx,
            request_deserializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.RollbackTransactionRequest.FromString,
            response_serializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.RollbackTransactionResponse.SerializeToString,
        ),
        'Write': grpc.unary_unary_rpc_method_handler(
            servicer.Write,
            request_deserializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.WriteRequest.FromString,
            response_serializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.WriteResponse.SerializeToString,
        ),
        'Read': grpc.unary_unary_rpc_method_handler(
            servicer.Read,
            request_deserializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.ReadRequest.FromString,
            response_serializer=kikimr_dot_public_dot_api_dot_protos_dot_draft_dot_ydb__long__tx__pb2.ReadResponse.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'Ydb.LongTx.V1.LongTxService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
| 50.573913
| 146
| 0.801238
| 705
| 5,816
| 6.024113
| 0.129078
| 0.036261
| 0.046621
| 0.062162
| 0.808571
| 0.808571
| 0.808571
| 0.715564
| 0.715564
| 0.715564
| 0
| 0.005568
| 0.135316
| 5,816
| 114
| 147
| 51.017544
| 0.838934
| 0.091644
| 0
| 0.298851
| 1
| 0
| 0.089473
| 0.039216
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08046
| false
| 0.068966
| 0.022989
| 0
| 0.126437
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
6394f96c4c067810038cbfa9b589feaea20bc607
| 6,111
|
py
|
Python
|
pyaz/storage/table/policy/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/storage/table/policy/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/storage/table/policy/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
'''
Manage shared access policies of a storage table.
'''
from .... pyaz_utils import _call_az
def create(name, table_name, account_key=None, account_name=None, connection_string=None, expiry=None, permissions=None, sas_token=None, start=None):
    '''
    Create a stored access policy on a storage table
    (wraps `az storage table policy create`).

    Required Parameters:
    - name -- The stored access policy name.
    - table_name -- The table name.

    Optional Parameters:
    - account_key -- Storage account key. Must be used in conjunction with storage account name. Environment variable: AZURE_STORAGE_KEY
    - account_name -- Storage account name. Related environment variable: AZURE_STORAGE_ACCOUNT. Must be used in conjunction with either storage account key or a SAS token. If neither are present, the command will try to query the storage account key using the authenticated Azure account. If a large number of storage commands are executed the API quota may be hit
    - connection_string -- Storage account connection string. Environment variable: AZURE_STORAGE_CONNECTION_STRING
    - expiry -- expiration UTC datetime in (Y-m-d'T'H:M:S'Z')
    - permissions -- Allowed values: (r)ead/query (a)dd (u)pdate (d)elete. Can be combined.
    - sas_token -- A Shared Access Signature (SAS). Must be used in conjunction with storage account name. Environment variable: AZURE_STORAGE_SAS_TOKEN
    - start -- start UTC datetime (Y-m-d'T'H:M:S'Z'). Defaults to time of request.
    '''
    # locals() is forwarded verbatim as the CLI arguments, so it must contain
    # exactly this function's parameters -- do not bind extra locals here.
    return _call_az("az storage table policy create", locals())
def delete(name, table_name, account_key=None, account_name=None, connection_string=None, sas_token=None):
    '''
    Delete a stored access policy on a storage table
    (wraps `az storage table policy delete`).

    Required Parameters:
    - name -- The stored access policy name.
    - table_name -- The table name.

    Optional Parameters:
    - account_key -- Storage account key. Must be used in conjunction with storage account name. Environment variable: AZURE_STORAGE_KEY
    - account_name -- Storage account name. Related environment variable: AZURE_STORAGE_ACCOUNT. Must be used in conjunction with either storage account key or a SAS token. If neither are present, the command will try to query the storage account key using the authenticated Azure account. If a large number of storage commands are executed the API quota may be hit
    - connection_string -- Storage account connection string. Environment variable: AZURE_STORAGE_CONNECTION_STRING
    - sas_token -- A Shared Access Signature (SAS). Must be used in conjunction with storage account name. Environment variable: AZURE_STORAGE_SAS_TOKEN
    '''
    # locals() is forwarded verbatim as the CLI arguments, so it must contain
    # exactly this function's parameters -- do not bind extra locals here.
    return _call_az("az storage table policy delete", locals())
def show(name, table_name, account_key=None, account_name=None, connection_string=None, sas_token=None):
    '''
    Show a stored access policy on a storage table
    (wraps `az storage table policy show`).

    Required Parameters:
    - name -- The stored access policy name.
    - table_name -- The table name.

    Optional Parameters:
    - account_key -- Storage account key. Must be used in conjunction with storage account name. Environment variable: AZURE_STORAGE_KEY
    - account_name -- Storage account name. Related environment variable: AZURE_STORAGE_ACCOUNT. Must be used in conjunction with either storage account key or a SAS token. If neither are present, the command will try to query the storage account key using the authenticated Azure account. If a large number of storage commands are executed the API quota may be hit
    - connection_string -- Storage account connection string. Environment variable: AZURE_STORAGE_CONNECTION_STRING
    - sas_token -- A Shared Access Signature (SAS). Must be used in conjunction with storage account name. Environment variable: AZURE_STORAGE_SAS_TOKEN
    '''
    # locals() is forwarded verbatim as the CLI arguments, so it must contain
    # exactly this function's parameters -- do not bind extra locals here.
    return _call_az("az storage table policy show", locals())
def list(table_name, account_key=None, account_name=None, connection_string=None, sas_token=None):
    '''
    List stored access policies on a storage table
    (wraps `az storage table policy list`).

    NOTE: this function intentionally shadows the builtin `list`; the name
    mirrors the CLI subcommand and must be kept for API compatibility.

    Required Parameters:
    - table_name -- The table name.

    Optional Parameters:
    - account_key -- Storage account key. Must be used in conjunction with storage account name. Environment variable: AZURE_STORAGE_KEY
    - account_name -- Storage account name. Related environment variable: AZURE_STORAGE_ACCOUNT. Must be used in conjunction with either storage account key or a SAS token. If neither are present, the command will try to query the storage account key using the authenticated Azure account. If a large number of storage commands are executed the API quota may be hit
    - connection_string -- Storage account connection string. Environment variable: AZURE_STORAGE_CONNECTION_STRING
    - sas_token -- A Shared Access Signature (SAS). Must be used in conjunction with storage account name. Environment variable: AZURE_STORAGE_SAS_TOKEN
    '''
    # locals() is forwarded verbatim as the CLI arguments, so it must contain
    # exactly this function's parameters -- do not bind extra locals here.
    return _call_az("az storage table policy list", locals())
def update(name, table_name, account_key=None, account_name=None, connection_string=None, expiry=None, permissions=None, sas_token=None, start=None):
    '''
    Update a stored access policy on a storage table
    (wraps `az storage table policy update`).

    Required Parameters:
    - name -- The stored access policy name.
    - table_name -- The table name.

    Optional Parameters:
    - account_key -- Storage account key. Must be used in conjunction with storage account name. Environment variable: AZURE_STORAGE_KEY
    - account_name -- Storage account name. Related environment variable: AZURE_STORAGE_ACCOUNT. Must be used in conjunction with either storage account key or a SAS token. If neither are present, the command will try to query the storage account key using the authenticated Azure account. If a large number of storage commands are executed the API quota may be hit
    - connection_string -- Storage account connection string. Environment variable: AZURE_STORAGE_CONNECTION_STRING
    - expiry -- expiration UTC datetime in (Y-m-d'T'H:M:S'Z')
    - permissions -- Allowed values: (r)ead/query (a)dd (u)pdate (d)elete. Can be combined.
    - sas_token -- A Shared Access Signature (SAS). Must be used in conjunction with storage account name. Environment variable: AZURE_STORAGE_SAS_TOKEN
    - start -- start UTC datetime (Y-m-d'T'H:M:S'Z'). Defaults to time of request.
    '''
    # locals() is forwarded verbatim as the CLI arguments, so it must contain
    # exactly this function's parameters -- do not bind extra locals here.
    return _call_az("az storage table policy update", locals())
| 64.326316
| 365
| 0.757814
| 874
| 6,111
| 5.169336
| 0.108696
| 0.123949
| 0.106242
| 0.137229
| 0.963922
| 0.963922
| 0.963922
| 0.963922
| 0.963922
| 0.963922
| 0
| 0
| 0.173621
| 6,111
| 94
| 366
| 65.010638
| 0.894653
| 0.791687
| 0
| 0
| 0
| 0
| 0.146146
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0.090909
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
63b0588bc27a1affe2d5a955537728ad8f95f5be
| 834
|
py
|
Python
|
tests/data/python38.py
|
AppliedIntuition/black
|
bc0c5ca9d41956bf5ed9fc851202579f9e52a338
|
[
"MIT"
] | 16,110
|
2019-07-22T21:54:54.000Z
|
2022-03-31T22:52:39.000Z
|
tests/data/python38.py
|
marnixah/black-but-usable
|
83b83d3066d1d857983bfa1a666a409e7255d79d
|
[
"MIT"
] | 1,981
|
2019-07-22T21:26:16.000Z
|
2022-03-31T23:14:35.000Z
|
tests/data/python38.py
|
marnixah/black-but-usable
|
83b83d3066d1d857983bfa1a666a409e7255d79d
|
[
"MIT"
] | 1,762
|
2019-07-22T21:23:00.000Z
|
2022-03-31T06:10:22.000Z
|
#!/usr/bin/env python3.8
# NOTE(review): this appears to be a formatter test fixture (black's
# tests/data layout): the code above the "# output" marker is the input
# and the code below it is the expected formatted result. Whitespace is
# therefore significant -- presumably this file must not be reformatted;
# confirm against the test harness before changing anything here.
def starred_return():
    my_list = ["value2", "value3"]
    return "value1", *my_list
def starred_yield():
    my_list = ["value2", "value3"]
    yield "value1", *my_list
# all right hand side expressions allowed in regular assignments are now also allowed in
# annotated assignments
a : Tuple[ str, int] = "1", 2
a: Tuple[int , ... ] = b, *c, d
def t():
    a : str = yield "a"
# output
#!/usr/bin/env python3.8
def starred_return():
    my_list = ["value2", "value3"]
    return "value1", *my_list
def starred_yield():
    my_list = ["value2", "value3"]
    yield "value1", *my_list
# all right hand side expressions allowed in regular assignments are now also allowed in
# annotated assignments
a: Tuple[str, int] = "1", 2
a: Tuple[int, ...] = b, *c, d
def t():
    a: str = yield "a"
| 18.130435
| 88
| 0.625899
| 123
| 834
| 4.146341
| 0.308943
| 0.094118
| 0.094118
| 0.141176
| 0.988235
| 0.988235
| 0.988235
| 0.988235
| 0.988235
| 0.988235
| 0
| 0.030722
| 0.219424
| 834
| 45
| 89
| 18.533333
| 0.752688
| 0.32494
| 0
| 1
| 0
| 0
| 0.136691
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
63c05fa8b291376fa8be30f5ccefe0ea3a00756c
| 89
|
py
|
Python
|
__init__.py
|
softlab-unimore/landmark
|
afebdd1e78d4a99caaa7a43cc72f95cb7ece8bbb
|
[
"MIT"
] | null | null | null |
__init__.py
|
softlab-unimore/landmark
|
afebdd1e78d4a99caaa7a43cc72f95cb7ece8bbb
|
[
"MIT"
] | null | null | null |
__init__.py
|
softlab-unimore/landmark
|
afebdd1e78d4a99caaa7a43cc72f95cb7ece8bbb
|
[
"MIT"
] | null | null | null |
from landmark.landmark import Landmark
from landmark.landmark.plot import PlotExplanation
| 44.5
| 50
| 0.88764
| 11
| 89
| 7.181818
| 0.454545
| 0.303797
| 0.506329
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078652
| 89
| 2
| 50
| 44.5
| 0.963415
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
893f928d3d8fe5887c05b90ad7845f1a9f7e62a3
| 22,999
|
py
|
Python
|
devilry/devilry_qualifiesforexam/tests/test_showstatus.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 29
|
2015-01-18T22:56:23.000Z
|
2020-11-10T21:28:27.000Z
|
devilry/devilry_qualifiesforexam/tests/test_showstatus.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 786
|
2015-01-06T16:10:18.000Z
|
2022-03-16T11:10:50.000Z
|
devilry/devilry_qualifiesforexam/tests/test_showstatus.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 15
|
2015-04-06T06:18:43.000Z
|
2021-02-24T12:28:30.000Z
|
# -*- coding: utf-8 -*-
# 3rd party imports
from model_bakery import baker
# Django imports
from django import test
# CrAdmin imports
from cradmin_legacy import cradmin_testhelpers
# Devilry imports
from devilry.project.common import settings
from devilry.devilry_qualifiesforexam.views import qualification_preview_view
from devilry.devilry_qualifiesforexam import models as status_models
class TestQualificationStatusView(test.TestCase, cradmin_testhelpers.TestCaseMixin):
    """Tests for QualificationStatusView: HTTP status, the retracted-status
    message, the back/retract/print buttons, and the query count."""
    viewclass = qualification_preview_view.QualificationStatusView
    def test_get(self):
        # A plain GET for an existing status renders with HTTP 200.
        testperiod = baker.make('core.Period')
        teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
        mockresponse = self.mock_getrequest(
            cradmin_role=testperiod,
            viewkwargs={
                'statusid': teststatus.id
            })
        self.assertEqual(mockresponse.response.status_code, 200)
    def test_get_retracted_message(self):
        # A NOTREADY status renders its retraction message in the page.
        testperiod = baker.make('core.Period')
        teststatus = baker.make('devilry_qualifiesforexam.Status',
                                period=testperiod,
                                status=status_models.Status.NOTREADY,
                                message='retracted')
        mockresponse = self.mock_http200_getrequest_htmls(
            cradmin_role=testperiod,
            viewkwargs={
                'statusid': teststatus.id
            })
        retracted_message_element = mockresponse.selector.one('#devilry_qualifiesforexam_retracted_message')
        self.assertTrue(retracted_message_element)
        self.assertEqual(retracted_message_element.alltext_normalized, 'retracted')
    def test_get_back_button(self):
        testperiod = baker.make('core.Period')
        teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
        mockresponse = self.mock_http200_getrequest_htmls(
            cradmin_role=testperiod,
            viewkwargs={
                'statusid': teststatus.id
            })
        self.assertTrue(mockresponse.selector.one('#devilry_qualifiesforexam_back_index_button'))
    def test_get_retract_button_link(self):
        # The retract link is shown only for READY statuses (see the
        # companion test below for the NOTREADY case).
        testperiod = baker.make('core.Period')
        teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod, status=status_models.Status.READY)
        mockresponse = self.mock_http200_getrequest_htmls(
            cradmin_role=testperiod,
            viewkwargs={
                'statusid': teststatus.id
            })
        self.assertTrue(mockresponse.selector.one('#devilry_qualifiesforexam_retract_link'))
    def test_no_retract_button_when_status_is_not_ready(self):
        testperiod = baker.make('core.Period')
        teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod,
                                status=status_models.Status.NOTREADY)
        mockresponse = self.mock_http200_getrequest_htmls(
            cradmin_role=testperiod,
            viewkwargs={
                'statusid': teststatus.id
            })
        self.assertFalse(mockresponse.selector.exists('#devilry_qualifiesforexam_retract_link'))
    def test_get_print_button_link(self):
        # The print link is shown only for READY statuses.
        testperiod = baker.make('core.Period')
        teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod, status=status_models.Status.READY)
        mockresponse = self.mock_http200_getrequest_htmls(
            cradmin_role=testperiod,
            viewkwargs={
                'statusid': teststatus.id
            })
        self.assertTrue(mockresponse.selector.one('#devilry_qualifiesforexam_print_link'))
    def test_no_print_button_when_status_is_not_ready(self):
        testperiod = baker.make('core.Period')
        teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod,
                                status=status_models.Status.NOTREADY)
        mockresponse = self.mock_http200_getrequest_htmls(
            cradmin_role=testperiod,
            viewkwargs={
                'statusid': teststatus.id
            })
        self.assertFalse(mockresponse.selector.exists('#devilry_qualifiesforexam_print_link'))
    def test_num_queries(self):
        # Guards against N+1 regressions: rendering must stay at 4 queries
        # even with 20 QualifiesForFinalExam rows attached to the status.
        testperiod = baker.make('core.Period')
        admin_user = baker.make(settings.AUTH_USER_MODEL)
        teststatus = baker.make('devilry_qualifiesforexam.Status',
                                period=testperiod,
                                status=status_models.Status.READY,
                                user=admin_user,
                                plugin='someplugin')
        baker.make('devilry_qualifiesforexam.QualifiesForFinalExam',
                   status=teststatus,
                   qualifies=True,
                   _quantity=10)
        baker.make('devilry_qualifiesforexam.QualifiesForFinalExam',
                   status=teststatus,
                   qualifies=False,
                   _quantity=10)
        with self.assertNumQueries(4):
            self.mock_http200_getrequest_htmls(
                cradmin_role=testperiod,
                requestuser=admin_user,
                viewkwargs={
                    'statusid': teststatus.id
                },
            )
class TestQualificationStatusPreviewTableRendering(test.TestCase, cradmin_testhelpers.TestCaseMixin):
viewclass = qualification_preview_view.QualificationStatusView
def test_table_is_rendered(self):
testperiod = baker.make('core.Period')
baker.make('core.RelatedStudent', period=testperiod)
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
},
sessionmock={
'qualifying_assignmentids': [],
'passing_relatedstudentids': [],
'plugintypeid': 'someplugin_id'
})
self.assertTrue(mockresponse.selector.exists('.devilry-qualifiesforexam-table'))
def test_table_row_is_rendered(self):
# Tests that two rows are rendered, on for the header and one for the student
testperiod = baker.make('core.Period')
baker.make('core.RelatedStudent', period=testperiod)
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
},
sessionmock={
'qualifying_assignmentids': [],
'passing_relatedstudentids': [],
'plugintypeid': 'someplugin_id'
})
self.assertEqual(len(mockresponse.selector.list('.devilry-qualifiesforexam-tr')), 2)
def test_table_row_is_rendered_multiple_students(self):
# Tests that 21 rows are rendered, one for the table header and twenty(one for each student)
testperiod = baker.make('core.Period')
baker.make('core.RelatedStudent', period=testperiod, _quantity=20)
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
},
sessionmock={
'qualifying_assignmentids': [],
'passing_relatedstudentids': [],
'plugintypeid': 'someplugin_id'
})
self.assertEqual(len(mockresponse.selector.list('.devilry-qualifiesforexam-tr')), 21)
def test_table_data_studentinfo_is_rendered(self):
# Tests that a td element of class 'devilry-qualifiesforexam-cell-studentinfo' is rendered.
testperiod = baker.make('core.Period')
baker.make('core.RelatedStudent', period=testperiod)
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
},
sessionmock={
'qualifying_assignmentids': [],
'passing_relatedstudentids': [],
'plugintypeid': 'someplugin_id'
})
self.assertEqual(len(mockresponse.selector.list('.devilry-qualifiesforexam-cell-studentinfo')), 1)
def test_table_data_qualify_result_is_rendered(self):
# Tests that a td element of class 'devilry-qualifiesforexam-cell-qualify' is rendered.
testperiod = baker.make('core.Period')
baker.make('core.RelatedStudent', period=testperiod)
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
},
sessionmock={
'qualifying_assignmentids': [],
'passing_relatedstudentids': [],
'plugintypeid': 'someplugin_id'
})
self.assertEqual(len(mockresponse.selector.list('.devilry-qualifiesforexam-cell-qualify')), 1)
def test_table_header_cell_data(self):
# Test a more complete example of data contained in cells for two students, one qualifying and one not.
testperiod = baker.make('core.Period')
baker.make('core.RelatedStudent', period=testperiod)
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
},
sessionmock={
'qualifying_assignmentids': [],
'passing_relatedstudentids': [],
'plugintypeid': 'someplugin_id'
})
table_headers = mockresponse.selector.list('.devilry-qualifiesforexam-th')
self.assertEqual(table_headers[0].alltext_normalized, 'Student')
self.assertEqual(table_headers[1].alltext_normalized, 'Qualified for final exams')
def test_table_student_row_data_student_does_not_qualify(self):
# Test a more complete example of data contained in cells for two students, one qualifying and one not.
testperiod = baker.make('core.Period')
relatedstudent = baker.make('core.RelatedStudent',
period=testperiod,
user=baker.make(settings.AUTH_USER_MODEL,
fullname='Jane Doe',
shortname='janedoe'))
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
baker.make('devilry_qualifiesforexam.QualifiesForFinalExam',
status=teststatus,
relatedstudent=relatedstudent,
qualifies=False)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
},
sessionmock={
'qualifying_assignmentids': [],
'passing_relatedstudentids': [],
'plugintypeid': 'someplugin_id'
})
studentinfo = mockresponse.selector.one('.devilry-qualifiesforexam-cell-studentinfo')
self.assertEqual(studentinfo.alltext_normalized, '{} {}'.format(relatedstudent.user.fullname,
relatedstudent.user.shortname))
self.assertEqual(mockresponse.selector.one('.devilry-qualifiesforexam-cell-qualify').alltext_normalized, 'NO')
def test_table_student_row_data_student_qualifies(self):
# Test a more complete example of data contained in cells for two students, one qualifying and one not.
testperiod = baker.make('core.Period')
relatedstudent = baker.make('core.RelatedStudent',
period=testperiod,
user=baker.make(settings.AUTH_USER_MODEL,
fullname='Jane Doe',
shortname='janedoe'))
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
baker.make('devilry_qualifiesforexam.QualifiesForFinalExam',
status=teststatus,
relatedstudent=relatedstudent,
qualifies=True)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
},
sessionmock={
'qualifying_assignmentids': [],
'passing_relatedstudentids': [relatedstudent.id],
'plugintypeid': 'someplugin_id'
})
studentinfo = mockresponse.selector.one('.devilry-qualifiesforexam-cell-studentinfo')
self.assertEqual(studentinfo.alltext_normalized, '{} {}'.format(relatedstudent.user.fullname,
relatedstudent.user.shortname))
self.assertEqual(mockresponse.selector.one('.devilry-qualifiesforexam-cell-qualify').alltext_normalized, 'YES')
def __make_related_student(self, period, fullname, lastname, shortname, candidate_id=None):
user = baker.make(settings.AUTH_USER_MODEL, fullname=fullname, lastname=lastname, shortname=shortname)
relatedstudent = baker.make('core.RelatedStudent', period=period, user=user, candidate_id=candidate_id)
return relatedstudent
def __make_qualification_item(self, status, relatedstudent, qualifies=True):
return baker.make('devilry_qualifiesforexam.QualifiesForFinalExam',
status=status, relatedstudent=relatedstudent, qualifies=qualifies)
def test_table_default_ordering_lastname(self):
testperiod = baker.make('core.Period')
relatedstudent1 = self.__make_related_student(
period=testperiod, fullname='A C', lastname='C', shortname='ac@example.com')
relatedstudent2 = self.__make_related_student(
period=testperiod, fullname='B B', lastname='B', shortname='bb@example.com')
relatedstudent3 = self.__make_related_student(
period=testperiod, fullname='C A', lastname='A', shortname='ca@example.com')
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
self.__make_qualification_item(teststatus, relatedstudent1)
self.__make_qualification_item(teststatus, relatedstudent2)
self.__make_qualification_item(teststatus, relatedstudent3)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
})
student_list = mockresponse.selector.list('.devilry-qualifiesforexam-cell-studentinfo')
self.assertEqual(len(student_list), 3)
self.assertEqual(student_list[0].alltext_normalized, 'C A ca@example.com')
self.assertEqual(student_list[1].alltext_normalized, 'B B bb@example.com')
self.assertEqual(student_list[2].alltext_normalized, 'A C ac@example.com')
def test_table_default_ordering_lastname_if_order_by_param_is_not_supported(self):
testperiod = baker.make('core.Period')
relatedstudent1 = self.__make_related_student(
period=testperiod, fullname='A C', lastname='C', shortname='ac@example.com')
relatedstudent2 = self.__make_related_student(
period=testperiod, fullname='B B', lastname='B', shortname='bb@example.com')
relatedstudent3 = self.__make_related_student(
period=testperiod, fullname='C A', lastname='A', shortname='ca@example.com')
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
self.__make_qualification_item(teststatus, relatedstudent1)
self.__make_qualification_item(teststatus, relatedstudent2)
self.__make_qualification_item(teststatus, relatedstudent3)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
},
requestkwargs={'data': {'order_by': 'asd'}})
student_list = mockresponse.selector.list('.devilry-qualifiesforexam-cell-studentinfo')
self.assertEqual(len(student_list), 3)
self.assertEqual(student_list[0].alltext_normalized, 'C A ca@example.com')
self.assertEqual(student_list[1].alltext_normalized, 'B B bb@example.com')
self.assertEqual(student_list[2].alltext_normalized, 'A C ac@example.com')
def test_table_order_by_lastname_sanity(self):
testperiod = baker.make('core.Period')
relatedstudent1 = self.__make_related_student(
period=testperiod, fullname='A C', lastname='C', shortname='ac@example.com')
relatedstudent2 = self.__make_related_student(
period=testperiod, fullname='B B', lastname='B', shortname='bb@example.com')
relatedstudent3 = self.__make_related_student(
period=testperiod, fullname='C A', lastname='A', shortname='ca@example.com')
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
self.__make_qualification_item(teststatus, relatedstudent1)
self.__make_qualification_item(teststatus, relatedstudent2)
self.__make_qualification_item(teststatus, relatedstudent3)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
},
requestkwargs={'data': {'order_by': 'lastname'}})
student_list = mockresponse.selector.list('.devilry-qualifiesforexam-cell-studentinfo')
self.assertEqual(len(student_list), 3)
self.assertEqual(student_list[0].alltext_normalized, 'C A ca@example.com')
self.assertEqual(student_list[1].alltext_normalized, 'B B bb@example.com')
self.assertEqual(student_list[2].alltext_normalized, 'A C ac@example.com')
def test_table_order_by_username(self):
testperiod = baker.make('core.Period')
relatedstudent1 = self.__make_related_student(
period=testperiod, fullname='C A', lastname='A', shortname='ca@example.com')
relatedstudent2 = self.__make_related_student(
period=testperiod, fullname='B B', lastname='B', shortname='bb@example.com')
relatedstudent3 = self.__make_related_student(
period=testperiod, fullname='A C', lastname='C', shortname='ac@example.com')
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
self.__make_qualification_item(teststatus, relatedstudent1)
self.__make_qualification_item(teststatus, relatedstudent2)
self.__make_qualification_item(teststatus, relatedstudent3)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
},
requestkwargs={'data': {'order_by': 'username'}})
student_list = mockresponse.selector.list('.devilry-qualifiesforexam-cell-studentinfo')
self.assertEqual(len(student_list), 3)
self.assertEqual(student_list[0].alltext_normalized, 'A C ac@example.com')
self.assertEqual(student_list[1].alltext_normalized, 'B B bb@example.com')
self.assertEqual(student_list[2].alltext_normalized, 'C A ca@example.com')
def test_table_order_by_fullname(self):
testperiod = baker.make('core.Period')
relatedstudent1 = self.__make_related_student(
period=testperiod, fullname='C A', lastname='A', shortname='a@example.com')
relatedstudent2 = self.__make_related_student(
period=testperiod, fullname='B B', lastname='B', shortname='b@example.com')
relatedstudent3 = self.__make_related_student(
period=testperiod, fullname='A C', lastname='C', shortname='c@example.com')
teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
self.__make_qualification_item(teststatus, relatedstudent1)
self.__make_qualification_item(teststatus, relatedstudent2)
self.__make_qualification_item(teststatus, relatedstudent3)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testperiod,
viewkwargs={
'statusid': teststatus.id
},
requestkwargs={'data': {'order_by': 'fullname'}})
student_list = mockresponse.selector.list('.devilry-qualifiesforexam-cell-studentinfo')
self.assertEqual(len(student_list), 3)
self.assertEqual(student_list[0].alltext_normalized, 'A C c@example.com')
self.assertEqual(student_list[1].alltext_normalized, 'B B b@example.com')
self.assertEqual(student_list[2].alltext_normalized, 'C A a@example.com')
def test_table_order_by_candidate_id(self):
    """Listing is sorted by candidate id when ``order_by=candidateid``."""
    testperiod = baker.make('core.Period')
    student_specs = [
        ('C C', 'C', 'c@example.com', '1'),
        ('B B', 'B', 'b@example.com', '3'),
        ('A A', 'A', 'a@example.com', '2'),
    ]
    relatedstudents = [
        self.__make_related_student(
            period=testperiod, fullname=fullname, lastname=lastname,
            shortname=shortname, candidate_id=candidate_id)
        for fullname, lastname, shortname, candidate_id in student_specs
    ]
    teststatus = baker.make('devilry_qualifiesforexam.Status', period=testperiod)
    for relatedstudent in relatedstudents:
        self.__make_qualification_item(teststatus, relatedstudent)
    mockresponse = self.mock_http200_getrequest_htmls(
        cradmin_role=testperiod,
        viewkwargs={'statusid': teststatus.id},
        requestkwargs={'data': {'order_by': 'candidateid'}})
    rendered_students = mockresponse.selector.list(
        '.devilry-qualifiesforexam-cell-studentinfo')
    self.assertEqual(len(rendered_students), 3)
    # Sorted by candidate_id: '1' (C C), '2' (A A), '3' (B B).
    self.assertEqual(rendered_students[0].alltext_normalized, 'C C c@example.com')
    self.assertEqual(rendered_students[1].alltext_normalized, 'A A a@example.com')
    self.assertEqual(rendered_students[2].alltext_normalized, 'B B b@example.com')
| 51.79955
| 120
| 0.649202
| 2,182
| 22,999
| 6.601742
| 0.083868
| 0.038737
| 0.027976
| 0.059979
| 0.878098
| 0.866644
| 0.844498
| 0.827213
| 0.806109
| 0.798056
| 0
| 0.008559
| 0.253272
| 22,999
| 443
| 121
| 51.916479
| 0.830208
| 0.031958
| 0
| 0.729592
| 0
| 0
| 0.1697
| 0.0964
| 0
| 0
| 0
| 0
| 0.112245
| 1
| 0.061224
| false
| 0.020408
| 0.015306
| 0.002551
| 0.091837
| 0.010204
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
89903e7bb0718f29bd54eaa9bba272ee4b1884e0
| 55
|
py
|
Python
|
03_numbers/014_floats.py
|
selimmh/Python_Basics
|
9e16de82306261f3acd44d88b862bcc14d1a71a1
|
[
"CNRI-Python"
] | 1
|
2020-04-17T20:12:55.000Z
|
2020-04-17T20:12:55.000Z
|
03_numbers/014_floats.py
|
selimmh/Python_Basics
|
9e16de82306261f3acd44d88b862bcc14d1a71a1
|
[
"CNRI-Python"
] | null | null | null |
03_numbers/014_floats.py
|
selimmh/Python_Basics
|
9e16de82306261f3acd44d88b862bcc14d1a71a1
|
[
"CNRI-Python"
] | null | null | null |
# Working with floats: demonstrate basic float arithmetic.
float_sum = 1.5 + 2.7
float_product = 1.5 * 2.5
print(float_sum)
print(float_product)
| 13.75
| 20
| 0.636364
| 13
| 55
| 2.692308
| 0.615385
| 0.342857
| 0.4
| 0.457143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 0.163636
| 55
| 4
| 21
| 13.75
| 0.586957
| 0.345455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
9823b6474214ff3054e3d2f82c32f47f6d2e24cd
| 2,769
|
py
|
Python
|
tests/test_datastores.py
|
DalavanCloud/nurl-2g
|
dff81942c585003581a5b55a694ad71ae5c519d7
|
[
"BSD-2-Clause"
] | 1
|
2019-03-16T05:04:59.000Z
|
2019-03-16T05:04:59.000Z
|
tests/test_datastores.py
|
DalavanCloud/nurl-2g
|
dff81942c585003581a5b55a694ad71ae5c519d7
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_datastores.py
|
DalavanCloud/nurl-2g
|
dff81942c585003581a5b55a694ad71ae5c519d7
|
[
"BSD-2-Clause"
] | null | null | null |
import os
import unittest
import operator
from nurl import datastores
IS_RUNNING_ON_TRAVISCI = os.environ.get('TRAVIS', False)
class InMemoryTests(unittest.TestCase):
    """Behavioral tests for the dict-backed datastore.

    The store is bijective: both keys and values must be unique, and
    values can be looked up back to their key via ``store.key(value)``.
    """

    def setUp(self):
        self.store = datastores.InMemoryDataStore()

    def test_add_new_pair(self):
        self.store['foo'] = 'bar'
        self.assertEqual(self.store['foo'], 'bar')

    def test_add_new_value_on_existing_key(self):
        # Re-assigning an existing key must be rejected.
        self.store['foo'] = 'bar'
        with self.assertRaises(datastores.DuplicatedKeyError):
            self.store['foo'] = 'baz'

    def test_add_existing_value_on_new_key(self):
        # Values are unique too: storing 'bar' under a second key fails.
        self.store['foo'] = 'bar'
        with self.assertRaises(datastores.DuplicatedValueError):
            self.store['baz'] = 'bar'

    def test_add_existing_pair(self):
        # Writing the exact same pair again still raises on the key.
        self.store['foo'] = 'bar'
        with self.assertRaises(datastores.DuplicatedKeyError):
            self.store['foo'] = 'bar'

    def test_get_key_for_value(self):
        self.store['foo'] = 'bar'
        self.assertEqual(self.store.key('bar'), 'foo')

    def test_get_key_for_non_existing_value(self):
        self.store['foo'] = 'bar'
        with self.assertRaises(KeyError):
            self.store.key('baz')
@unittest.skipUnless(IS_RUNNING_ON_TRAVISCI, 'requires travis-ci')
class MongoDBTests(unittest.TestCase):
    """Integration tests executed only on Travis-CI.

    Mirrors the in-memory test suite against a real MongoDB backend;
    the scratch database is dropped after every test.
    """

    def setUp(self):
        import pymongo
        self.client = pymongo.MongoClient('127.0.0.1', 27017)
        self.collection = self.client['nurl_tests']['urls']
        self.store = datastores.MongoDBDataStore(self.collection)

    def tearDown(self):
        self.client.drop_database('nurl_tests')

    def test_add_new_pair(self):
        self.store['foo'] = 'bar'
        self.assertEqual(self.store['foo'], 'bar')

    def test_add_new_value_on_existing_key(self):
        # Re-assigning an existing key must be rejected.
        self.store['foo'] = 'bar'
        with self.assertRaises(datastores.DuplicatedKeyError):
            self.store['foo'] = 'baz'

    def test_add_existing_value_on_new_key(self):
        # Values are unique too: storing 'bar' under a second key fails.
        self.store['foo'] = 'bar'
        with self.assertRaises(datastores.DuplicatedValueError):
            self.store['baz'] = 'bar'

    def test_add_existing_pair(self):
        # Writing the exact same pair again still raises on the key.
        self.store['foo'] = 'bar'
        with self.assertRaises(datastores.DuplicatedKeyError):
            self.store['foo'] = 'bar'

    def test_get_key_for_value(self):
        self.store['foo'] = 'bar'
        self.assertEqual(self.store.key('bar'), 'foo')

    def test_get_key_for_non_existing_value(self):
        self.store['foo'] = 'bar'
        with self.assertRaises(KeyError):
            self.store.key('baz')
| 33.361446
| 67
| 0.654027
| 335
| 2,769
| 5.21194
| 0.197015
| 0.134021
| 0.123711
| 0.137457
| 0.707904
| 0.707904
| 0.707904
| 0.707904
| 0.707904
| 0.707904
| 0
| 0.00503
| 0.210184
| 2,769
| 82
| 68
| 33.768293
| 0.793324
| 0.018779
| 0
| 0.745763
| 0
| 0
| 0.072089
| 0
| 0
| 0
| 0
| 0
| 0.20339
| 1
| 0.254237
| false
| 0
| 0.084746
| 0
| 0.372881
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f5221fb075e6371a55357c60ee2181f1b63b6d8
| 724
|
py
|
Python
|
python_startup.py
|
DerMaxxiKing/htc_calculator
|
10d2e31a1cb4256fdcbe25ec915d7280927a064a
|
[
"MIT"
] | null | null | null |
python_startup.py
|
DerMaxxiKing/htc_calculator
|
10d2e31a1cb4256fdcbe25ec915d7280927a064a
|
[
"MIT"
] | null | null | null |
python_startup.py
|
DerMaxxiKing/htc_calculator
|
10d2e31a1cb4256fdcbe25ec915d7280927a064a
|
[
"MIT"
] | null | null | null |
import sys

print('Importing FreeCAD and Modules')

# Library paths of the extracted FreeCAD AppImage (squashfs-root).
# NOTE(review): the original script appended several relative
# 'mp/squashfs-root/...' entries -- apparent typos for '/tmp/...' that
# could never resolve -- and appended the site-packages path twice.
# Normalized to absolute, de-duplicated paths here.
_FREECAD_PATHS = [
    '/tmp/squashfs-root/usr/lib/python38.zip',
    '/tmp/squashfs-root/usr/lib/python3.8',
    '/tmp/squashfs-root/usr/lib/python3.8/lib-dynload',
    '/tmp/squashfs-root/usr/lib/python3.8/site-packages',
    '/tmp/squashfs-root/usr/lib/',
    '/tmp/squashfs-root/usr/Ext',
    '/tmp/squashfs-root/usr/lib',
]
for _path in _FREECAD_PATHS:
    # Avoid polluting sys.path with duplicates on repeated execution.
    if _path not in sys.path:
        sys.path.append(_path)

# import FreeCAD
# import Part as FCPart
# from Draft import make_fillet
# from FreeCAD import Base
# from Arch import makePipe
| 36.2
| 70
| 0.743094
| 118
| 724
| 4.550847
| 0.279661
| 0.117318
| 0.217877
| 0.268156
| 0.715084
| 0.715084
| 0.715084
| 0.683426
| 0.49162
| 0.49162
| 0
| 0.018072
| 0.082873
| 724
| 19
| 71
| 38.105263
| 0.790663
| 0.161602
| 0
| 0.181818
| 0
| 0
| 0.640893
| 0.591065
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.181818
| 0
| 0.181818
| 0.090909
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f6971eccc837e13122161bd7d079ec01e2d2c08
| 279
|
py
|
Python
|
languages/python/membind/__init__.py
|
robjsliwa/mem_query
|
09a1ba736c4d8faadb9df6618934a611fa168647
|
[
"MIT"
] | null | null | null |
languages/python/membind/__init__.py
|
robjsliwa/mem_query
|
09a1ba736c4d8faadb9df6618934a611fa168647
|
[
"MIT"
] | 8
|
2021-03-05T14:42:48.000Z
|
2021-04-17T19:20:27.000Z
|
languages/python/membind/__init__.py
|
robjsliwa/mem_query
|
09a1ba736c4d8faadb9df6618934a611fa168647
|
[
"MIT"
] | null | null | null |
# Public surface of the ``membind`` package: re-export the low-level
# memory helpers so callers can import them from the package root.
from membind.memory import linear_mem_addr, write_to_memory,\
ptr_to_str_with_len, ptr_to_str, write_str, result_ptr_to_value
# Names exported by ``from membind import *`` -- keep in sync with the
# import list above.
__all__ = [
'linear_mem_addr',
'write_to_memory',
'ptr_to_str_with_len',
'ptr_to_str',
'write_str',
'result_ptr_to_value'
]
| 23.25
| 67
| 0.731183
| 45
| 279
| 3.822222
| 0.355556
| 0.174419
| 0.186047
| 0.209302
| 0.848837
| 0.848837
| 0.848837
| 0.848837
| 0.848837
| 0.848837
| 0
| 0
| 0.172043
| 279
| 12
| 68
| 23.25
| 0.744589
| 0
| 0
| 0
| 0
| 0
| 0.310714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7f7fa65983a489ad3d02e1964ea3b9b83aa90d47
| 5,701
|
py
|
Python
|
ws_lock/tests/test_with_pytest.py
|
sevdog/test-websocker-user-lock
|
8aef3249f623e23479d61c7681e0f59909c47926
|
[
"MIT"
] | null | null | null |
ws_lock/tests/test_with_pytest.py
|
sevdog/test-websocker-user-lock
|
8aef3249f623e23479d61c7681e0f59909c47926
|
[
"MIT"
] | null | null | null |
ws_lock/tests/test_with_pytest.py
|
sevdog/test-websocker-user-lock
|
8aef3249f623e23479d61c7681e0f59909c47926
|
[
"MIT"
] | null | null | null |
import threading
from asgiref.sync import sync_to_async
import pytest
from ..models import Item
@pytest.mark.django_db(transaction=True)
class TestLocks:
"""Websocket lock tests driven by pytest fixtures.

Each ``allowed_socket_user_*`` / ``wrong_socket_user`` fixture is
presumably a connected (or rejected) websocket test client wrapping a
channels communicator -- TODO confirm against the fixture definitions,
which are not visible in this file.
"""
@pytest.mark.asyncio
async def test_fetch(self, django_db_setup_for_sockets):
# Sanity check: the DB fixture seeds exactly 8 Item rows.
@sync_to_async
def fetch():
return list(Item.objects.all())
items = await fetch()
assert len(items) == 8
@pytest.mark.asyncio
async def test_denied_access(self, wrong_socket_user):
# A user without access must not get a websocket connection.
print('test', threading.get_ident())
assert wrong_socket_user.connected is False
@pytest.mark.asyncio
async def test_shared_bar_baz(self, allowed_socket_user_baz, allowed_socket_user_bar):
# Lock/unlock events on a shared item are broadcast to both users.
assert allowed_socket_user_baz.connected is True
assert allowed_socket_user_bar.connected is True
# item 3 is FOO
await allowed_socket_user_bar.communicator.send_json_to({'items': [3]})
received_bar = await allowed_socket_user_bar.communicator.receive_json_from()
received_baz = await allowed_socket_user_baz.communicator.receive_json_from()
assert received_bar == received_baz == [{
'user': allowed_socket_user_bar.user.id,
'item': 3,
'locked': True
}]
# leave lock
await allowed_socket_user_bar.communicator.send_json_to({'items': []})
received_bar = await allowed_socket_user_bar.communicator.receive_json_from()
received_baz = await allowed_socket_user_baz.communicator.receive_json_from()
assert received_bar == received_baz == [{
'user': allowed_socket_user_bar.user.id,
'item': 3,
'locked': False
}]
@pytest.mark.asyncio
async def test_unshared_bar_baz(self, allowed_socket_user_baz, allowed_socket_user_bar):
# Events on an item only baz can see are NOT delivered to bar.
assert allowed_socket_user_baz.connected is True
assert allowed_socket_user_bar.connected is True
# item 7 is BAZ
await allowed_socket_user_baz.communicator.send_json_to({'items': [7]})
assert await allowed_socket_user_bar.communicator.receive_nothing()
received_baz = await allowed_socket_user_baz.communicator.receive_json_from()
assert received_baz == [{
'user': allowed_socket_user_baz.user.id,
'item': 7,
'locked': True
}]
# leave lock
await allowed_socket_user_baz.communicator.send_json_to({'items': []})
assert await allowed_socket_user_bar.communicator.receive_nothing()
received_baz = await allowed_socket_user_baz.communicator.receive_json_from()
assert received_baz == [{
'user': allowed_socket_user_baz.user.id,
'item': 7,
'locked': False
}]
@pytest.mark.asyncio
async def test_multiple_bar_baz(self, allowed_socket_user_baz, allowed_socket_user_bar):
# Locking several items at once: each recipient only sees events
# for the items visible to them (bar sees 3, baz sees 3 and 7).
assert allowed_socket_user_baz.connected is True
assert allowed_socket_user_bar.connected is True
# item 7 is BAZ, item 3 is FOO
await allowed_socket_user_baz.communicator.send_json_to({'items': [3, 7]})
received_bar = await allowed_socket_user_bar.communicator.receive_json_from()
received_baz = await allowed_socket_user_baz.communicator.receive_json_from()
assert received_bar == [{
'user': allowed_socket_user_baz.user.id,
'item': 3,
'locked': True
}]
assert received_baz == [{
'user': allowed_socket_user_baz.user.id,
'item': 3,
'locked': True
}]
assert await allowed_socket_user_bar.communicator.receive_nothing()
received_baz = await allowed_socket_user_baz.communicator.receive_json_from()
assert received_baz == [{
'user': allowed_socket_user_baz.user.id,
'item': 7,
'locked': True
}]
# leave lock
await allowed_socket_user_baz.communicator.send_json_to({'items': []})
received_bar = await allowed_socket_user_bar.communicator.receive_json_from()
received_baz = await allowed_socket_user_baz.communicator.receive_json_from()
assert received_bar == [{
'user': allowed_socket_user_baz.user.id,
'item': 3,
'locked': False
}]
assert received_baz == [{
'user': allowed_socket_user_baz.user.id,
'item': 3,
'locked': False
}]
assert await allowed_socket_user_bar.communicator.receive_nothing()
received_baz = await allowed_socket_user_baz.communicator.receive_json_from()
assert received_baz == [{
'user': allowed_socket_user_baz.user.id,
'item': 7,
'locked': False
}]
@pytest.mark.asyncio
async def test_shared_no_conflict_bar_baz(self, allowed_socket_user_baz, allowed_socket_user_bar):
# Requesting a lock on an already-locked item produces no event.
assert allowed_socket_user_baz.connected is True
assert allowed_socket_user_bar.connected is True
# item 3 is FOO
await allowed_socket_user_bar.communicator.send_json_to({'items': [3]})
received_bar = await allowed_socket_user_bar.communicator.receive_json_from()
received_baz = await allowed_socket_user_baz.communicator.receive_json_from()
assert received_bar == received_baz == [{
'user': allowed_socket_user_bar.user.id,
'item': 3,
'locked': True
}]
# item 3 already locked
await allowed_socket_user_baz.communicator.send_json_to({'items': [3]})
assert await allowed_socket_user_bar.communicator.receive_nothing()
assert await allowed_socket_user_baz.communicator.receive_nothing()
| 42.864662
| 102
| 0.66655
| 699
| 5,701
| 5.044349
| 0.095851
| 0.161656
| 0.265173
| 0.175837
| 0.894214
| 0.892513
| 0.876064
| 0.863585
| 0.835508
| 0.819342
| 0
| 0.005585
| 0.246273
| 5,701
| 132
| 103
| 43.189394
| 0.814987
| 0.021926
| 0
| 0.793103
| 0
| 0
| 0.035567
| 0
| 0
| 0
| 0
| 0
| 0.232759
| 1
| 0.008621
| false
| 0
| 0.034483
| 0.008621
| 0.060345
| 0.008621
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7fb94eea51dc1edfd1ce09d212b99a1995f6e60a
| 9,660
|
py
|
Python
|
easyai/model/base_block/cls/attention_net_block.py
|
lpj0822/image_point_cloud_det
|
7b20e2f42f3f2ff4881485da58ad188a1f0d0e0f
|
[
"MIT"
] | 1
|
2020-09-05T09:18:56.000Z
|
2020-09-05T09:18:56.000Z
|
easyai/model/base_block/cls/attention_net_block.py
|
lpj0822/image_point_cloud_det
|
7b20e2f42f3f2ff4881485da58ad188a1f0d0e0f
|
[
"MIT"
] | 8
|
2020-04-20T02:18:55.000Z
|
2022-03-12T00:24:50.000Z
|
easyai/model/base_block/cls/attention_net_block.py
|
lpj0822/image_point_cloud_det
|
7b20e2f42f3f2ff4881485da58ad188a1f0d0e0f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:
from easyai.base_name.block_name import NormalizationType, ActivationType
from easyai.model.base_block.utility.base_block import *
from easyai.model.base_block.cls.preact_resnet_block import PreActBottleNeck
class AttentionNetBlockName():
    """String identifiers registered for the attention modules in this file."""

    AttentionModule1 = "attentionModule1"
    AttentionModule2 = "attentionModule2"
    AttentionModule3 = "attentionModule3"
class AttentionModule1(BaseBlock):
"""Attention module with a 3-level downsample/upsample mask branch.

Requires ``in_channels == out_channels``; output is
``(1 + mask) * trunk`` passed through a final residual stage.
"""
def __init__(self, in_channels, out_channels, p=1, t=2, r=1,
bn_name=NormalizationType.BatchNormalize2d,
activation_name=ActivationType.ReLU):
super().__init__(AttentionNetBlockName.AttentionModule1)
# """The hyperparameter p denotes the number of preprocessing Residual
# Units before splitting into trunk branch and mask branch. t denotes
# the number of Residual Units in trunk branch. r denotes the number of
# Residual Units between adjacent pooling layer in the mask branch."""
assert in_channels == out_channels
self.pre = self._make_residual(in_channels, out_channels, p)
self.trunk = self._make_residual(in_channels, out_channels, t)
self.soft_resdown1 = self._make_residual(in_channels, out_channels, r)
self.soft_resdown2 = self._make_residual(in_channels, out_channels, r)
self.soft_resdown3 = self._make_residual(in_channels, out_channels, r)
self.soft_resdown4 = self._make_residual(in_channels, out_channels, r)
self.soft_resup1 = self._make_residual(in_channels, out_channels, r)
self.soft_resup2 = self._make_residual(in_channels, out_channels, r)
self.soft_resup3 = self._make_residual(in_channels, out_channels, r)
self.soft_resup4 = self._make_residual(in_channels, out_channels, r)
bottleneck_channels = int(out_channels / 4)
# NOTE(review): self.shortcut_short is created here but never used in
# forward() -- forward computes shortcut_short via self.soft_resdown3
# instead. Possibly a bug; confirm against the reference implementation.
self.shortcut_short = PreActBottleNeck(in_channels, bottleneck_channels, 1)
self.shortcut_long = PreActBottleNeck(in_channels, bottleneck_channels, 1)
# Mask head: BN/ReLU/1x1-conv twice, squashed to (0, 1) by Sigmoid.
self.sigmoid = nn.Sequential(
nn.BatchNorm2d(out_channels),
nn.ReLU(inplace=True),
nn.Conv2d(out_channels, out_channels, kernel_size=1),
nn.BatchNorm2d(out_channels),
nn.ReLU(inplace=True),
nn.Conv2d(out_channels, out_channels, kernel_size=1),
nn.Sigmoid()
)
self.last = self._make_residual(in_channels, out_channels, p)
def forward(self, x):
###We make the size of the smallest output map in each mask branch 7*7 to be consistent
# with the smallest trunk output map size.
###Thus 3,2,1 max-pooling layers are used in mask branch with input size 56 * 56, 28 * 28, 14 * 14 respectively.
x = self.pre(x)
input_size = (x.size(2), x.size(3))
x_t = self.trunk(x)
# first downsample out 28
x_s = F.max_pool2d(x, kernel_size=3, stride=2, padding=1)
x_s = self.soft_resdown1(x_s)
# 28 shortcut
shape1 = (x_s.size(2), x_s.size(3))
shortcut_long = self.shortcut_long(x_s)
# seccond downsample out 14
# NOTE(review): pools from the pre-branch tensor ``x`` again rather than
# the running mask tensor ``x_s``, so every "downsample" level has the
# same spatial size. Looks like a typo for ``x_s``; confirm before fixing.
x_s = F.max_pool2d(x, kernel_size=3, stride=2, padding=1)
x_s = self.soft_resdown2(x_s)
# 14 shortcut
shape2 = (x_s.size(2), x_s.size(3))
shortcut_short = self.soft_resdown3(x_s)
# third downsample out 7
# NOTE(review): same ``x`` vs ``x_s`` concern as above; soft_resdown3 is
# also applied twice (here and for shortcut_short).
x_s = F.max_pool2d(x, kernel_size=3, stride=2, padding=1)
x_s = self.soft_resdown3(x_s)
# mid
x_s = self.soft_resdown4(x_s)
x_s = self.soft_resup1(x_s)
# first upsample out 14
x_s = self.soft_resup2(x_s)
x_s = F.interpolate(x_s, size=shape2)
x_s += shortcut_short
# second upsample out 28
x_s = self.soft_resup3(x_s)
x_s = F.interpolate(x_s, size=shape1)
x_s += shortcut_long
# thrid upsample out 54
x_s = self.soft_resup4(x_s)
x_s = F.interpolate(x_s, size=input_size)
x_s = self.sigmoid(x_s)
x = (1 + x_s) * x_t
x = self.last(x)
return x
def _make_residual(self, in_channels, out_channels, p):
# Stack ``p`` pre-activation bottleneck units, stride 1.
layers = []
bottleneck_channels = int(out_channels / 4)
for _ in range(p):
layers.append(PreActBottleNeck(in_channels, bottleneck_channels, 1))
return nn.Sequential(*layers)
class AttentionModule2(BaseBlock):
"""Attention module with a 2-level downsample/upsample mask branch.

Same structure as AttentionModule1 with one fewer pooling level;
output is ``(1 + mask) * trunk`` through a final residual stage.
"""
def __init__(self, in_channels, out_channels, p=1, t=2, r=1,
bn_name=NormalizationType.BatchNormalize2d,
activation_name=ActivationType.ReLU):
super().__init__(AttentionNetBlockName.AttentionModule2)
# """The hyperparameter p denotes the number of preprocessing Residual
# Units before splitting into trunk branch and mask branch. t denotes
# the number of Residual Units in trunk branch. r denotes the number of
# Residual Units between adjacent pooling layer in the mask branch."""
assert in_channels == out_channels
self.pre = self._make_residual(in_channels, out_channels, p)
self.trunk = self._make_residual(in_channels, out_channels, t)
self.soft_resdown1 = self._make_residual(in_channels, out_channels, r)
self.soft_resdown2 = self._make_residual(in_channels, out_channels, r)
self.soft_resdown3 = self._make_residual(in_channels, out_channels, r)
self.soft_resup1 = self._make_residual(in_channels, out_channels, r)
self.soft_resup2 = self._make_residual(in_channels, out_channels, r)
self.soft_resup3 = self._make_residual(in_channels, out_channels, r)
bottleneck_channels = int(out_channels / 4)
self.shortcut = PreActBottleNeck(in_channels, bottleneck_channels, 1)
# Mask head: BN/ReLU/1x1-conv twice, squashed to (0, 1) by Sigmoid.
self.sigmoid = nn.Sequential(
nn.BatchNorm2d(out_channels),
nn.ReLU(inplace=True),
nn.Conv2d(out_channels, out_channels, kernel_size=1),
nn.BatchNorm2d(out_channels),
nn.ReLU(inplace=True),
nn.Conv2d(out_channels, out_channels, kernel_size=1),
nn.Sigmoid()
)
self.last = self._make_residual(in_channels, out_channels, p)
def forward(self, x):
x = self.pre(x)
input_size = (x.size(2), x.size(3))
x_t = self.trunk(x)
# first downsample out 14
x_s = F.max_pool2d(x, kernel_size=3, stride=2, padding=1)
x_s = self.soft_resdown1(x_s)
# 14 shortcut
shape1 = (x_s.size(2), x_s.size(3))
shortcut = self.shortcut(x_s)
# seccond downsample out 7
# NOTE(review): pools from ``x`` rather than the running mask tensor
# ``x_s``, so both levels share the same spatial size. Looks like a typo
# for ``x_s``; confirm against the reference implementation before fixing.
x_s = F.max_pool2d(x, kernel_size=3, stride=2, padding=1)
x_s = self.soft_resdown2(x_s)
# mid
x_s = self.soft_resdown3(x_s)
x_s = self.soft_resup1(x_s)
# first upsample out 14
x_s = self.soft_resup2(x_s)
x_s = F.interpolate(x_s, size=shape1)
x_s += shortcut
# second upsample out 28
x_s = self.soft_resup3(x_s)
x_s = F.interpolate(x_s, size=input_size)
x_s = self.sigmoid(x_s)
x = (1 + x_s) * x_t
x = self.last(x)
return x
def _make_residual(self, in_channels, out_channels, p):
# Stack ``p`` pre-activation bottleneck units, stride 1.
layers = []
bottleneck_channels = int(out_channels / 4)
for _ in range(p):
layers.append(PreActBottleNeck(in_channels, bottleneck_channels, 1))
return nn.Sequential(*layers)
class AttentionModule3(BaseBlock):
"""Attention module with a single downsample/upsample level in the mask
branch; output is ``(1 + mask) * trunk`` through a final residual stage.
"""
def __init__(self, in_channels, out_channels, p=1, t=2, r=1,
bn_name=NormalizationType.BatchNormalize2d,
activation_name=ActivationType.ReLU):
super().__init__(AttentionNetBlockName.AttentionModule3)
assert in_channels == out_channels
self.pre = self._make_residual(in_channels, out_channels, p)
self.trunk = self._make_residual(in_channels, out_channels, t)
self.soft_resdown1 = self._make_residual(in_channels, out_channels, r)
self.soft_resdown2 = self._make_residual(in_channels, out_channels, r)
self.soft_resup1 = self._make_residual(in_channels, out_channels, r)
self.soft_resup2 = self._make_residual(in_channels, out_channels, r)
bottleneck_channels = int(out_channels / 4)
# NOTE(review): self.shortcut is created but never used in forward()
# (this shallowest module has no skip inside the mask branch).
self.shortcut = PreActBottleNeck(in_channels, bottleneck_channels, 1)
# Mask head: BN/ReLU/1x1-conv twice, squashed to (0, 1) by Sigmoid.
self.sigmoid = nn.Sequential(
nn.BatchNorm2d(out_channels),
nn.ReLU(inplace=True),
nn.Conv2d(out_channels, out_channels, kernel_size=1),
nn.BatchNorm2d(out_channels),
nn.ReLU(inplace=True),
nn.Conv2d(out_channels, out_channels, kernel_size=1),
nn.Sigmoid()
)
self.last = self._make_residual(in_channels, out_channels, p)
def forward(self, x):
x = self.pre(x)
input_size = (x.size(2), x.size(3))
x_t = self.trunk(x)
# first downsample out 14
x_s = F.max_pool2d(x, kernel_size=3, stride=2, padding=1)
x_s = self.soft_resdown1(x_s)
# mid
x_s = self.soft_resdown2(x_s)
x_s = self.soft_resup1(x_s)
# first upsample out 14
x_s = self.soft_resup2(x_s)
x_s = F.interpolate(x_s, size=input_size)
x_s = self.sigmoid(x_s)
x = (1 + x_s) * x_t
x = self.last(x)
return x
def _make_residual(self, in_channels, out_channels, p):
# Stack ``p`` pre-activation bottleneck units, stride 1.
layers = []
bottleneck_channels = int(out_channels / 4)
for _ in range(p):
layers.append(PreActBottleNeck(in_channels, bottleneck_channels, 1))
return nn.Sequential(*layers)
| 36.590909
| 120
| 0.646377
| 1,319
| 9,660
| 4.457165
| 0.10235
| 0.025515
| 0.135737
| 0.128593
| 0.87702
| 0.862732
| 0.862732
| 0.843341
| 0.843341
| 0.839599
| 0
| 0.025498
| 0.257039
| 9,660
| 264
| 121
| 36.590909
| 0.793646
| 0.120393
| 0
| 0.820359
| 0
| 0
| 0.005672
| 0
| 0
| 0
| 0
| 0
| 0.017964
| 1
| 0.053892
| false
| 0
| 0.017964
| 0
| 0.149701
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f6a6b0de19abebc078675acb0c73aac6bfabe7ff
| 4,981
|
py
|
Python
|
pypika/tests/test_groupby_modifiers.py
|
trust-kaz/pypika
|
78ce885ca445e63bb6168d4456dd11072c60e63b
|
[
"Apache-2.0"
] | null | null | null |
pypika/tests/test_groupby_modifiers.py
|
trust-kaz/pypika
|
78ce885ca445e63bb6168d4456dd11072c60e63b
|
[
"Apache-2.0"
] | null | null | null |
pypika/tests/test_groupby_modifiers.py
|
trust-kaz/pypika
|
78ce885ca445e63bb6168d4456dd11072c60e63b
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from pypika import Table, Query, Rollup, functions as fn, RollupException
__author__ = "Timothy Heys"
__email__ = "theys@kayak.com"
class RollupTests(unittest.TestCase):
"""SQL-generation tests for GROUP BY ROLLUP.

MySQL renders as ``GROUP BY ... WITH ROLLUP``; the Vertica/Oracle
syntax renders as ``GROUP BY ROLLUP(...)``. Each test asserts the
exact SQL string produced by the query builder.
"""
table = Table("abc")
def test_mysql_one_groupby(self):
q = (
Query.from_(self.table)
.select(self.table.foo, fn.Sum(self.table.bar))
.rollup(self.table.foo, vendor="mysql")
)
self.assertEqual(
'SELECT "foo",SUM("bar") FROM "abc" GROUP BY "foo" WITH ROLLUP', str(q)
)
def test_mysql_rollup_two_groupbys(self):
q = (
Query.from_(self.table)
.select(self.table.foo, self.table.fiz, fn.Sum(self.table.bar))
.rollup(self.table.foo, self.table.fiz, vendor="mysql")
)
self.assertEqual(
'SELECT "foo","fiz",SUM("bar") FROM "abc" GROUP BY "foo","fiz" WITH ROLLUP',
str(q),
)
def test_no_rollup_before_groupby(self):
# MySQL rollup with no terms to roll up is an error.
with self.assertRaises(RollupException):
Query.from_(self.table).select(
self.table.foo, fn.Sum(self.table.bar)
).rollup(vendor="mysql")
def test_no_rollup_after_rollup_mysql(self):
# A second .rollup() after a MySQL rollup is rejected.
with self.assertRaises(AttributeError):
Query.from_(self.table).select(
self.table.foo, self.table.fiz, fn.Sum(self.table.bar)
).rollup(self.table.foo, vendor="mysql").rollup(
self.table.fiz, vendor="mysql"
)
def test_verticaoracle_func_one_groupby(self):
# Rollup() can be passed directly to .groupby().
q = (
Query.from_(self.table)
.select(self.table.foo, fn.Sum(self.table.bar))
.groupby(Rollup(self.table.foo))
)
self.assertEqual(
'SELECT "foo",SUM("bar") FROM "abc" GROUP BY ROLLUP("foo")', str(q)
)
def test_verticaoracle_func_two_groupbys(self):
q = (
Query.from_(self.table)
.select(self.table.foo, self.table.fiz, fn.Sum(self.table.bar))
.groupby(Rollup(self.table.foo, self.table.fiz,))
)
self.assertEqual(
'SELECT "foo","fiz",SUM("bar") FROM "abc" GROUP BY ROLLUP("foo","fiz")',
str(q),
)
def test_verticaoracle_func_partial(self):
# Rollup may be mixed with plain (non-rolled-up) groupby terms.
q = (
Query.from_(self.table)
.select(
self.table.foo, self.table.fiz, self.table.buz, fn.Sum(self.table.bar)
)
.groupby(Rollup(self.table.foo, self.table.fiz,), self.table.buz,)
)
self.assertEqual(
'SELECT "foo","fiz","buz",SUM("bar") FROM "abc" GROUP BY ROLLUP("foo","fiz"),"buz"',
str(q),
)
def test_verticaoracle_from_groupbys(self):
# Default (non-mysql) .rollup() renders the ROLLUP(...) form.
q = (
Query.from_(self.table)
.select(self.table.foo, fn.Sum(self.table.bar))
.rollup(self.table.foo)
)
self.assertEqual(
'SELECT "foo",SUM("bar") FROM "abc" GROUP BY ROLLUP("foo")', str(q)
)
def test_verticaoracle_from_two_groupbys(self):
q = (
Query.from_(self.table)
.select(self.table.foo, self.table.fiz, fn.Sum(self.table.bar))
.rollup(self.table.foo, self.table.fiz,)
)
self.assertEqual(
'SELECT "foo","fiz",SUM("bar") FROM "abc" GROUP BY ROLLUP("foo","fiz")',
str(q),
)
def test_verticaoracle_from_parameters(self):
# Plain groupby followed by rollup keeps both terms in order.
q = (
Query.from_(self.table)
.select(self.table.foo, self.table.fiz, fn.Sum(self.table.bar))
.groupby(self.table.foo,)
.rollup(self.table.fiz,)
)
self.assertEqual(
'SELECT "foo","fiz",SUM("bar") FROM "abc" GROUP BY "foo",ROLLUP("fiz")',
str(q),
)
def test_verticaoracle_multiple_rollups(self):
# Consecutive .rollup() calls are merged into one ROLLUP(...).
q = (
Query.from_(self.table)
.select(self.table.foo, self.table.fiz, fn.Sum(self.table.bar))
.rollup(self.table.foo,)
.rollup(self.table.fiz,)
)
self.assertEqual(
'SELECT "foo","fiz",SUM("bar") FROM "abc" GROUP BY ROLLUP("foo","fiz")',
str(q),
)
def test_verticaoracle_rollups_with_parity(self):
# A list argument becomes a parenthesized grouping inside ROLLUP.
q = (
Query.from_(self.table)
.select(self.table.buz,)
.rollup([self.table.foo, self.table.bar], self.table.fiz,)
)
self.assertEqual(
'SELECT "buz" FROM "abc" GROUP BY ROLLUP(("foo","bar"),"fiz")', str(q)
)
def test_verticaoracle_rollups_with_multiple_rollups_and_parity(self):
q = (
Query.from_(self.table)
.select(self.table.buz,)
.rollup([self.table.foo, self.table.bar],)
.rollup([self.table.fiz, self.table.buz],)
)
self.assertEqual(
'SELECT "buz" FROM "abc" GROUP BY ROLLUP(("foo","bar"),("fiz","buz"))',
str(q),
)
| 31.525316
| 96
| 0.543264
| 594
| 4,981
| 4.434343
| 0.094276
| 0.23918
| 0.104784
| 0.091116
| 0.840926
| 0.829916
| 0.776006
| 0.774867
| 0.757403
| 0.720577
| 0
| 0
| 0.306364
| 4,981
| 157
| 97
| 31.726115
| 0.762373
| 0
| 0
| 0.457364
| 0
| 0.046512
| 0.158201
| 0.045172
| 0
| 0
| 0
| 0
| 0.100775
| 1
| 0.100775
| false
| 0
| 0.015504
| 0
| 0.131783
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f6dbf468315177b0c43f8a136dd4aac24f92ec6f
| 2,881
|
py
|
Python
|
alpyro_msgs/control_msgs/jointtrajectoryaction.py
|
rho2/alpyro_msgs
|
b5a680976c40c83df70d61bb2db1de32a1cde8d3
|
[
"MIT"
] | 1
|
2020-12-13T13:07:10.000Z
|
2020-12-13T13:07:10.000Z
|
alpyro_msgs/control_msgs/jointtrajectoryaction.py
|
rho2/alpyro_msgs
|
b5a680976c40c83df70d61bb2db1de32a1cde8d3
|
[
"MIT"
] | null | null | null |
alpyro_msgs/control_msgs/jointtrajectoryaction.py
|
rho2/alpyro_msgs
|
b5a680976c40c83df70d61bb2db1de32a1cde8d3
|
[
"MIT"
] | null | null | null |
from typing import List
from typing_extensions import Annotated
from typing import Final
from alpyro_msgs import RosMessage
from alpyro_msgs.control_msgs.jointtrajectoryactionfeedback import JointTrajectoryActionFeedback
from alpyro_msgs.control_msgs.jointtrajectoryactiongoal import JointTrajectoryActionGoal
from alpyro_msgs.control_msgs.jointtrajectoryactionresult import JointTrajectoryActionResult
class JointTrajectoryAction(RosMessage):
    """Generated binding for the ROS ``control_msgs/JointTrajectoryAction`` message.

    Bundles the three sub-messages of the ROS action protocol
    (goal, result, feedback) as typed attributes.
    """

    # Fully qualified ROS message type name.
    __msg_typ__ = "control_msgs/JointTrajectoryAction"
    # Base64-encoded full message definition text (generated — do not edit by hand).
    __msg_def__ = "Y29udHJvbF9tc2dzL0pvaW50VHJhamVjdG9yeUFjdGlvbkdvYWwgYWN0aW9uX2dvYWwKICBzdGRfbXNncy9IZWFkZXIgaGVhZGVyCiAgICB1aW50MzIgc2VxCiAgICB0aW1lIHN0YW1wCiAgICBzdHJpbmcgZnJhbWVfaWQKICBhY3Rpb25saWJfbXNncy9Hb2FsSUQgZ29hbF9pZAogICAgdGltZSBzdGFtcAogICAgc3RyaW5nIGlkCiAgY29udHJvbF9tc2dzL0pvaW50VHJhamVjdG9yeUdvYWwgZ29hbAogICAgdHJhamVjdG9yeV9tc2dzL0pvaW50VHJhamVjdG9yeSB0cmFqZWN0b3J5CiAgICAgIHN0ZF9tc2dzL0hlYWRlciBoZWFkZXIKICAgICAgICB1aW50MzIgc2VxCiAgICAgICAgdGltZSBzdGFtcAogICAgICAgIHN0cmluZyBmcmFtZV9pZAogICAgICBzdHJpbmdbXSBqb2ludF9uYW1lcwogICAgICB0cmFqZWN0b3J5X21zZ3MvSm9pbnRUcmFqZWN0b3J5UG9pbnRbXSBwb2ludHMKICAgICAgICBmbG9hdDY0W10gcG9zaXRpb25zCiAgICAgICAgZmxvYXQ2NFtdIHZlbG9jaXRpZXMKICAgICAgICBmbG9hdDY0W10gYWNjZWxlcmF0aW9ucwogICAgICAgIGZsb2F0NjRbXSBlZmZvcnQKICAgICAgICBkdXJhdGlvbiB0aW1lX2Zyb21fc3RhcnQKY29udHJvbF9tc2dzL0pvaW50VHJhamVjdG9yeUFjdGlvblJlc3VsdCBhY3Rpb25fcmVzdWx0CiAgc3RkX21zZ3MvSGVhZGVyIGhlYWRlcgogICAgdWludDMyIHNlcQogICAgdGltZSBzdGFtcAogICAgc3RyaW5nIGZyYW1lX2lkCiAgYWN0aW9ubGliX21zZ3MvR29hbFN0YXR1cyBzdGF0dXMKICAgIHVpbnQ4IFBFTkRJTkc9MAogICAgdWludDggQUNUSVZFPTEKICAgIHVpbnQ4IFBSRUVNUFRFRD0yCiAgICB1aW50OCBTVUNDRUVERUQ9MwogICAgdWludDggQUJPUlRFRD00CiAgICB1aW50OCBSRUpFQ1RFRD01CiAgICB1aW50OCBQUkVFTVBUSU5HPTYKICAgIHVpbnQ4IFJFQ0FMTElORz03CiAgICB1aW50OCBSRUNBTExFRD04CiAgICB1aW50OCBMT1NUPTkKICAgIGFjdGlvbmxpYl9tc2dzL0dvYWxJRCBnb2FsX2lkCiAgICAgIHRpbWUgc3RhbXAKICAgICAgc3RyaW5nIGlkCiAgICB1aW50OCBzdGF0dXMKICAgIHN0cmluZyB0ZXh0CiAgY29udHJvbF9tc2dzL0pvaW50VHJhamVjdG9yeVJlc3VsdCByZXN1bHQKY29udHJvbF9tc2dzL0pvaW50VHJhamVjdG9yeUFjdGlvbkZlZWRiYWNrIGFjdGlvbl9mZWVkYmFjawogIHN0ZF9tc2dzL0hlYWRlciBoZWFkZXIKICAgIHVpbnQzMiBzZXEKICAgIHRpbWUgc3RhbXAKICAgIHN0cmluZyBmcmFtZV9pZAogIGFjdGlvbmxpYl9tc2dzL0dvYWxTdGF0dXMgc3RhdHVzCiAgICB1aW50OCBQRU5ESU5HPTAKICAgIHVpbnQ4IEFDVElWRT0xCiAgICB1aW50OCBQUkVFTVBURUQ9MgogICAgdWludDggU1VDQ0VFREVEPTMKICAgIHVpbnQ4IEFCT1JURUQ9NAogICAgdWludDggUkVKRUNURUQ9NQogICAgdWludDggUFJFRU1QVElORz02CiAgICB1aW50OCBSRUNBTExJTkc9NwogICAgdWludDggUkVDQUxMRUQ9OAogICAgdWludDggTE9TVD05CiAgICBhY3Rpb25saWJfbXNncy9Hb2FsSUQgZ29hbF9pZAogICAgICB0aW1lIHN0YW1wCiAgICAgIHN0cmluZyBpZAogICAgdWludDggc3RhdHVzCiAgICBzdHJpbmcgdGV4dAogIGNvbnRyb2xfbXNncy9Kb2ludFRyYWplY3RvcnlGZWVkYmFjayBmZWVkYmFjawoK"
    # MD5 of the message definition (ROS convention for type-compatibility checks).
    __md5_sum__ = "a04ba3ee8f6a2d0985a6aeaf23d9d7ad"

    # The three legs of the ROS action protocol for this action type.
    action_goal: JointTrajectoryActionGoal
    action_result: JointTrajectoryActionResult
    action_feedback: JointTrajectoryActionFeedback
| 160.055556
| 2,194
| 0.971538
| 65
| 2,881
| 42.646154
| 0.415385
| 0.01443
| 0.020202
| 0.022727
| 0.027056
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 0.020826
| 2,881
| 17
| 2,195
| 169.470588
| 0.885856
| 0
| 0
| 0
| 0
| 0
| 0.778202
| 0.778202
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f6dd7716a1796e02d44dbb93f413777f00a869df
| 10,279
|
py
|
Python
|
DQMServices/Examples/python/test/MessageLogger_cfi.py
|
SWuchterl/cmssw
|
769b4a7ef81796579af7d626da6039dfa0347b8e
|
[
"Apache-2.0"
] | 6
|
2017-09-08T14:12:56.000Z
|
2022-03-09T23:57:01.000Z
|
DQMServices/Examples/python/test/MessageLogger_cfi.py
|
SWuchterl/cmssw
|
769b4a7ef81796579af7d626da6039dfa0347b8e
|
[
"Apache-2.0"
] | 545
|
2017-09-19T17:10:19.000Z
|
2022-03-07T16:55:27.000Z
|
DQMServices/Examples/python/test/MessageLogger_cfi.py
|
SWuchterl/cmssw
|
769b4a7ef81796579af7d626da6039dfa0347b8e
|
[
"Apache-2.0"
] | 14
|
2017-10-04T09:47:21.000Z
|
2019-10-23T18:04:45.000Z
|
import FWCore.ParameterSet.Config as cms

# All per-category limits in this configuration use the same effectively
# unlimited report count.
_NO_LIMIT = 10000000

# Message categories every destination tracks, in the order they appear in
# the top-level `categories` list.
_CONVERTER_CATEGORIES = (
    'MEtoEDMConverter_MEtoEDMConverter',
    'MEtoEDMConverter_endJob',
    'MEtoEDMConverter_beginRun',
    'MEtoEDMConverter_endRun',
    'EDMtoMEConverter_EDMtoMEConverter',
    'EDMtoMEConverter_endJob',
    'EDMtoMEConverter_beginRun',
    'EDMtoMEConverter_endRun',
    'ConverterTester_ConverterTester',
    'ConverterTester_endJob',
    'ConverterTester_beginRun',
    'ConverterTester_endRun',
    'PostConverterAnalyzer_PostConverterAnalyzer',
    'PostConverterAnalyzer_endJob',
    'PostConverterAnalyzer_beginRun',
    'PostConverterAnalyzer_endRun',
    'ConverterQualityTester_ConverterQualityTester',
    'ConverterQualityTester_endJob',
    'ConverterQualityTester_beginRun',
    'ConverterQualityTester_endRun',
)

# Framework categories tracked only by the file and cerr destinations
# (the cout destination omits them).
_FRAMEWORK_CATEGORIES = (
    'ScheduleExecutionFailure',
    'EventSetupDependency',
    'Root_Warning',
    'Root_Error',
)


def _limited(limit=_NO_LIMIT):
    """Return an untracked PSet carrying only a report-count limit."""
    return cms.untracked.PSet(limit=cms.untracked.int32(limit))


def _destination(threshold=None, framework=True):
    """Build one destination PSet.

    threshold  -- optional severity threshold string for the destination.
    framework  -- include the Root_*/framework categories (cout does not).
    """
    params = dict((name, _limited()) for name in _CONVERTER_CATEGORIES)
    if framework:
        params.update((name, _limited()) for name in _FRAMEWORK_CATEGORIES)
    # Uncategorised messages are suppressed entirely.
    params['default'] = _limited(0)
    params['lineLength'] = cms.untracked.int32(132)
    params['noLineBreaks'] = cms.untracked.bool(True)
    if threshold is not None:
        params['threshold'] = cms.untracked.string(threshold)
    return cms.untracked.PSet(**params)


MessageLogger = cms.Service(
    "MessageLogger",
    # Log file destination: everything, no severity threshold.
    MessageLogger=_destination(),
    # stdout: INFO and above, framework categories omitted.
    cout=_destination(threshold='INFO', framework=False),
    # stderr: WARNING and above.
    cerr=_destination(threshold='WARNING'),
    FrameworkJobReport=cms.untracked.PSet(FwkJob=_limited()),
    fwkJobReports=cms.untracked.vstring('FrameworkJobReport'),
    categories=cms.untracked.vstring(
        *(('FwkJob',) + _CONVERTER_CATEGORIES + _FRAMEWORK_CATEGORIES)
    ),
    destinations=cms.untracked.vstring('MessageLogger',
        'cout',
        'cerr')
)
| 38.498127
| 75
| 0.618543
| 841
| 10,279
| 7.457788
| 0.052319
| 0.304209
| 0.193878
| 0.241071
| 0.857462
| 0.857462
| 0.857462
| 0.857462
| 0.850128
| 0.850128
| 0
| 0.097381
| 0.286701
| 10,279
| 266
| 76
| 38.642857
| 0.758047
| 0
| 0
| 0.837121
| 0
| 0
| 0.068593
| 0.057793
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003788
| 0
| 0.003788
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
63dd8896d4477a4e90952ff2908681c9829c918b
| 1,889
|
py
|
Python
|
bigsi/tests/scoring.py
|
Phelimb/bfg
|
bf34abbb9d6f72a9f0c64c40eefc44d810a2502e
|
[
"MIT"
] | 109
|
2017-12-13T12:25:40.000Z
|
2021-08-18T08:35:44.000Z
|
bigsi/tests/scoring.py
|
Phelimb/bfg
|
bf34abbb9d6f72a9f0c64c40eefc44d810a2502e
|
[
"MIT"
] | 25
|
2017-12-14T04:03:46.000Z
|
2021-11-04T11:50:34.000Z
|
bigsi/tests/scoring.py
|
Phelimb/bfg
|
bf34abbb9d6f72a9f0c64c40eefc44d810a2502e
|
[
"MIT"
] | 20
|
2017-12-22T02:14:13.000Z
|
2021-02-01T02:49:02.000Z
|
from hypothesis import given
from hypothesis import example
import hypothesis.strategies as st
import os
from bigsi.scoring import Scorer
def test_score():
    """Pin the full Scorer output for one fixed 1174-character 0/1 hit string.

    Acts as a regression test: any change to the scoring maths shows up as a
    mismatch in this exact expected dictionary.
    """
    # Fixed presence/absence bitstring fed to the scorer (length 1174).
    s = "1111111111111111111111111111111111111111110000000000000000000000000000001111111111111111111111100000000000000000000100000010001111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111110000000000000000000000100000000010001111111111000000000000100000000000000000000000000000000100000000000010000000010000001000000000010000000000000000010001111111100000000000001100010000000000000000000001000000000000110000000000000000000000100000000000000000000100000000000000001010001111111111100000000000000000000100100010011111111111111111100000000001001000001000000000000000000000000000001000000010100000000000000001111111111111111111111111111111111111111111111111111111111111111111111111111111100000010110001000100000000000000000000000000000000000001000001111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100010000000100000000001010000001111111111111111111111111111111111111111111111111111111111111111100100000000010000000010000000001111111111111111111111111111111111111111111111111111111111111111111111100000100000000000010000000000000010000000011111111000000100010"
    # 5 * 10**5 is the scorer's size parameter — presumably the reference /
    # database size used for e-value computation; TODO confirm against Scorer.
    scorer = Scorer(5 * 10 ** 5)
    # Expected values were captured from a known-good run (regression oracle).
    assert scorer.score(s) == {
        "length": 1174,
        "max_mismatches": 269,
        "max_nident": 1156,
        "max_pident": 98.46678023850085,
        "max_score": 1119.98,
        "min_mismatches": 18,
        "min_nident": 905,
        "min_pident": 77.08688245315162,
        "min_score": 96.04,
        "mismatches": 33,
        "nident": 1141,
        "pident": 97.18909710391823,
        "score": 1064.89,
        "evalue": 0.0,
        "pvalue": 0.0,
        "log_evalue": -1407.74,
        "log_pvalue": -1407.74,
    }
| 59.03125
| 1,154
| 0.827422
| 88
| 1,889
| 17.636364
| 0.568182
| 0.018041
| 0.025773
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.751202
| 0.119111
| 1,889
| 31
| 1,155
| 60.935484
| 0.18149
| 0
| 0
| 0
| 0
| 0
| 0.685548
| 0.605611
| 0
| 1
| 0
| 0
| 0.037037
| 1
| 0.037037
| false
| 0
| 0.185185
| 0
| 0.222222
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1207a30ab88e9f8a0b8da6bbfdfc28e6385059ad
| 23,009
|
py
|
Python
|
controls/testing/test_integration/test_fys.py
|
rossm6/accounts
|
74633ce4038806222048d85ef9dfe97a957a6a71
|
[
"MIT"
] | 11
|
2021-01-23T01:09:54.000Z
|
2021-01-25T07:16:30.000Z
|
controls/testing/test_integration/test_fys.py
|
rossm6/accounts
|
74633ce4038806222048d85ef9dfe97a957a6a71
|
[
"MIT"
] | 7
|
2021-04-06T18:19:10.000Z
|
2021-09-22T19:45:03.000Z
|
controls/testing/test_integration/test_fys.py
|
rossm6/accounts
|
74633ce4038806222048d85ef9dfe97a957a6a71
|
[
"MIT"
] | 3
|
2021-01-23T18:55:32.000Z
|
2021-02-16T17:47:59.000Z
|
from datetime import date
from controls.models import FinancialYear, ModuleSettings, Period
from dateutil.relativedelta import relativedelta
from django.contrib.auth import get_user_model
from django.shortcuts import reverse
from django.test import TestCase
from nominals.models import Nominal, NominalTransaction
class CreateFyTests(TestCase):
    """Integration tests for the financial-year create view (``controls:fy_create``)."""

    @classmethod
    def setUpTestData(cls):
        # Shared view URL and superuser for every test in this class.
        cls.url = reverse("controls:fy_create")
        cls.user = get_user_model().objects.create_superuser(
            username="dummy", password="dummy")

    def test_successful_first_fy(self):
        """
        Creating the very first FY should also set the posting period of
        every module (cash book, nominals, purchases, sales) to the FY's
        first period.
        """
        self.client.force_login(self.user)
        # Start from a state with no posting periods configured at all.
        ModuleSettings.objects.create(
            cash_book_period=None,
            nominals_period=None,
            purchases_period=None,
            sales_period=None
        )
        # Formset payload: 12 consecutive calendar months for FY 2020.
        response = self.client.post(self.url, data={
            "financial_year": 2020,
            "period-0-month_start": "01-2020",
            "period-1-month_start": "02-2020",
            "period-2-month_start": "03-2020",
            "period-3-month_start": "04-2020",
            "period-4-month_start": "05-2020",
            "period-5-month_start": "06-2020",
            "period-6-month_start": "07-2020",
            "period-7-month_start": "08-2020",
            "period-8-month_start": "09-2020",
            "period-9-month_start": "10-2020",
            "period-10-month_start": "11-2020",
            "period-11-month_start": "12-2020",
            "period-TOTAL_FORMS": "12",
            "period-INITIAL_FORMS": "0",
            "period-MIN_NUM_FORMS": "0",
            "period-MAX_NUM_FORMS": "1000"
        })
        # Success redirects.
        self.assertEqual(
            response.status_code,
            302
        )
        fys = FinancialYear.objects.all()
        self.assertEqual(
            len(fys),
            1
        )
        fy = fys[0]
        self.assertEqual(
            fy.financial_year,
            2020
        )
        self.assertEqual(
            fy.number_of_periods,
            12
        )
        periods = Period.objects.all()
        self.assertEqual(
            len(periods),
            12
        )
        # Each created period belongs to the new FY, one calendar month
        # apart, numbered "01".."12".
        for i, period in enumerate(periods):
            self.assertEqual(
                period.fy,
                fy
            )
            self.assertEqual(
                period.month_start,
                date(2020, i + 1, 1)
            )
            self.assertEqual(
                period.period,
                str(i + 1).rjust(2, "0")
            )
            self.assertEqual(
                period.fy_and_period,
                str(fy) + str(i+1).rjust(2, "0")
            )
        # NOTE(review): "mod_setttings" is a typo for "mod_settings"
        # (local variable only, harmless).
        mod_setttings = ModuleSettings.objects.first()
        self.assertEqual(
            mod_setttings.cash_book_period,
            periods[0]
        )
        self.assertEqual(
            mod_setttings.nominals_period,
            periods[0]
        )
        self.assertEqual(
            mod_setttings.purchases_period,
            periods[0]
        )
        self.assertEqual(
            mod_setttings.sales_period,
            periods[0]
        )

    def test_successful_second_fy(self):
        """
        Creating a second FY must NOT move the module posting periods —
        they stay on the period they were already set to.
        """
        self.client.force_login(self.user)
        fy_2019 = FinancialYear.objects.create(
            financial_year=2019, number_of_periods=1)
        first_and_only_period_of_2019 = Period.objects.create(
            fy=fy_2019, period="01", fy_and_period="201901", month_start=date(2019, 12, 1))
        # All modules already post into 2019's only period.
        ModuleSettings.objects.create(
            cash_book_period=first_and_only_period_of_2019,
            nominals_period=first_and_only_period_of_2019,
            purchases_period=first_and_only_period_of_2019,
            sales_period=first_and_only_period_of_2019
        )
        response = self.client.post(self.url, data={
            "financial_year": 2020,
            "period-0-month_start": "01-2020",
            "period-1-month_start": "02-2020",
            "period-2-month_start": "03-2020",
            "period-3-month_start": "04-2020",
            "period-4-month_start": "05-2020",
            "period-5-month_start": "06-2020",
            "period-6-month_start": "07-2020",
            "period-7-month_start": "08-2020",
            "period-8-month_start": "09-2020",
            "period-9-month_start": "10-2020",
            "period-10-month_start": "11-2020",
            "period-11-month_start": "12-2020",
            "period-TOTAL_FORMS": "12",
            "period-INITIAL_FORMS": "0",
            "period-MIN_NUM_FORMS": "0",
            "period-MAX_NUM_FORMS": "1000"
        })
        self.assertEqual(
            response.status_code,
            302
        )
        fys = FinancialYear.objects.all().order_by("financial_year")
        self.assertEqual(
            len(fys),
            2
        )
        fy = fys[1]
        self.assertEqual(
            fy.financial_year,
            2020
        )
        self.assertEqual(
            fy.number_of_periods,
            12
        )
        # Ignore 2019's pre-existing period when checking the new ones.
        periods = Period.objects.exclude(fy_and_period="201901").all()
        self.assertEqual(
            len(periods),
            12
        )
        for i, period in enumerate(periods):
            self.assertEqual(
                period.fy,
                fy
            )
            self.assertEqual(
                period.month_start,
                date(2020, i + 1, 1)
            )
            self.assertEqual(
                period.period,
                str(i + 1).rjust(2, "0")
            )
            self.assertEqual(
                period.fy_and_period,
                str(fy) + str(i+1).rjust(2, "0")
            )
        mod_setttings = ModuleSettings.objects.first()
        # check posting periods have not changed
        self.assertEqual(
            mod_setttings.cash_book_period,
            first_and_only_period_of_2019
        )
        self.assertEqual(
            mod_setttings.nominals_period,
            first_and_only_period_of_2019
        )
        self.assertEqual(
            mod_setttings.purchases_period,
            first_and_only_period_of_2019
        )
        self.assertEqual(
            mod_setttings.sales_period,
            first_and_only_period_of_2019
        )

    def test_failure_when_fys_are_not_consecutive(self):
        """Posting 2020 when only 2018 exists must be rejected (gap year)."""
        self.client.force_login(self.user)
        FinancialYear.objects.create(financial_year=2018, number_of_periods=12)
        response = self.client.post(self.url, data={
            "financial_year": 2020,
            "period-0-month_start": "01-2020",
            "period-1-month_start": "02-2020",
            "period-2-month_start": "03-2020",
            "period-3-month_start": "04-2020",
            "period-4-month_start": "05-2020",
            "period-5-month_start": "06-2020",
            "period-6-month_start": "07-2020",
            "period-7-month_start": "08-2020",
            "period-8-month_start": "09-2020",
            "period-9-month_start": "10-2020",
            "period-10-month_start": "11-2020",
            "period-11-month_start": "12-2020",
            "period-TOTAL_FORMS": "12",
            "period-INITIAL_FORMS": "0",
            "period-MIN_NUM_FORMS": "0",
            "period-MAX_NUM_FORMS": "1000"
        })
        # Invalid form re-renders the page rather than redirecting.
        self.assertEqual(
            response.status_code,
            200
        )
        self.assertContains(
            response,
            "<li>Financial years must be consecutive. The earliest is 2018 and the latest is 2018</li>"
        )
        # Nothing new was persisted.
        self.assertEqual(
            len(
                FinancialYear.objects.all()
            ),
            1
        )
        self.assertEqual(
            len(
                Period.objects.all()
            ),
            0
        )

    def test_failure_when_period_does_have_month_start(self):
        """A period form left without a month selected must be rejected.

        NOTE(review): the method name looks inverted — it presumably means
        "does_not_have_month_start" (period-11 is submitted empty below).
        """
        self.client.force_login(self.user)
        response = self.client.post(self.url, data={
            "financial_year": 2020,
            "period-0-month_start": "01-2020",
            "period-1-month_start": "02-2020",
            "period-2-month_start": "03-2020",
            "period-3-month_start": "04-2020",
            "period-4-month_start": "05-2020",
            "period-5-month_start": "06-2020",
            "period-6-month_start": "07-2020",
            "period-7-month_start": "08-2020",
            "period-8-month_start": "09-2020",
            "period-9-month_start": "10-2020",
            "period-10-month_start": "11-2020",
            "period-11-month_start": "",
            "period-TOTAL_FORMS": "12",
            "period-INITIAL_FORMS": "0",
            "period-MIN_NUM_FORMS": "0",
            "period-MAX_NUM_FORMS": "1000"
        })
        self.assertEqual(
            response.status_code,
            200
        )
        self.assertContains(
            response,
            "<li>All periods you wish to create must have a month selected. Delete any unwanted periods otherwise</li>"
        )

    def test_failure_when_month_starts_are_not_consecutive(self):
        """Non-consecutive months within the new FY must be rejected.

        Period 11 jumps from 11-2020 to 01-2021, skipping December.
        """
        self.client.force_login(self.user)
        response = self.client.post(self.url, data={
            "financial_year": 2020,
            "period-0-month_start": "01-2020",
            "period-1-month_start": "02-2020",
            "period-2-month_start": "03-2020",
            "period-3-month_start": "04-2020",
            "period-4-month_start": "05-2020",
            "period-5-month_start": "06-2020",
            "period-6-month_start": "07-2020",
            "period-7-month_start": "08-2020",
            "period-8-month_start": "09-2020",
            "period-9-month_start": "10-2020",
            "period-10-month_start": "11-2020",
            "period-11-month_start": "01-2021",
            "period-TOTAL_FORMS": "12",
            "period-INITIAL_FORMS": "0",
            "period-MIN_NUM_FORMS": "0",
            "period-MAX_NUM_FORMS": "1000"
        })
        self.assertEqual(
            response.status_code,
            200
        )
        self.assertContains(
            response,
            "<li>Periods must be consecutive calendar months</li>"
        )

    def test_failure_when_months_across_all_fys_are_not_consecutive(self):
        """A new period overlapping an existing FY's calendar month must be rejected.

        2019's only period already occupies Jan 2020, so creating FY 2020
        starting 01-2020 clashes with it.
        """
        self.client.force_login(self.user)
        fy_2019 = FinancialYear.objects.create(
            financial_year=2019, number_of_periods=1)
        p = Period.objects.create(
            fy=fy_2019, fy_and_period="201901", period="01", month_start=date(2020, 1, 1))
        response = self.client.post(self.url, data={
            "financial_year": 2020,
            "period-0-month_start": "01-2020",
            "period-1-month_start": "02-2020",
            "period-2-month_start": "03-2020",
            "period-3-month_start": "04-2020",
            "period-4-month_start": "05-2020",
            "period-5-month_start": "06-2020",
            "period-6-month_start": "07-2020",
            "period-7-month_start": "08-2020",
            "period-8-month_start": "09-2020",
            "period-9-month_start": "10-2020",
            "period-10-month_start": "11-2020",
            "period-11-month_start": "12-2020",
            "period-TOTAL_FORMS": "12",
            "period-INITIAL_FORMS": "0",
            "period-MIN_NUM_FORMS": "0",
            "period-MAX_NUM_FORMS": "1000"
        })
        self.assertEqual(
            response.status_code,
            200
        )
        self.assertContains(
            response,
            "<li>Period 01 of FY 2019 is for calendar month Jan 2020. "
            "But you are trying to now create a period for calendar month Jan 2020 again. "
            "This is not allowed because periods must be consecutive calendar months across ALL financial years.</li>"
        )
class AdjustFYTests(TestCase):
    """Integration tests for the financial-year adjust view (``controls:fy_adjust``)."""

    @classmethod
    def setUpTestData(cls):
        cls.url = reverse("controls:fy_adjust")
        cls.user = get_user_model().objects.create_superuser(
            username="dummy", password="dummy")
        # ASSETS
        assets = Nominal.objects.create(name="Assets", type="b")
        current_assets = Nominal.objects.create(
            parent=assets, name="Current Assets", type="b")
        cls.bank_nominal = Nominal.objects.create(
            parent=current_assets, name="Bank Account", type="b")
        cls.debtors_nominal = Nominal.objects.create(
            parent=current_assets, name="Trade Debtors", type="b")
        # LIABILITIES
        cls.liabilities = liabilities = Nominal.objects.create(
            name="Liabilities", type="b"
        )
        cls.current_liabilities = current_liabilities = Nominal.objects.create(
            name="Current Liabilities", type="b", parent=liabilities
        )
        cls.vat_output = vat_output = Nominal.objects.create(
            name="Vat Output", type="b", parent=current_liabilities
        )

    def test_successful(self):
        """Moving 2019's second half into FY 2020 shrinks 2019 to 6 periods
        and grows 2020 to 18."""
        self.client.force_login(self.user)
        # create 2019
        fy_2019 = FinancialYear.objects.create(
            financial_year=2019, number_of_periods=12)
        periods = []
        for i in range(12):
            # NOTE(review): fy_and_period uses str(i) ("201900".."201911")
            # while period uses str(i+1) ("01".."12") — looks inconsistent
            # with the create view's "201901".."201912"; confirm intended.
            periods.append(
                Period(
                    fy=fy_2019,
                    fy_and_period="2019" + str(i).rjust(2, "0"),
                    period=str(i+1).rjust(2, "0"),
                    month_start=date(2019, i+1, 1)
                )
            )
        p_2019 = Period.objects.bulk_create(periods)
        # create 2020
        fy_2020 = FinancialYear.objects.create(
            financial_year=2020, number_of_periods=12)
        periods = []
        for i in range(12):
            periods.append(
                Period(
                    fy=fy_2020,
                    fy_and_period="2020" + str(i).rjust(2, "0"),
                    period=str(i+1).rjust(2, "0"),
                    month_start=date(2020, i+1, 1)
                )
            )
        p_2020 = Period.objects.bulk_create(periods)
        # Reassign Jul-Dec 2019 to FY 2020 in the submitted formset.
        periods = list(p_2019) + list(p_2020)
        second_half_of_2019 = periods[6:12]
        for p in second_half_of_2019:
            p.fy = fy_2020
        form_data = {}
        for i, p in enumerate(periods):
            form_data.update({
                "period-" + str(i) + "-id": p.pk,
                "period-" + str(i) + "-month_start": p.month_start.strftime("%m-%Y"),
                "period-" + str(i) + "-period": p.period,
                "period-" + str(i) + "-fy": p.fy_id
            })
        form_data.update({
            "period-TOTAL_FORMS": str(len(periods)),
            "period-INITIAL_FORMS": str(len(periods)),
            "period-MIN_NUM_FORMS": "0",
            "period-MAX_NUM_FORMS": "1000"
        })
        response = self.client.post(self.url, data=form_data)
        self.assertEqual(
            response.status_code,
            302
        )
        fy_2019.refresh_from_db()
        fy_2020.refresh_from_db()
        periods = Period.objects.all()
        periods_2019 = periods[:6]
        # NOTE(review): the two loops below ASSIGN to the period objects
        # instead of asserting on them — they verify nothing; presumably
        # self.assertEqual calls were intended.
        for i, p in enumerate(periods_2019):
            p.fy = fy_2019
            p.month_start = date(2019, i+1, 1)
            p.fy_and_period = "2019" + str(i+1).rjust(2, "0")
            p.period = str(i+1).rjust(2, "0")
        periods_2020 = periods[6:]
        for i, p in enumerate(periods_2020):
            p.fy = fy_2020
            p.month_start = date(2019, 6, 1) + relativedelta(months=+i)
            p.fy_and_period = "2020" + str(i+1).rjust(2, "0")
            p.period = str(i+1).rjust(2, "0")
        self.assertEqual(
            fy_2019.number_of_periods,
            6
        )
        self.assertEqual(
            fy_2020.number_of_periods,
            18
        )

    def test_successful_when_bfs_are_present(self):
        """
        Say you have two FYs, each of 12 periods,
        2019
        2020
        If you extend 2019 to 18 months, 2019 and 2020 are affected by the
        change.
        If 2019 c/fs have already posted as b/fs into 2020 we need to delete
        these bfs and anyway bfs posted in periods after.
        """
        # create the fys and periods
        self.client.force_login(self.user)
        # create 2019
        fy_2019 = FinancialYear.objects.create(
            financial_year=2019, number_of_periods=12)
        periods = []
        for i in range(12):
            periods.append(
                Period(
                    fy=fy_2019,
                    fy_and_period="2019" + str(i).rjust(2, "0"),
                    period=str(i+1).rjust(2, "0"),
                    month_start=date(2019, i+1, 1)
                )
            )
        p_2019 = Period.objects.bulk_create(periods)
        p_201901 = fy_2019.first_period()
        # create 2020
        fy_2020 = FinancialYear.objects.create(
            financial_year=2020, number_of_periods=12)
        periods = []
        for i in range(12):
            periods.append(
                Period(
                    fy=fy_2020,
                    fy_and_period="2020" + str(i).rjust(2, "0"),
                    period=str(i+1).rjust(2, "0"),
                    month_start=date(2020, i+1, 1)
                )
            )
        p_2020 = Period.objects.bulk_create(periods)
        p_202001 = fy_2020.first_period()
        # post the bfs
        # 2019
        bf_2019_1 = NominalTransaction.objects.create(
            module="NL",
            header=1,
            line=1,
            date=date.today(),
            ref="YEAR END 2018",
            period=p_201901,
            field="t",
            type="nbf",
            nominal=self.bank_nominal,
            value=1000
        )
        bf_2019_2 = NominalTransaction.objects.create(
            module="NL",
            header=1,
            line=2,
            date=date.today(),
            ref="YEAR END 2018",
            period=p_201901,
            field="t",
            type="nbf",
            nominal=self.vat_output,
            value=-1000
        )
        # 2020
        bf_2020_1 = NominalTransaction.objects.create(
            module="NL",
            header=2,
            line=1,
            date=date.today(),
            ref="YEAR END 2019",
            period=p_202001,
            field="t",
            type="nbf",
            nominal=self.bank_nominal,
            value=1000
        )
        bf_2020_2 = NominalTransaction.objects.create(
            module="NL",
            header=2,
            line=2,
            date=date.today(),
            ref="YEAR END 2019",
            period=p_202001,
            field="t",
            type="nbf",
            nominal=self.vat_output,
            value=-1000
        )
        # prepare for adjusting FY
        periods = list(p_2019) + list(p_2020)
        second_half_of_2019 = periods[6:12]
        for p in second_half_of_2019:
            p.fy = fy_2020
        form_data = {}
        for i, p in enumerate(periods):
            form_data.update({
                "period-" + str(i) + "-id": p.pk,
                "period-" + str(i) + "-month_start": p.month_start.strftime("%m-%Y"),
                "period-" + str(i) + "-period": p.period,
                "period-" + str(i) + "-fy": p.fy_id
            })
        form_data.update({
            "period-TOTAL_FORMS": str(len(periods)),
            "period-INITIAL_FORMS": str(len(periods)),
            "period-MIN_NUM_FORMS": "0",
            "period-MAX_NUM_FORMS": "1000"
        })
        # now adjust via the view
        response = self.client.post(self.url, data=form_data)
        self.assertEqual(
            response.status_code,
            302
        )
        fy_2019.refresh_from_db()
        fy_2020.refresh_from_db()
        periods = Period.objects.all()
        periods_2019 = periods[:6]
        # NOTE(review): as in test_successful, these loops assign rather
        # than assert — they do not actually verify the periods.
        for i, p in enumerate(periods_2019):
            p.fy = fy_2019
            p.month_start = date(2019, i+1, 1)
            p.fy_and_period = "2019" + str(i+1).rjust(2, "0")
            p.period = str(i+1).rjust(2, "0")
        periods_2020 = periods[6:]
        for i, p in enumerate(periods_2020):
            p.fy = fy_2020
            p.month_start = date(2019, 6, 1) + relativedelta(months=+i)
            p.fy_and_period = "2020" + str(i+1).rjust(2, "0")
            p.period = str(i+1).rjust(2, "0")
        self.assertEqual(
            fy_2019.number_of_periods,
            6
        )
        self.assertEqual(
            fy_2020.number_of_periods,
            18
        )
        # check that the b/fs posted to 01 2020 have been deleted i.e. 2020 has been rolled back
        nom_trans = NominalTransaction.objects.all().order_by("pk")
        self.assertEqual(
            len(nom_trans),
            2
        )
        self.assertEqual(
            nom_trans[0],
            bf_2019_1
        )
        self.assertEqual(
            nom_trans[1],
            bf_2019_2
        )

    def test_failure_when_FY_does_contain_consecutive_periods(self):
        """An FY whose periods are not consecutive must be rejected.

        NOTE(review): the method name looks inverted — it presumably means
        "does_not_contain_consecutive_periods" (one mid-2019 period is put
        back into fy_2019 below, breaking consecutiveness).
        """
        self.client.force_login(self.user)
        # create 2019
        fy_2019 = FinancialYear.objects.create(
            financial_year=2019, number_of_periods=12)
        periods = []
        for i in range(12):
            periods.append(
                Period(
                    fy=fy_2019,
                    fy_and_period="2019" + str(i).rjust(2, "0"),
                    period=str(i+1).rjust(2, "0"),
                    month_start=date(2019, i+1, 1)
                )
            )
        p_2019 = Period.objects.bulk_create(periods)
        # create 2020
        fy_2020 = FinancialYear.objects.create(
            financial_year=2020, number_of_periods=12)
        periods = []
        for i in range(12):
            periods.append(
                Period(
                    fy=fy_2020,
                    fy_and_period="2020" + str(i).rjust(2, "0"),
                    period=str(i+1).rjust(2, "0"),
                    month_start=date(2020, i+1, 1)
                )
            )
        p_2020 = Period.objects.bulk_create(periods)
        periods = list(p_2019) + list(p_2020)
        second_half_of_2019 = periods[6:12]
        for p in second_half_of_2019:
            p.fy = fy_2020
        # Break consecutiveness: one period in the middle stays in 2019.
        second_half_of_2019[2].fy = fy_2019
        form_data = {}
        for i, p in enumerate(periods):
            form_data.update({
                "period-" + str(i) + "-id": p.pk,
                "period-" + str(i) + "-month_start": p.month_start.strftime("%m-%Y"),
                "period-" + str(i) + "-period": p.period,
                "period-" + str(i) + "-fy": p.fy_id
            })
        form_data.update({
            "period-TOTAL_FORMS": str(len(periods)),
            "period-INITIAL_FORMS": str(len(periods)),
            "period-MIN_NUM_FORMS": "0",
            "period-MAX_NUM_FORMS": "1000"
        })
        response = self.client.post(self.url, data=form_data)
        # Invalid adjustment re-renders the form.
        self.assertEqual(
            response.status_code,
            200
        )
| 35.074695
| 120
| 0.519927
| 2,618
| 23,009
| 4.371276
| 0.089763
| 0.081265
| 0.020972
| 0.015729
| 0.819556
| 0.799546
| 0.776302
| 0.759175
| 0.71828
| 0.714261
| 0
| 0.097362
| 0.360772
| 23,009
| 655
| 121
| 35.128244
| 0.680718
| 0.029293
| 0
| 0.755372
| 0
| 0.003306
| 0.170525
| 0.01135
| 0
| 0
| 0
| 0
| 0.076033
| 1
| 0.018182
| false
| 0.003306
| 0.01157
| 0
| 0.033058
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
121533005dada2a6fed8efe5bc12eb51d5c94103
| 46
|
py
|
Python
|
src/main/tests/__init__.py
|
kirillismad/django_blog
|
9c5d1549ca415e02c8705e4797b55b1272c44428
|
[
"MIT"
] | null | null | null |
src/main/tests/__init__.py
|
kirillismad/django_blog
|
9c5d1549ca415e02c8705e4797b55b1272c44428
|
[
"MIT"
] | 12
|
2020-01-09T09:22:30.000Z
|
2022-03-11T23:43:55.000Z
|
src/main/tests/__init__.py
|
kirillismad/django_blog
|
9c5d1549ca415e02c8705e4797b55b1272c44428
|
[
"MIT"
] | null | null | null |
from .api_tests import *
from .tests import *
| 15.333333
| 24
| 0.73913
| 7
| 46
| 4.714286
| 0.571429
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 2
| 25
| 23
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
123eb51cc56f5b2913814f844e283aa01fb155ec
| 590
|
py
|
Python
|
eval_medseg_timm-regnetx_002_RandomBrightnessContrast.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_medseg_timm-regnetx_002_RandomBrightnessContrast.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_medseg_timm-regnetx_002_RandomBrightnessContrast.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os

# Run the medseg UNet++ (timm-regnetx_002, RandomBrightnessContrast) evaluation
# once per cross-validation fold by invoking main.py with the fold-specific
# config file.  The fold index is the only thing that varies between runs, so
# the commands are generated instead of hard-coded five times.
commands = [
    "python main.py --configs configs/"
    "eval_medseg_unetplusplus_timm-regnetx_002_{}_RandomBrightnessContrast.yml".format(fold)
    for fold in range(5)
]
for command in commands:
    # NOTE(review): os.system with a fixed, locally-built string; no untrusted
    # input is involved, so shell execution is acceptable here.
    os.system(command)
| 53.636364
| 112
| 0.859322
| 80
| 590
| 5.9625
| 0.3
| 0.104822
| 0.125786
| 0.199161
| 0.87631
| 0.87631
| 0.87631
| 0.87631
| 0.87631
| 0.87631
| 0
| 0.035778
| 0.052542
| 590
| 11
| 113
| 53.636364
| 0.817531
| 0
| 0
| 0
| 0
| 0
| 0.888325
| 0.676819
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1240529f0fe8f0a16d35402b23cd41aac6932bb6
| 15,582
|
py
|
Python
|
Day08/Part2.py
|
PeterDowdy/AdventOfCode2019
|
93078b5fc2ef78cdb1b860a3535839dc718c9f5f
|
[
"MIT"
] | null | null | null |
Day08/Part2.py
|
PeterDowdy/AdventOfCode2019
|
93078b5fc2ef78cdb1b860a3535839dc718c9f5f
|
[
"MIT"
] | null | null | null |
Day08/Part2.py
|
PeterDowdy/AdventOfCode2019
|
93078b5fc2ef78cdb1b860a3535839dc718c9f5f
|
[
"MIT"
] | null | null | null |
img = "2212222102022220222221222222221022220210202222222222222212222222222222221222222222222202221212222212220222202222220222222222212212212222212222220222222222222012022220222222222222222122221222212222222222222222222222222222221222222222222202221212222212220222222222220222222222222212212222212222222222222212222012222222222222222222220222221210222222202222222222222222222222221222222022222222222222222202222222212220220222222222202222112222222222220222222212222222122220222220222222222022221210212222212222222202222222222222221222222022222212221202222202221222212220222222222222222222002222212222221222222222222112022221222222222222220122220202202222202222222202220222222222221222222022222202220222222202222222202222221222222222212212102222210222220222222212222012022222222220222222220222221200212222212222222222220222222222220222222222222222220222222212220222202221221222222222212212002222200222221212222222222012022222222220222222221122221201212222222222222202221222222222220222222122222202221212222222221222222222222222222222222222002222212222221212222202222112022220222222222222220122220201220222202222222202221222222222220222122022222222222222222202220202212222222220222222202212222222210222222222222212222202122222222220222222222122220202211222202222202222220222222222222222022122222212220202222212222212212220220221222222222212002222200222221222222222212212002221222222202222221022222210210222222222202222221222222222222222122222222212221202222212221212222221222221222222222222012222202220221202222222212212122222222222222222221122222211211222202222222212222222222222220222122122222222221212222212221222212220221122222222212212212222210222220222222212202212221221222220202222222022221200200222202222212202222222222222220222222022222212220202222222222212222222220222222222222202222222200221221202222222212212000221222222212222221122220210211222212222212222221222222222222222222122222202222202222202221212212222221120222222222222012222210222221202222202222122112222222220202222220102221200201
22221222221220222022222222222022212222222222222222222221222220122222222122012222222222221222220222022120222221222221201122022222221222222020222122221122222222220220222122222222222222222222222221222122222222222221120222022212122222222221211222221122022020222221222211221222222222221222222121222021022022221222221222222222222222222022202212222221222020222220222120220222122112112222221220221222221222122022222222221221222222122222022222222001222021221022222222221221222222222222222122222202222222222120222222222121022222222212012222221222201222222022122122222222221222201022022222121222222212222122120222220222220222222022222222222220212222222220222122222222222121220222022222022222220220212122220022122222222220221212200122112222122222222111222022022222221222222222222022222222222222222212222222222122222222222220122222122002102222221221221122220022022220222221222200202122102222220222222212222120020022220222220222222222222222222220202222222222222120222222222220220222122022002222221220220122220122122121222221221212200122002202121222222221222122021222220222222220222022222222222022222222222220222221222221222221120222122212222222222221201022220022122021220221220211222022022212020222222000222221222022221222222220222222222222222022202212222222222121222220222220121222022212012220220220210222222122022021221221222200221122122212021222220001222220021222220222221220222122222222222121020222222222222021222220222221220222022002202221220222211222222002022120222221222220202122022202021222220011222021020022222222222221222022222222222020212222222221222121222222222121021222022012202222222221201122220012022220220221221211202022112222020222221201222120121022121222220222222022222222222220200222222222222221222222222021121222222212122220122220220022222122122220200220221211211022002202220222220212222120122220120222220221222022222222222022201222222222222222222221222122222222022122122220222220200222221212022121220221221201222122012212120222212002222020121121021222221221222222222222222122222202222221222021
22222122202122022202200210222022122222002222221212222120022022121022112210221202022221022022202012102012222222022022212212222222222120020222222022212122222122222002222212211201222022222120202222110222222022222122021121102222221212122220211022212212022222222222022022212222222222212001220222222022222122222022202102122212210201222002222020012222222212212222022022220120112220221212222220210122222102212212122222122122212212202222212000022222220022222222222122202002022212101212222112022021212222110212212120222022121122202200221212122220021122202012112122122222022122212202222222212121020222220022202222222022222102122202111220222202122220222222011202222122122022122120202222222212120220122222202222112102122222222222222222202222212012221222222122212022222122222212122222120200222222022022202222102202222121022022022120202212221222121222221222212102202112022222122222202202210222210202222222222122212222222222212102222222100212222122022222222222111202212021222222220222002212221222021220202022222212202212122222222122212202202222220121122222221022202022222222222102222212011222212122222020212222201212112222222122121221222201222222222220211222222112102102222022222222222222202222220221121222220122212222222022222222022222221201212022022120212222212210201221022222120022122201220222021222001222202022102122022222122022222212200222212102122222221222222122222222202122022222220220222102222221112222221200120021022022222121122211222222022221210022212012022122222022022022202202222222210112120222222122202022222122212222122212222222202202022221012222100210220020122222022022002200220202122220011122202112122012222022222222222212200222210002221222221022212122222122202222022222011211202102122120212222012220120220122222020022212222221202020221111022212222102022122022022222222202210222212220221222222222202122222022212222122222201220212212222221202222011211210220122122021221202222221212120222002222202202212002222122222022222222220222212002021222222022212222222222212120222212102221202022122022022222111221
22222202222202122001120012221102022100112220200222220212212222222222221221222221020202022222102222212222202222220002220222121022200222202102222220120122212222222202102001121112020222022101022221212210200202212202222220221221022220002122122222102221212222212221221202222200222121200212202010222221022122122202202202212220222212121122222200112221220221202202212222102221220222122220102222122222212222202222202222202122220211221121201202202202222202121102122102222222202101120112221122122000022222220200220202222222222221221220122220101222022222012222222222022221211002220210021122202222202100222201120101212012222222112200021112222102122021122222201211210202202202022221220222122221020022122222212221222222002221210102221201022020222212212222222221121210022002222212212202220002220202122212112220200220200212222202212221222220222222212112022222002220202222002222211102220201020020220222222102222222221110022202202222222121220012221012022112022222201201212212212212102220221222022220102122122222222220202222122220200112222220221022220212222022222222122210222102222222102220222222222002222200002220212201200202222222212220222210122222021022022222022221202222122221211112221200221221200202212020222221220011022202222202212021222212021222222012022221222221220212222202212222220220022220102112022222222220212222122220211222222212022022221202202101222202010020112222202212122022221202021122022110002222211201210222222222222221221210022221002102022222122220202222222222200102222220220220220212212222222201220201012222202202202102221202121212222102102222220220202202212212022220220201022222210022222222102222202222002222202202220222220120212202202122222210212002102122222212012012022202120202022122212222212221202212222202212221021202222222211102222222022221212222122221211002222211221120221222222212222201111210102102212202122212121122221212102101122202201210220222222222112220222210122221201022122222002220102222012221200122222200021222212212202021222220002200112102222202002222021002020202112022212210222220
20221221220201222102021022222002111202222201222221222221222221102222122102112220021220202122221222220101211220222201220012121202022200220210221222021120021220221222222022020122222000102212222221212021222210222122201222220112022220121221211022220112122100222220221221211202021222122210222120221122021121022222222202222112220122222020222202222202212122222200222120012222222212022121122220221022222100102202210221220200201202001212011201201102222222222121020202221201222002222102222120000222222221202020222212222020221222122102202022220222210222220121000211212222222220211002022222101200222000222221121022120212222210222102121102222210220222222200202001222221222221022222122100122020221220020022222120222022210222220212211122221202011211222010222022022120021202220202222122022022222121120222222211222201222211212022000222022200002022022222222122221110001022201221221212221102221222021201210002122021222220022222220210222012121122222201011222222211222011222210222221012222221020222121020220120222221210120121211221220202210002112202111220202000221221022022220200220211202122220002222201012212222221212102222202222220102222120011112222121221111222221211012222210121220220201122112212200210201212121020022022222222222200202202222022022201011222222211212212221221202021010222122120202021122221100122221120022101201022221200201002110212110202222102220121222120022222222221212122121022222122010202222212202112222200202120110222221020022020020220120022221122101110220121222221200022102212221220201020221222120220221200221222202202220212122120102212222200222022220201212120112222120202112221120220202222220122102011201222222202212120111212220201200211122200222220222222222201112022120222022000001222222201222002222220222022121222021110222220121220010022220112200200212021222220200112112222022220221011222200021122222201221212212022022012222200022212222222222211220212212222210222021222002020020221110022221111202200201020220222222201011212011201212202220020222221021201221211022012220102122222212212222200212210
22020220222010122212110201202012122012122222100012102220212222022120221220220220122022202202022102102212122222121112211222121202210000221222220120212122220222222100122202111122222102122021122222222220111122122022122120111201022202121022212222111122102002021122121201221212101212201002022222221122211022022021212112222202020222212212022122022222111011212022002022122220020101021201120020110022001022122022021122222020211202222202202001221222221122210022220020022100022222110221222102122111222221022020210220122122021121201001121212220122101212002212012222221022022121202212102222200122021222220220211222022221202202122202121002222102022210122221222001220121022022120122212021020220021220010112120022012222120122022220221212110222222212020222222220221222122020002220122212020210202202022120122221220001001121222022220022100221222211022121111012201022212022220002021212211212200222202011220222222022220022221122212111022222201120222202222020212220101212200022212022122021212020220202121222101012100022102222122002020221201212211212200022021222221122201022022222012122022212002022212102122002012220011002120222112122120221110102122211021021002012200002212022121022120221201212101222221121121222222022201222221021012221122222212202210120122120102221202100112212022022020220100111121222121222122102201122212202121022220022220212210222200221020222221020221022021122212012022222010001212221022011100221010020011121122122120020100200120201222120111212111012112122222012020000202202020202202102120222222222212122122122112011222212102121222101122111112202111102002001202122020121000112020212022022121012110102022112221012220202202222111222211222120222221120211222022121112210222222122022211112222121220220122222212222012122122221102220122220221222002120212212002102021122221220200202202212200121122222220222211122120020002210122222210011222002022110222211101020200202002022221022211001222220020121210210001022202002120022220002220222200222212202220222222122210222222020222110222200010002220111222000211220020002
00120210222212222200221022120022022100212122102211202222110222121020120212122222211102022222122020122210212211211022221022002221200122222202020210101212022120222222002001022222122202112111020002101201202202200222012221020200022220200202222222122022021212112202212022221220100120011122200220121212211222102020212212112012002022020222122012200111021222212212101202201022022211120212222122022222012120001222002020212202222222111222002112201010022120022212002210222222212021202102222122012121222022122202202222220212102220022221022220200012122222222021121210122222211112222201100121001112200222220020000200212200221222212020210122020012002111021212111200200222000202021222121210020222020212022222102021010220212201211222222210122122220002221101121220010202201002211202022120010012021212212221222222200222211202101212011220022222020201121102122222012120220222002212201022222022101221220102202201221100212010120020202222222200020212221012002102120211220201201222202222100021222220022211110122022222222222121222022110221002221012102100020122222011220101200012120012202222102110101122020022122201012001101202210222200222101022121210122100002002222222002120002222012022220122220200010010102202221120021222100210201110211222022012001212122112212100011221200210201222220212121121020222220011010202212222002220001201122021220112222110100211201202222122120001211201122001220222222012110002021202222210211022222212211212221202211020020202112200012122022222022021010211202110210212220122122220222102220012121112002002001200220202112102220022222022012102100000111221200212111202121221022201011120212002102220022122111222012111201002220221010100110002202101222102011002210112220222222001220022020122022211220120101222222212101212212120222210110222010212212221212221001222002100221002222020020112100212202122000002010102001222200012222220202112021112002220100112111210222212222222010121022222201020222002222221202020201211112200222122222211201011200012221121101202220110122220221102022222112112122220222010220121202201
21021202121210212222120122200021201212222211222112121221201121200222120110222202200221021220101120012002210120100211222020220202121220210021212002122120220220022222222212022100212221222220222212212022022220221120122222110021212220202221012120202011000220211122211202202201212212121202200101012122220220121111220202222122020022220101012211222012202122121022000022121222210002120110120220010121120202111012211122011220222010120202221202010222102200020120121012120212202112020220011021201220222012222212120022202221012222022021010201222221021002020101020111002120210210212201021202100222100221222210120120120220020202212202020001111012212221222200220201220220002020111222101001101221200221210120211200210020121121100212222112101202112222102100001010220220121202021210022102020201211200001220222010210111020000020020011222002220220222222222110122222120012021201211010102022202010001111110222111021022111212202210210020020120101221200011121020001221010200002100120000201101222101120021022110220110010002021202220"
width = 25
height = 6
pixels_per_layer = width * height

# Decode the layered image (AoC 2019 day 8 part 2): `img` is a string of
# digit layers of width*height pixels each; the visible value of a pixel is
# the first non-transparent digit ("2" = transparent) scanning front to back.
final_image = ["2"] * pixels_per_layer
for layer_start in range(0, len(img), pixels_per_layer):
    layer = img[layer_start:layer_start + pixels_per_layer]
    for pos, pixel in enumerate(layer):
        # Only the frontmost opaque pixel at each position wins.
        if final_image[pos] == "2" and pixel != "2":
            final_image[pos] = pixel

# Render: "2" -> "-", "1" -> "#", "0" -> " ".  Each row is preceded by a
# newline, matching the original character-by-character output.
glyphs = {"2": "-", "1": "#", "0": " "}
rows = []
for row_start in range(0, pixels_per_layer, width):
    row_pixels = final_image[row_start:row_start + width]
    rows.append("".join(glyphs.get(p, "") for p in row_pixels))
msg = "\n" + "\n".join(rows)
print(msg)
| 649.25
| 15,008
| 0.98402
| 100
| 15,582
| 153.15
| 0.26
| 0.003526
| 0.005485
| 0.002742
| 0.008097
| 0.007052
| 0.003134
| 0
| 0
| 0
| 0
| 0.972724
| 0.009434
| 15,582
| 23
| 15,009
| 677.478261
| 0.019501
| 0
| 0
| 0
| 0
| 0
| 0.963417
| 0.962711
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.055556
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
124e200558537f6e61b93bf7c4a84e3a109d492e
| 7,316
|
py
|
Python
|
stylegan/miscellaneous/write_bash_script_in_batch.py
|
VITA-Group/BlackBoxGANCollapse
|
e52afab99e8b2e08a92aab86d84d53db77aa8c75
|
[
"MIT"
] | 3
|
2021-08-04T09:08:05.000Z
|
2022-01-26T07:32:41.000Z
|
stylegan/miscellaneous/write_bash_script_in_batch.py
|
VITA-Group/BlackBoxGANCollapse
|
e52afab99e8b2e08a92aab86d84d53db77aa8c75
|
[
"MIT"
] | null | null | null |
stylegan/miscellaneous/write_bash_script_in_batch.py
|
VITA-Group/BlackBoxGANCollapse
|
e52afab99e8b2e08a92aab86d84d53db77aa8c75
|
[
"MIT"
] | 1
|
2021-12-09T06:37:22.000Z
|
2021-12-09T06:37:22.000Z
|
import os
node_gpu_map = {'5m':'3','5n':'0123','5x':'023','72':'1','76':'2'}
#{'70':'34','72':'134','74':'134','76':'24','77':'13'}
node_gpu_lst = []
for k,v in node_gpu_map.items():
for gpu in v:
node_gpu_lst.append((k,int(gpu)))
print(node_gpu_lst)
for i in range(10):
with open('bash_run_{}_{}.sh'.format(node_gpu_lst[i][0],node_gpu_lst[i][1]), 'w') as fp:
fp.write('#!/bin/bash\n')
fp.write('for ((i=0; i<1; i++))\n')
fp.write('do\n')
fp.write('\tstart=$(($i*10000+{}))\n'.format(i*10000))
fp.write('\tend=$(($i*10000+10000+{}))\n'.format(i*10000))
fp.write('\techo \"$start\"\n')
fp.write('\techo \"$end\"\n')
fp.write('cd /mnt/ilcompf5d1/user/zwu/progressive_growing_of_gans/sampling\n')
fp.write("CUDA_VISIBLE_DEVICES=%d python random_sample_images.py --start=${start} --end=${end} --resolution 128\n" % node_gpu_lst[i][1])
fp.write('cd /mnt/ilcompf5d1/user/zwu/InsightFace-tensorflow\n')
fp.write("CUDA_VISIBLE_DEVICES=%d python get_embd.py --config_path=\"configs/config_ms1m_100.yaml\" "\
"--model_path=\"pretrained/config_ms1m_100_334k/best-m-334000\" "\
"--read_path=\"/mnt/ilcompf5d1/user/zwu/progressive_growing_of_gans/sampling/monte_carlo_sampling_100k/128/images/${start}_${end}\" "\
"--save_path=\"/mnt/ilcompf5d1/user/zwu/progressive_growing_of_gans/sampling/monte_carlo_sampling_100k/128/embds_pkls/${start}_${end}.pkl\"\n" % node_gpu_lst[i][1])
fp.write('done\n')
fp.write('#!/bin/bash\n')
fp.write('for ((i=0; i<1; i++))\n')
fp.write('do\n')
fp.write('\tstart=$(($i*10000+{}))\n'.format(i*10000))
fp.write('\tend=$(($i*10000+10000+{}))\n'.format(i*10000))
fp.write('\techo \"$start\"\n')
fp.write('\techo \"$end\"\n')
fp.write('cd /mnt/ilcompf5d1/user/zwu/progressive_growing_of_gans/sampling\n')
fp.write("CUDA_VISIBLE_DEVICES=%d python random_sample_images.py --start=${start} --end=${end} --resolution 256\n" % node_gpu_lst[i][1])
fp.write('cd /mnt/ilcompf5d1/user/zwu/InsightFace-tensorflow\n')
fp.write("CUDA_VISIBLE_DEVICES=%d python get_embd.py --config_path=\"configs/config_ms1m_100.yaml\" "\
"--model_path=\"pretrained/config_ms1m_100_334k/best-m-334000\" "\
"--read_path=\"/mnt/ilcompf5d1/user/zwu/progressive_growing_of_gans/sampling/monte_carlo_sampling_100k/256/images/${start}_${end}\" "\
"--save_path=\"/mnt/ilcompf5d1/user/zwu/progressive_growing_of_gans/sampling/monte_carlo_sampling_100k/256/embds_pkls/${start}_${end}.pkl\"\n" % node_gpu_lst[i][1])
fp.write('done\n')
fp.write('#!/bin/bash\n')
fp.write('for ((i=0; i<1; i++))\n')
fp.write('do\n')
fp.write('\tstart=$(($i*10000+{}))\n'.format(i*10000))
fp.write('\tend=$(($i*10000+10000+{}))\n'.format(i*10000))
fp.write('\techo \"$start\"\n')
fp.write('\techo \"$end\"\n')
fp.write('cd /mnt/ilcompf5d1/user/zwu/progressive_growing_of_gans/sampling\n')
fp.write("CUDA_VISIBLE_DEVICES=%d python random_sample_images.py --start=${start} --end=${end} --resolution 512\n" % node_gpu_lst[i][1])
fp.write('cd /mnt/ilcompf5d1/user/zwu/InsightFace-tensorflow\n')
fp.write("CUDA_VISIBLE_DEVICES=%d python get_embd.py --config_path=\"configs/config_ms1m_100.yaml\" "\
"--model_path=\"pretrained/config_ms1m_100_334k/best-m-334000\" "\
"--read_path=\"/mnt/ilcompf5d1/user/zwu/progressive_growing_of_gans/sampling/monte_carlo_sampling_100k/512/images/${start}_${end}\" "\
"--save_path=\"/mnt/ilcompf5d1/user/zwu/progressive_growing_of_gans/sampling/monte_carlo_sampling_100k/512/embds_pkls/${start}_${end}.pkl\"\n" % node_gpu_lst[i][1])
fp.write('done\n')
fp.write('#!/bin/bash\n')
fp.write('for ((i=0; i<1; i++))\n')
fp.write('do\n')
fp.write('\tstart=$(($i*10000+{}))\n'.format(i*10000))
fp.write('\tend=$(($i*10000+10000+{}))\n'.format(i*10000))
fp.write('\techo \"$start\"\n')
fp.write('\techo \"$end\"\n')
fp.write('cd /mnt/ilcompf5d1/user/zwu/stylegan-encoder/sampling\n')
fp.write("CUDA_VISIBLE_DEVICES=%d python random_sample_images.py --start=${start} --end=${end} --resolution 128\n" % node_gpu_lst[i][1])
fp.write('cd /mnt/ilcompf5d1/user/zwu/InsightFace-tensorflow\n')
fp.write("CUDA_VISIBLE_DEVICES=%d python get_embd.py --config_path=\"configs/config_ms1m_100.yaml\" "\
"--model_path=\"pretrained/config_ms1m_100_334k/best-m-334000\" "\
"--read_path=\"/mnt/ilcompf5d1/user/zwu/stylegan-encoder/sampling/monte_carlo_sampling_100k/128/images/${start}_${end}\" "\
"--save_path=\"/mnt/ilcompf5d1/user/zwu/stylegan-encoder/sampling/monte_carlo_sampling_100k/128/embds_pkls/${start}_${end}.pkl\"\n" % node_gpu_lst[i][1])
fp.write('done\n')
fp.write('#!/bin/bash\n')
fp.write('for ((i=0; i<1; i++))\n')
fp.write('do\n')
fp.write('\tstart=$(($i*10000+{}))\n'.format(i*10000))
fp.write('\tend=$(($i*10000+10000+{}))\n'.format(i*10000))
fp.write('\techo \"$start\"\n')
fp.write('\techo \"$end\"\n')
fp.write('cd /mnt/ilcompf5d1/user/zwu/stylegan-encoder/sampling\n')
fp.write("CUDA_VISIBLE_DEVICES=%d python random_sample_images.py --start=${start} --end=${end} --resolution 256\n" % node_gpu_lst[i][1])
fp.write('cd /mnt/ilcompf5d1/user/zwu/InsightFace-tensorflow\n')
fp.write("CUDA_VISIBLE_DEVICES=%d python get_embd.py --config_path=\"configs/config_ms1m_100.yaml\" "\
"--model_path=\"pretrained/config_ms1m_100_334k/best-m-334000\" "\
"--read_path=\"/mnt/ilcompf5d1/user/zwu/stylegan-encoder/sampling/monte_carlo_sampling_100k/256/images/${start}_${end}\" "\
"--save_path=\"/mnt/ilcompf5d1/user/zwu/stylegan-encoder/sampling/monte_carlo_sampling_100k/256/embds_pkls/${start}_${end}.pkl\"\n" % node_gpu_lst[i][1])
fp.write('done\n')
fp.write('#!/bin/bash\n')
fp.write('for ((i=0; i<1; i++))\n')
fp.write('do\n')
fp.write('\tstart=$(($i*10000+{}))\n'.format(i*10000))
fp.write('\tend=$(($i*10000+10000+{}))\n'.format(i*10000))
fp.write('\techo \"$start\"\n')
fp.write('\techo \"$end\"\n')
fp.write('cd /mnt/ilcompf5d1/user/zwu/stylegan-encoder/sampling\n')
fp.write("CUDA_VISIBLE_DEVICES=%d python random_sample_images.py --start=${start} --end=${end} --resolution 512\n" % node_gpu_lst[i][1])
fp.write('cd /mnt/ilcompf5d1/user/zwu/InsightFace-tensorflow\n')
fp.write("CUDA_VISIBLE_DEVICES=%d python get_embd.py --config_path=\"configs/config_ms1m_100.yaml\" "\
"--model_path=\"pretrained/config_ms1m_100_334k/best-m-334000\" "\
"--read_path=\"/mnt/ilcompf5d1/user/zwu/stylegan-encoder/sampling/monte_carlo_sampling_100k/512/images/${start}_${end}\" "\
"--save_path=\"/mnt/ilcompf5d1/user/zwu/stylegan-encoder/sampling/monte_carlo_sampling_100k/512/embds_pkls/${start}_${end}.pkl\"\n" % node_gpu_lst[i][1])
fp.write('done\n')
| 67.740741
| 181
| 0.620694
| 1,077
| 7,316
| 4.013928
| 0.096565
| 0.116586
| 0.086977
| 0.111034
| 0.955586
| 0.952811
| 0.952811
| 0.952811
| 0.952811
| 0.952811
| 0
| 0.074249
| 0.158693
| 7,316
| 107
| 182
| 68.373832
| 0.628107
| 0.007244
| 0
| 0.787879
| 0
| 0.060606
| 0.382402
| 0.19127
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010101
| 0
| 0.010101
| 0.010101
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
126696b923bec9fdf36c21760dd645eb525e98e6
| 2,382
|
py
|
Python
|
fusionsid/http.py
|
FusionSid/FusionSidAPI.py
|
e1b50622bf4fcec8265f8fd4e9b3ac79b580d286
|
[
"MIT"
] | 5
|
2022-03-05T23:29:33.000Z
|
2022-03-20T07:44:20.000Z
|
fusionsid/http.py
|
FusionSid/FusionSidAPI.py
|
e1b50622bf4fcec8265f8fd4e9b3ac79b580d286
|
[
"MIT"
] | null | null | null |
fusionsid/http.py
|
FusionSid/FusionSidAPI.py
|
e1b50622bf4fcec8265f8fd4e9b3ac79b580d286
|
[
"MIT"
] | null | null | null |
import aiohttp
from typing import Dict
class HTTPClient:
    """Async HTTP helper for the FusionSid API.

    Wraps one-shot aiohttp GET requests that return either raw bytes
    (images) or parsed JSON.  The ``get_*`` methods take a path relative
    to ``BASE_URL``; the ``get_url_*`` methods take an absolute URL.
    """

    def __init__(self):
        # Root of the FusionSid API; relative endpoints are appended to this.
        self.BASE_URL = "https://api.fusionsid.xyz/api"

    async def get_image(self, url) -> bytes:
        """
        Make a GET request to an API endpoint and return the image bytes.

        Parameters
        ----------
        url (str) : API endpoint path, resolved against ``BASE_URL``

        Returns
        -------
        :class:`bytes` : The image
        """
        # Delegate to the absolute-URL variant instead of duplicating the
        # session handling.
        return await self.get_url_image(f"{self.BASE_URL}/{url}")

    async def get_url_image(self, url) -> bytes:
        """
        Make a GET request to an absolute url and return the image bytes.

        Parameters
        ----------
        url (str) : The url to make a request to

        Returns
        -------
        :class:`bytes` : The image
        """
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                return await resp.read()

    async def get_json(self, url) -> Dict:
        """
        Make a GET request to an API endpoint and return the parsed JSON.

        Parameters
        ----------
        url (str) : API endpoint path, resolved against ``BASE_URL``

        Returns
        -------
        :class:`Dict` : The json response (the raw response object is
        returned instead when the body cannot be parsed as JSON)
        """
        return await self.get_url_json(f"{self.BASE_URL}/{url}")

    async def get_url_json(self, url) -> Dict:
        """
        Make a GET request to an absolute url and return the parsed JSON.

        Parameters
        ----------
        url (str) : The url to make a request to

        Returns
        -------
        :class:`Dict` : The json response (the raw response object is
        returned instead when the body cannot be parsed as JSON)
        """
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                try:
                    return await resp.json()
                except Exception:
                    # Best-effort fallback kept from the original: hand the
                    # response object back when the body isn't valid JSON.
                    return resp
| 30.538462
| 96
| 0.542401
| 287
| 2,382
| 4.456446
| 0.191638
| 0.056294
| 0.034402
| 0.056294
| 0.905395
| 0.905395
| 0.905395
| 0.905395
| 0.905395
| 0.905395
| 0
| 0
| 0.376994
| 2,382
| 77
| 97
| 30.935065
| 0.86186
| 0
| 0
| 0.709677
| 0
| 0
| 0.055862
| 0.033045
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032258
| false
| 0
| 0.064516
| 0
| 0.258065
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d6044d19ef4e0761eaf2c007ca86f84c4d84f7e6
| 104
|
py
|
Python
|
src/tf_transformers/losses/__init__.py
|
s4sarath/tf-transformers
|
361f7b01c7816034ddfc8661f8b6a967835bc1de
|
[
"Apache-2.0"
] | 2
|
2021-03-31T17:48:16.000Z
|
2021-08-22T11:52:19.000Z
|
src/tf_transformers/losses/__init__.py
|
Vibha111094/tf-transformers
|
f26d440a4de0557e0e481279bfd70a732aaa8825
|
[
"Apache-2.0"
] | null | null | null |
src/tf_transformers/losses/__init__.py
|
Vibha111094/tf-transformers
|
f26d440a4de0557e0e481279bfd70a732aaa8825
|
[
"Apache-2.0"
] | null | null | null |
from tf_transformers.losses.cross_entropy import cross_entropy_loss, cross_entropy_loss_label_smoothing
| 52
| 103
| 0.923077
| 15
| 104
| 5.866667
| 0.666667
| 0.409091
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048077
| 104
| 1
| 104
| 104
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d617cc2f1148571c8bc3349a0e441aef94e19af7
| 44
|
py
|
Python
|
robosuite-extra/robosuite_extra/models/grippers/__init__.py
|
eugval/sim2real_dynamics_simulation
|
2ed175803faa38792f6becc2dc91f44ae71ed9c2
|
[
"MIT"
] | 16
|
2020-07-28T14:35:44.000Z
|
2021-11-28T01:50:51.000Z
|
robosuite-extra/robosuite_extra/models/grippers/__init__.py
|
eugval/sim2real_dynamics_simulation
|
2ed175803faa38792f6becc2dc91f44ae71ed9c2
|
[
"MIT"
] | 1
|
2020-11-26T07:58:30.000Z
|
2020-12-01T04:40:28.000Z
|
robosuite-extra/robosuite_extra/models/grippers/__init__.py
|
eugval/sim2real_dynamics_simulation
|
2ed175803faa38792f6becc2dc91f44ae71ed9c2
|
[
"MIT"
] | 2
|
2020-10-18T01:38:49.000Z
|
2021-12-31T10:56:41.000Z
|
from .gripper_factory import gripper_factory
| 44
| 44
| 0.909091
| 6
| 44
| 6.333333
| 0.666667
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 44
| 1
| 44
| 44
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d64c9955b989b50653ee44901cc63e1e314439d2
| 40,959
|
py
|
Python
|
envs/babyai/levels/iclr19_levels.py
|
AliengirlLiv/babyai
|
51421ee11538bf110c5b2d0c84a15f783d854e7d
|
[
"MIT"
] | null | null | null |
envs/babyai/levels/iclr19_levels.py
|
AliengirlLiv/babyai
|
51421ee11538bf110c5b2d0c84a15f783d854e7d
|
[
"MIT"
] | null | null | null |
envs/babyai/levels/iclr19_levels.py
|
AliengirlLiv/babyai
|
51421ee11538bf110c5b2d0c84a15f783d854e7d
|
[
"MIT"
] | null | null | null |
"""
Levels described in the ICLR 2019 submission.
"""
from .levelgen import *
from .teachable_robot_levels import Level_TeachableRobot
class Level_IntroPrimitives(Level_TeachableRobot):
    """
    Single-room level where the reward comes from executing the primitive
    action the teacher announces (optionally within a delay window).
    """
    def __init__(self, max_delay=0, room_size=8, strict=False, seed=None, **kwargs):
        self.room_size = room_size
        self.max_delay = max_delay
        self.strict = strict
        super().__init__(
            num_rows=1,
            num_cols=1,
            room_size=room_size,
            seed=seed,
            **kwargs
        )

    def make_mission(self):
        # Sample a random primitive action and a random permitted delay in
        # [0, max_delay] (randint's upper bound is exclusive, hence the +1).
        chosen_action = self.action_space.sample()
        allowed_delay = self.np_random.randint(0, self.max_delay + 1)
        instruction = TakeActionInstr(chosen_action, allowed_delay, self.strict)
        return {
            "task": (chosen_action, allowed_delay),
            "instrs": instruction
        }

    def add_objs(self, task):
        # Distractor count is capped both at 20 and by the free floor area
        # of the (room_size - 3)^2 interior.
        cap = min(20, (self.room_size - 3) ** 2)
        n_distractors = self.np_random.randint(0, cap)
        distractors = self.add_distractors(num_distractors=n_distractors, all_unique=False)
        return distractors, None
class Level_IntroPrimitivesD0(Level_IntroPrimitives):
    """IntroPrimitives: act immediately (no delay)."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, max_delay=0, **kwargs)


class Level_IntroPrimitivesD1(Level_IntroPrimitives):
    """IntroPrimitives: delay of up to 1 step."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, max_delay=1, **kwargs)


class Level_IntroPrimitivesD5(Level_IntroPrimitives):
    """IntroPrimitives: delay of up to 5 steps."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, max_delay=5, **kwargs)


class Level_IntroPrimitivesD10(Level_IntroPrimitives):
    """IntroPrimitives: delay of up to 10 steps."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, max_delay=10, **kwargs)


class Level_IntroPrimitivesD0Strict(Level_IntroPrimitives):
    """Strict IntroPrimitives: no delay."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, max_delay=0, strict=True, **kwargs)


class Level_IntroPrimitivesD1Strict(Level_IntroPrimitives):
    """Strict IntroPrimitives: delay of up to 1 step."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, max_delay=1, strict=True, **kwargs)


class Level_IntroPrimitivesD5Strict(Level_IntroPrimitives):
    """Strict IntroPrimitives: delay of up to 5 steps."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, max_delay=5, strict=True, **kwargs)


class Level_IntroPrimitivesD10Strict(Level_IntroPrimitives):
    """Strict IntroPrimitives: delay of up to 10 steps."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, max_delay=10, strict=True, **kwargs)
class Level_GoToRedBallGrey(Level_TeachableRobot):
    """
    Go to the red ball, single room, with distractors.
    The distractors are all grey to reduce perceptual complexity.
    This level has distractors but doesn't make use of language.
    """

    def __init__(self, room_size=8, num_dists=7, seed=None, **kwargs):
        self.num_dists = num_dists
        super().__init__(num_rows=1, num_cols=1, room_size=room_size,
                         seed=seed, **kwargs)

    def make_mission(self):
        return {"task": ["ball", "red"],
                "instrs": GoToInstr(ObjDesc("ball", "red"))}

    def add_objs(self, _):
        target, _ = self.add_object(0, 0, 'ball', 'red')
        distractors = self.add_distractors(num_distractors=self.num_dists,
                                           all_unique=False)
        self.check_objs_reachable()
        # Recolour every distractor grey so only the target stands out.
        for distractor in distractors:
            distractor.color = 'grey'
        return distractors + [target], target
class Level_GoToRedBall(Level_TeachableRobot):
    """
    Go to the red ball, single room, with distractors.
    This level has distractors but doesn't make use of language.
    """

    def __init__(self, room_size=8, num_dists=7, seed=None, **kwargs):
        self.num_dists = num_dists
        super().__init__(num_rows=1, num_cols=1, room_size=room_size,
                         seed=seed, **kwargs)

    def make_mission(self):
        return {"task": ["ball", "red"],
                "instrs": GoToInstr(ObjDesc("ball", "red"))}

    def add_objs(self, _):
        target, _ = self.add_object(0, 0, 'ball', 'red')
        distractors = self.add_distractors(num_distractors=self.num_dists,
                                           all_unique=False)
        self.check_objs_reachable()
        return distractors + [target], target
class Level_GoToRedBallNoDists(Level_GoToRedBall):
    """
    Go to the red ball. No distractors present.
    """

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=8, num_dists=0, **kwargs)
class Level_GoToObj(Level_TeachableRobot):
    """
    Go to an object, inside a single room with no doors, no distractors.
    """

    def __init__(self, room_size=8, seed=None, **kwargs):
        super().__init__(num_rows=1, num_cols=1, room_size=room_size,
                         seed=seed, **kwargs)

    def make_mission(self):
        kind, colour = self.sample_object()
        return {"task": (kind, colour),
                "instrs": GoToInstr(ObjDesc(kind, colour))}

    def add_objs(self, task):
        kind, colour = task
        target, _ = self.add_object(0, 0, kind, colour)
        return [target], target
class Level_GoToObjS4(Level_GoToObj):
    """GoToObj in a size-4 room."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=4, **kwargs)


class Level_GoToObjS5(Level_GoToObj):
    """GoToObj in a size-5 room."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=5, **kwargs)


class Level_GoToObjS6(Level_GoToObj):
    """GoToObj in a size-6 room."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=6, **kwargs)


class Level_GoToObjS7(Level_GoToObj):
    """GoToObj in a size-7 room."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=7, **kwargs)
class Level_GoToLocal(Level_TeachableRobot):
    """
    Go to an object, inside a single room with no doors, multiple distractors.
    """

    def __init__(self, room_size=8, num_dists=8, seed=None, **kwargs):
        self.num_dists = num_dists
        super().__init__(num_rows=1, num_cols=1, room_size=room_size,
                         seed=seed, **kwargs)

    def make_mission(self):
        kind, colour = self.sample_object()
        return {"task": (kind, colour),
                "instrs": GoToInstr(ObjDesc(kind, colour))}

    def add_objs(self, task):
        kind, colour = task
        target, _ = self.add_object(0, 0, kind, colour)
        distractors = self.add_distractors(num_distractors=self.num_dists,
                                           all_unique=False)
        self.check_objs_reachable()
        return distractors + [target], target
class Level_GoToLocalS5N2(Level_GoToLocal):
    """GoToLocal, room size 5, 2 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=5, num_dists=2, **kwargs)


class Level_GoToLocalS6N2(Level_GoToLocal):
    """GoToLocal, room size 6, 2 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=6, num_dists=2, **kwargs)


class Level_GoToLocalS6N3(Level_GoToLocal):
    """GoToLocal, room size 6, 3 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=6, num_dists=3, **kwargs)


class Level_GoToLocalS6N4(Level_GoToLocal):
    """GoToLocal, room size 6, 4 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=6, num_dists=4, **kwargs)


class Level_GoToLocalS7N4(Level_GoToLocal):
    """GoToLocal, room size 7, 4 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=7, num_dists=4, **kwargs)


class Level_GoToLocalS7N5(Level_GoToLocal):
    """GoToLocal, room size 7, 5 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=7, num_dists=5, **kwargs)


class Level_GoToLocalS8N2(Level_GoToLocal):
    """GoToLocal, room size 8, 2 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=8, num_dists=2, **kwargs)


class Level_GoToLocalS8N3(Level_GoToLocal):
    """GoToLocal, room size 8, 3 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=8, num_dists=3, **kwargs)


class Level_GoToLocalS8N4(Level_GoToLocal):
    """GoToLocal, room size 8, 4 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=8, num_dists=4, **kwargs)


class Level_GoToLocalS8N5(Level_GoToLocal):
    """GoToLocal, room size 8, 5 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=8, num_dists=5, **kwargs)


class Level_GoToLocalS8N6(Level_GoToLocal):
    """GoToLocal, room size 8, 6 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=8, num_dists=6, **kwargs)


class Level_GoToLocalS8N7(Level_GoToLocal):
    """GoToLocal, room size 8, 7 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=8, num_dists=7, **kwargs)
class Level_PickupLocal(Level_TeachableRobot):
    """
    Pick up an object, inside a single room with no doors, multiple distractors.
    """

    def __init__(self, room_size=8, num_dists=8, seed=None, **kwargs):
        self.num_dists = num_dists
        super().__init__(num_rows=1, num_cols=1, room_size=room_size,
                         seed=seed, **kwargs)

    def make_mission(self):
        kind, colour = self.sample_object()
        return {"task": (kind, colour),
                "instrs": PickupInstr(ObjDesc(kind, colour))}

    def add_objs(self, task):
        kind, colour = task
        target, _ = self.add_object(0, 0, kind, colour)
        distractors = self.add_distractors(num_distractors=self.num_dists,
                                           all_unique=False)
        self.check_objs_reachable()
        return distractors + [target], target
class Level_PickupLocalS5N2(Level_PickupLocal):
    """PickupLocal, room size 5, 2 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=5, num_dists=2, **kwargs)


class Level_PickupLocalS6N3(Level_PickupLocal):
    """PickupLocal, room size 6, 3 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=6, num_dists=3, **kwargs)


class Level_PickupLocalS7N4(Level_PickupLocal):
    """PickupLocal, room size 7, 4 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=7, num_dists=4, **kwargs)


class Level_PickupLocalS8N7(Level_PickupLocal):
    """PickupLocal, room size 8, 7 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=8, num_dists=7, **kwargs)
class Level_PutNextLocal(Level_TeachableRobot):
    """
    Put an object next to another object, inside a single room
    with no doors, no distractors.
    """

    def __init__(self, room_size=8, num_dists=8, seed=None, **kwargs):
        self.num_dists = num_dists
        super().__init__(num_rows=1, num_cols=1, room_size=room_size,
                         seed=seed, **kwargs)

    def make_mission(self):
        o1_type, o1_color = self.sample_object()
        # Resample the second object until it differs from the first.
        o2_type, o2_color = o1_type, o1_color
        while o2_type == o1_type and o2_color == o1_color:
            o2_type, o2_color = self.sample_object()
        instr = PutNextInstr(ObjDesc(o1_type, o1_color),
                             ObjDesc(o2_type, o2_color))
        return {"task": (o1_type, o1_color, o2_type, o2_color), "instrs": instr}

    def add_objs(self, task):
        o1_type, o1_color, o2_type, o2_color = task
        obj1, _ = self.add_object(0, 0, o1_type, o1_color)
        obj2, _ = self.add_object(0, 0, o2_type, o2_color)
        # Two of the budget are already used by the task objects.
        distractors = self.add_distractors(num_distractors=self.num_dists - 2,
                                           all_unique=True)
        self.check_objs_reachable()
        return distractors + [obj1, obj2], (obj1, obj2)
class Level_PutNextLocalS5N3(Level_PutNextLocal):
    """PutNextLocal, room size 5, 3 objects total."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=5, num_dists=3, **kwargs)


class Level_PutNextLocalS5N2(Level_PutNextLocal):
    """PutNextLocal, room size 5, 2 objects total."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=5, num_dists=2, **kwargs)


class Level_PutNextLocalS6N4(Level_PutNextLocal):
    """PutNextLocal, room size 6, 4 objects total."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=6, num_dists=4, **kwargs)


class Level_PutNextLocalS7N4(Level_PutNextLocal):
    """PutNextLocal, room size 7, 4 objects total."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=7, num_dists=4, **kwargs)
class Level_OpenLocal(Level_TeachableRobot):
    """
    Open a door in the current room (0,0), since that's currently where we
    initialize the agent.
    """

    def __init__(self, room_size=8, num_dists=8, seed=None, **kwargs):
        self.num_dists = num_dists
        super().__init__(room_size=room_size, seed=seed, **kwargs)

    def make_mission(self):
        # Only the colour of the sampled object is used.
        _, door_color = self.sample_object()
        return {"task": door_color,
                "instrs": OpenInstr(ObjDesc("door", door_color))}

    def add_objs(self, task):
        door_color = task
        self.connect_all()
        distractors = self.add_distractors(num_distractors=self.num_dists,
                                           all_unique=False)
        # Recolour one random door of the start room so the mission is
        # guaranteed to be satisfiable.
        start_room = self.get_room(0, 0)
        candidates = [d for d in start_room.doors if d]
        door = self._rand_elem(candidates)
        door.color = door_color
        self.check_objs_reachable()
        return distractors + self.get_doors(), door
class Level_OpenLocalS5N2(Level_OpenLocal):
    """OpenLocal, room size 5, 2 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=5, num_dists=2, **kwargs)


class Level_OpenLocalS5N3(Level_OpenLocal):
    """OpenLocal, room size 5, 3 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=5, num_dists=3, **kwargs)


class Level_OpenLocalS6N4(Level_OpenLocal):
    """OpenLocal, room size 6, 4 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=6, num_dists=4, **kwargs)


class Level_OpenLocalS7N4(Level_OpenLocal):
    """OpenLocal, room size 7, 4 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=7, num_dists=4, **kwargs)
class Level_GoTo(Level_TeachableRobot):
    """
    Go to an object, the object may be in another room. Many distractors.
    """

    def __init__(self, room_size=8, num_rows=3, num_cols=3, num_dists=18,
                 doors_open=False, seed=None, **kwargs):
        self.num_dists = num_dists
        self.doors_open = doors_open
        super().__init__(num_rows=num_rows, num_cols=num_cols,
                         room_size=room_size, seed=seed, **kwargs)

    def make_mission(self):
        kind, colour = self.sample_object()
        return {"task": (kind, colour),
                "instrs": GoToInstr(ObjDesc(kind, colour))}

    def add_objs(self, task):
        self.connect_all()
        kind, colour = task
        # Drop the target into a uniformly chosen room.
        row = self._rand_int(0, self.num_rows)
        col = self._rand_int(0, self.num_cols)
        target, _ = self.add_object(row, col, kind, colour)
        distractors = self.add_distractors(num_distractors=self.num_dists,
                                           all_unique=False)
        if self.doors_open:
            # Optionally start with every door open.
            self.open_all_doors()
        self.check_objs_reachable()
        return distractors + self.get_doors() + [target], target
class Level_GoToSmall2by2(Level_GoTo):
    """GoTo in a 2x2 grid of size-5 rooms with 6 distractors."""

    def __init__(self, **kwargs):
        super().__init__(num_rows=2, num_cols=2, room_size=5,
                         num_dists=6, **kwargs)


class Level_GoToSmall3by3(Level_GoTo):
    """GoTo in a 3x3 grid of size-4 rooms with 5 distractors."""

    def __init__(self, **kwargs):
        super().__init__(num_rows=3, num_cols=3, room_size=4,
                         num_dists=5, **kwargs)
class Level_Seek(Level_GoTo):
    """GoTo maze variant that issues a SeekInstr instead of a GoToInstr."""

    def make_mission(self):
        kind, colour = self.sample_object()
        return {"task": (kind, colour),
                "instrs": SeekInstr(ObjDesc(kind, colour))}


class Level_SeekL0(Level_GoToRedBallNoDists):
    """Seek the red ball in an empty single room."""

    def make_mission(self):
        return {"task": ('ball', 'red'),
                "instrs": SeekInstr(ObjDesc('ball', 'red'))}


class Level_SeekSmall2by2(Level_Seek):
    """Seek in a 2x2 grid of size-5 rooms with 6 distractors."""

    def __init__(self, **kwargs):
        super().__init__(num_rows=2, num_cols=2, room_size=5,
                         num_dists=6, **kwargs)


class Level_SeekSmall3by3(Level_Seek):
    """Seek in a 3x3 grid of size-4 rooms with 5 distractors."""

    def __init__(self, **kwargs):
        super().__init__(num_rows=3, num_cols=3, room_size=4,
                         num_dists=5, **kwargs)


class Level_SeekLocal(Level_GoToLocal):
    """Single-room Seek variant."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def make_mission(self):
        kind, colour = self.sample_object()
        return {"task": (kind, colour),
                "instrs": SeekInstr(ObjDesc(kind, colour))}
class Level_GoToHeldout(Level_GoTo):
    """GoTo variant where the target colour is always yellow."""

    def make_mission(self):
        # Sample as usual (keeps RNG consumption identical), then force colour.
        obj_type, obj_color = self.sample_object()
        obj_color = 'yellow'
        return {"task": (obj_type, obj_color),
                "instrs": GoToInstr(ObjDesc(obj_type, obj_color))}
class Level_GoToGreenBox(Level_GoTo):
    """Always target the green box, using GoToUnknownInstr."""

    def make_mission(self):
        obj_type = 'box'
        obj_color = 'green'
        return {"task": (obj_type, obj_color),
                "instrs": GoToUnknownInstr(ObjDesc(obj_type, obj_color))}


class Level_GoToGreenBoxLocal(Level_GoToLocal):
    """Green-box target in a single large (size 16) room."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=16, **kwargs)

    def make_mission(self):
        obj_type = 'box'
        obj_color = 'green'
        return {"task": (obj_type, obj_color),
                "instrs": GoToUnknownInstr(ObjDesc(obj_type, obj_color))}
class Level_GoToDouble(Level_GoTo):
    """Go to one object, then go to another (sequenced with BeforeInstr)."""

    def __init__(
        self,
        room_size=8,
        num_rows=3,
        num_cols=3,
        num_dists=18,
        doors_open=False,
        seed=None,
        **kwargs
    ):
        # Number of distractor objects scattered over the maze.
        self.num_dists = num_dists
        # Whether all doors should be opened after generation.
        self.doors_open = doors_open
        super().__init__(
            num_rows=num_rows,
            num_cols=num_cols,
            room_size=room_size,
            seed=seed,
            **kwargs
        )

    def make_mission(self):
        # Two independently sampled targets; they may coincide.
        obj1_type, obj1_color = self.sample_object()
        obj2_type, obj2_color = self.sample_object()
        return {
            "task": (obj1_type, obj1_color, obj2_type, obj2_color),
            "instrs": BeforeInstr(
                GoToInstr(ObjDesc(obj1_type, obj1_color)),
                GoToInstr(ObjDesc(obj2_type, obj2_color))
            )
        }

    def add_objs(self, task):
        self.connect_all()
        obj1_type, obj1_color, obj2_type, obj2_color = task
        # Choose room
        room_i = self._rand_int(0, self.num_rows)
        room_j = self._rand_int(0, self.num_cols)
        # NOTE(review): `obj` is overwritten by the second add_object call
        # below, so the first target never appears in the returned object list
        # and only the second is reported as the target object -- confirm this
        # is intended rather than a copy-paste oversight.
        obj, _ = self.add_object(room_i, room_j, obj1_type, obj1_color)
        room_i = self._rand_int(0, self.num_rows)
        room_j = self._rand_int(0, self.num_cols)
        obj, _ = self.add_object(room_i, room_j, obj2_type, obj2_color)
        dists = self.add_distractors(num_distractors=self.num_dists, all_unique=False)
        # If requested, open all the doors
        if self.doors_open:
            self.open_all_doors()
        self.check_objs_reachable()
        return dists + self.get_doors() + [obj], obj
class Level_GoToOpen(Level_GoTo):
    """GoTo with every door already open."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, doors_open=True, **kwargs)


class Level_GoToObjMaze(Level_GoTo):
    """
    Go to an object, the object may be in another room. No distractors.
    """

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, num_dists=1, doors_open=False, **kwargs)


class Level_GoToObjMazeOpen(Level_GoTo):
    """Maze GoTo, one distractor, all doors open."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, num_dists=1, doors_open=True, **kwargs)
class Level_GoToObjMazeS4R2(Level_GoTo):
    """Maze GoTo, one distractor, 2x2 grid of size-4 rooms."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, num_dists=1, room_size=4,
                         num_rows=2, num_cols=2, **kwargs)


class Level_GoToObjMazeS4(Level_GoTo):
    """Maze GoTo, one distractor, size-4 rooms."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, num_dists=1, room_size=4, **kwargs)


class Level_GoToObjMazeS5(Level_GoTo):
    """Maze GoTo, one distractor, size-5 rooms."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, num_dists=1, room_size=5, **kwargs)


class Level_GoToObjMazeS6(Level_GoTo):
    """Maze GoTo, one distractor, size-6 rooms."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, num_dists=1, room_size=6, **kwargs)
class Level_GoToObjMazeS7(Level_GoTo):
    """Maze GoTo, one distractor, size-7 rooms.

    Bug fix: this class was previously named Level_GoToObjMazeS6, which
    silently shadowed the real S6 level defined immediately above it; the
    room_size=7 body shows it was meant to be the S7 variant.
    """

    def __init__(self, seed=None, **kwargs):
        super().__init__(num_dists=1, room_size=7, seed=seed, **kwargs)
class Level_GoToObjDistractors(Level_GoTo):
    """Maze GoTo with many (60) distractors; target is kept unambiguous."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, num_dists=60, room_size=8, **kwargs)

    def add_objs(self, task):
        obj_list, obj = super().add_objs(task)
        target_type = obj.type
        target_color = obj.color
        # Recolour any distractor that would be indistinguishable from the target.
        for dist in obj_list[:-1]:
            if dist.type == target_type and dist.color == target_color:
                alternatives = [c for c in COLOR_NAMES if c != target_color]
                dist.color = self._rand_elem(alternatives)
        self.check_objs_reachable()
        return obj_list, obj
class Level_GoToObjDistractorsLocal(Level_GoToLocal):
    """Single-room GoTo with 14 distractors; target is kept unambiguous."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, num_dists=14, room_size=8, **kwargs)

    def add_objs(self, task):
        obj_list, obj = super().add_objs(task)
        target_type = obj.type
        target_color = obj.color
        # Recolour any distractor that exactly matches the target description.
        for dist in obj_list[:-1]:
            if dist.type == target_type and dist.color == target_color:
                alternatives = [c for c in COLOR_NAMES if c != target_color]
                dist.color = self._rand_elem(alternatives)
        self.check_objs_reachable()
        return obj_list, obj
class Level_GoToObjDistractorsLocalBig(Level_GoToLocal):
    """Large single room (size 20) with 80 distractors; target unambiguous."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, num_dists=80, room_size=20, **kwargs)

    def add_objs(self, task):
        obj_list, obj = super().add_objs(task)
        target_type = obj.type
        target_color = obj.color
        # Recolour any distractor that exactly matches the target description.
        for dist in obj_list[:-1]:
            if dist.type == target_type and dist.color == target_color:
                alternatives = [c for c in COLOR_NAMES if c != target_color]
                dist.color = self._rand_elem(alternatives)
        self.check_objs_reachable()
        return obj_list, obj
class Level_GoToImpUnlock(Level_TeachableRobot):
    """
    Go to an object, which may be in a locked room.
    Competencies: Maze, GoTo, ImpUnlock
    No unblocking.
    """

    def make_mission(self):
        obj_type, obj_color = self.sample_object()
        return {
            "task": (obj_type, obj_color),
            "instrs": GoToInstr(ObjDesc(obj_type, obj_color))
        }

    def add_objs(self, task):
        obj_type, obj_color = task
        # Pick a room other than the agent's to lock.
        while True:
            jd = self._rand_int(0, self.num_rows)
            id = self._rand_int(0, self.num_cols)
            locked_room = self.get_room(id, jd)
            agent_room = self.room_from_pos(*self.agent_pos)
            if locked_room is not agent_room:
                break
        _, color = self.sample_object()
        door, pos = self.add_door(id, jd, color=color, locked=True)
        # Add the key to a different room
        while True:
            jk = self._rand_int(0, self.num_rows)
            ik = self._rand_int(0, self.num_cols)
            if ik == id and jk == jd:
                continue
            key, _ = self.add_object(ik, jk, 'key', door.color)
            break
        self.connect_all()
        # Add distractors to all but the locked room.
        # We do this to speed up the reachability test,
        # which otherwise will reject all levels with
        # objects in the locked room.
        all_dists = []
        for i in range(self.num_cols):
            for j in range(self.num_rows):
                # Bug fix: compare ints with != rather than `is not`, which
                # tests object identity and is only accidentally correct for
                # CPython's cached small integers.
                if i != id or j != jd:
                    dists = self.add_distractors(
                        i,
                        j,
                        num_distractors=2,
                        all_unique=False,
                    )
                    all_dists += dists
        # No distractor may match the target description exactly.
        for dist in all_dists:
            if dist.type == obj_type and dist.color == obj_color:
                dist.color = self._rand_elem([c for c in COLOR_NAMES if not c == obj_color])
        self.check_objs_reachable()
        # The target is added last, inside the locked room (hence after the
        # reachability check).
        obj, _ = self.add_object(id, jd, obj_type, obj_color)
        return all_dists + self.get_doors() + [obj, key], obj
class Level_GoToImpUnlockLocal(Level_GoToImpUnlock):
    """Small (1x2 grid of size-16 rooms) version of GoToImpUnlock.

    Bug fix: the class previously defined ``__init__`` twice; the first
    definition (room_size=16) was dead code because the second silently
    shadowed it. The two are merged so both the grid layout and the larger
    room size take effect.
    """

    def __init__(self, seed=None, **kwargs):
        super().__init__(
            num_rows=1,
            num_cols=2,
            room_size=16,
            seed=seed,
            **kwargs
        )
class Level_Pickup(Level_TeachableRobot):
    """
    Pick up an object, the object may be in another room.
    """

    def __init__(
        self,
        num_dists=18,
        doors_open=False,
        **kwargs
    ):
        self.num_dists = num_dists
        self.doors_open = doors_open
        super().__init__(**kwargs)

    def make_mission(self):
        obj_type, obj_color = self.sample_object()
        return {
            "task": (obj_type, obj_color),
            "instrs": PickupInstr(ObjDesc(obj_type, obj_color))
        }

    def add_objs(self, task):
        self.connect_all()
        room_i = self._rand_int(0, self.num_rows)
        room_j = self._rand_int(0, self.num_cols)
        obj_type, obj_color = task
        obj, _ = self.add_object(room_i, room_j, obj_type, obj_color)
        # Bug fix: the distractor count was hard-coded to 17, silently
        # ignoring the num_dists constructor argument (and subclasses that
        # pass one). Honour the configured value instead.
        dists = self.add_distractors(num_distractors=self.num_dists, all_unique=False)
        # Bug fix: doors_open was stored but never used; mirror Level_GoTo.
        if self.doors_open:
            self.open_all_doors()
        self.check_objs_reachable()
        return dists + self.get_doors() + [obj], obj
class Level_PickupObjBigger(Level_Pickup):
    """Pickup in a larger 5x5 maze of size-6 rooms."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, num_dists=18, room_size=6,
                         num_rows=5, num_cols=5, **kwargs)
class Level_UnblockPickup(Level_TeachableRobot):
    """
    Pick up an object, the object may be in another room. The path may
    be blocked by one or more obstructors.
    """

    def make_mission(self):
        obj_type, obj_color = self.sample_object()
        return {
            "task": (obj_type, obj_color),
            "instrs": PickupInstr(ObjDesc(obj_type, obj_color))
        }

    def add_objs(self, task):
        self.connect_all()
        # Place the target in a uniformly chosen room.
        room_i = self._rand_int(0, self.num_rows)
        room_j = self._rand_int(0, self.num_cols)
        obj_type, obj_color = task
        obj, _ = self.add_object(room_i, room_j, obj_type, obj_color)
        dists = self.add_distractors(num_distractors=39, all_unique=False)
        # Deliberately inverted check: this level REQUIRES that something be
        # blocked, so samples where every object is already reachable are
        # rejected and regenerated.
        if self.check_objs_reachable(raise_exc=False):
            raise RejectSampling('all objects reachable')
        return dists + self.get_doors() + [obj], obj
class Level_Open(Level_TeachableRobot):
    """
    Open a door, which may be in another room.
    """

    def __init__(self, num_dists=8, **kwargs):
        self.num_dists = num_dists
        super().__init__(**kwargs)

    def make_mission(self):
        # Only the colour of the sampled object is used.
        _, door_color = self.sample_object()
        return {"task": door_color,
                "instrs": OpenInstr(ObjDesc("door", door_color))}

    def add_objs(self, task):
        door_color = task
        self.connect_all()
        distractors = self.add_distractors(num_distractors=self.num_dists,
                                           all_unique=False)
        self.check_objs_reachable()
        # Recolour one random door so the mission is guaranteed satisfiable.
        doors = []
        for i in range(self.num_rows):
            for j in range(self.num_cols):
                for d in self.get_room(i, j).doors:
                    if d:
                        doors.append(d)
        door = self._rand_elem(doors)
        door.color = door_color
        return distractors + self.get_doors(), door
class Level_OpenSmall2by2(Level_Open):
    """Open in a 2x2 grid of size-6 rooms with 4 distractors."""

    def __init__(self, **kwargs):
        super().__init__(num_rows=2, num_cols=2, room_size=6,
                         num_dists=4, **kwargs)


class Level_OpenSmall3by3(Level_Open):
    """Open in a 3x3 grid of size-5 rooms with 5 distractors."""

    def __init__(self, **kwargs):
        super().__init__(num_rows=3, num_cols=3, room_size=5,
                         num_dists=5, **kwargs)
class Level_OpenDoorsDouble(Level_TeachableRobot):
    """
    Open door X, then open door Y.
    The two doors face opposite directions, so the agent cannot see whether
    the door behind it is open; solving this effectively requires memory
    (a recurrent policy).
    """

    def make_mission(self):
        # Two distinct door colours; colour alone identifies each door.
        colors = self._rand_subset(COLOR_NAMES, 2)
        first = OpenInstr(ObjDesc('door', colors[0]))
        second = OpenInstr(ObjDesc('door', colors[1]))
        return {"task": colors, "instrs": BeforeInstr(first, second)}

    def add_objs(self, task):
        first_color, second_color = task
        # Doors on opposite sides (indices 2 and 0) of the centre room.
        door1, _ = self.add_door(1, 1, 2, color=first_color, locked=False)
        door2, _ = self.add_door(1, 1, 0, color=second_color, locked=False)
        self.connect_all()
        distractors = self.add_distractors(num_distractors=18, all_unique=False)
        self.check_objs_reachable()
        return distractors + self.get_doors(), (door1, door2)
class Level_Unlock(Level_TeachableRobot):
    """
    Unlock a door.
    Competencies: Maze, Open, Unlock. No unblocking.
    """

    def make_mission(self):
        # Only the colour of the sampled object is used.
        _, obj_color = self.sample_object()
        return {
            "task": obj_color,
            "instrs": OpenInstr(ObjDesc("door", obj_color))
        }

    def add_objs(self, task):
        obj_color = task
        # Pick a room other than the agent's to lock.
        while True:
            jd = self._rand_int(0, self.num_rows)
            id = self._rand_int(0, self.num_cols)
            locked_room = self.get_room(id, jd)
            agent_room = self.room_from_pos(*self.agent_pos)
            if locked_room is not agent_room:
                break
        door, pos = self.add_door(id, jd, color=obj_color, locked=True)
        # Add the key to a different room
        while True:
            jk = self._rand_int(0, self.num_rows)
            ik = self._rand_int(0, self.num_cols)
            if ik == id and jk == jd:
                continue
            key, _ = self.add_object(ik, jk, 'key', door.color)
            break
        # Ensure that the locked door is the only
        # door of that color
        colors = list(filter(lambda c: not c == obj_color, COLOR_NAMES))
        self.connect_all(door_colors=colors)
        # Add distractors to all but the locked room.
        # We do this to speed up the reachability test,
        # which otherwise will reject all levels with
        # objects in the locked room.
        all_dists = []
        for i in range(self.num_cols):
            for j in range(self.num_rows):
                # Bug fix: use != for integer comparison; `is not` tests
                # identity and only works by accident for CPython small ints.
                if i != id or j != jd:
                    dists = self.add_distractors(
                        i,
                        j,
                        num_distractors=3,
                        all_unique=False
                    )
                    all_dists += dists
        self.check_objs_reachable()
        return [key] + all_dists + self.get_doors(), door
class Level_UnlockTopLeft(Level_TeachableRobot):
    """
    Unlock a door.
    Competencies: Maze, Open, Unlock. No unblocking.
    """

    def __init__(self, *args, **kwargs):
        # Episode length is capped relative to the fixed room size.
        room_size = 8
        super().__init__(*args, **kwargs, max_steps=room_size**2,)

    def make_mission(self):
        # Only the colour of the sampled object is used.
        _, obj_color = self.sample_object()
        return {
            "task": obj_color,
            "instrs": OpenInstr(ObjDesc("door", obj_color))
        }

    def add_objs(self, task):
        obj_color = task
        # The locked door always sits at grid position (1, 0); the key is
        # always in room (0, 0).
        id = 1
        jd = 0
        door, pos = self.add_door(id, jd, door_idx=2, color=obj_color, locked=True)
        # Add the key to a different room
        jk = 0
        ik = 0
        key, _ = self.add_object(ik, jk, 'key', door.color)
        # Ensure that the locked door is the only
        # door of that color
        colors = list(filter(lambda c: not c == obj_color, COLOR_NAMES))
        self.connect_all(door_colors=colors)
        # Add distractors to all but the locked room.
        # We do this to speed up the reachability test,
        # which otherwise will reject all levels with
        # objects in the locked room.
        all_dists = []
        for i in range(self.num_cols):
            for j in range(self.num_rows):
                # Bug fix: use != for integer comparison; `is not` tests
                # identity and only works by accident for CPython small ints.
                if i != id or j != jd:
                    dists = self.add_distractors(
                        i,
                        j,
                        num_distractors=3,
                        all_unique=False
                    )
                    all_dists += dists
        self.check_objs_reachable()
        return [key] + all_dists + self.get_doors(), door
class Level_UnlockLocal(Level_Unlock):
    """Unlock in a minimal 1x2 grid of size-8 rooms."""

    def __init__(self, **kwargs):
        super().__init__(num_rows=1, num_cols=2, room_size=8, **kwargs)
class Level_PutNext(Level_TeachableRobot):
    """
    Put an object next to another object. Either of these may be in
    another room.
    """

    def __init__(self, room_size=8, num_dists=16, seed=None, **kwargs):
        self.num_dists = num_dists
        super().__init__(room_size=room_size, seed=seed, **kwargs)

    def make_mission(self):
        o1_type, o1_color = self.sample_object()
        # Resample the second object until it differs from the first.
        o2_type, o2_color = o1_type, o1_color
        while o2_type == o1_type and o2_color == o1_color:
            o2_type, o2_color = self.sample_object()
        instr = PutNextInstr(ObjDesc(o1_type, o1_color),
                             ObjDesc(o2_type, o2_color))
        return {"task": (o1_type, o1_color, o2_type, o2_color), "instrs": instr}

    def add_objs(self, task):
        self.connect_all()
        o1_type, o1_color, o2_type, o2_color = task
        # Each task object goes into an independently chosen room.
        i1 = self._rand_int(0, self.num_rows)
        j1 = self._rand_int(0, self.num_cols)
        i2 = self._rand_int(0, self.num_rows)
        j2 = self._rand_int(0, self.num_cols)
        obj1, _ = self.add_object(i1, j1, o1_type, o1_color)
        obj2, _ = self.add_object(i2, j2, o2_type, o2_color)
        distractors = self.add_distractors(num_distractors=self.num_dists,
                                           all_unique=False)
        self.check_objs_reachable()
        return distractors + self.get_doors() + [obj1, obj2], (obj1, obj2)
class Level_PutNextSameColor(Level_PutNext):
    """PutNext where both objects share the first object's colour."""

    def make_mission(self):
        o1_type, o1_color = self.sample_object()
        # Resample until the second object's TYPE differs; its sampled colour
        # is discarded and the first object's colour is reused for both.
        o2_type = o1_type
        while o2_type == o1_type:
            o2_type, _ = self.sample_object()
        instr = PutNextSameColorInstr(ObjDesc(o1_type, o1_color),
                                      ObjDesc(o2_type, o1_color))
        return {"task": (o1_type, o1_color, o2_type, o1_color), "instrs": instr}
class Level_PutNextSameColorLocal(Level_PutNextLocal):
    """Single large room (size 16) variant of PutNextSameColor."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=16, **kwargs)

    def make_mission(self):
        o1_type, o1_color = self.sample_object()
        # Resample until the second object's TYPE differs; its sampled colour
        # is discarded and the first object's colour is reused for both.
        o2_type = o1_type
        while o2_type == o1_type:
            o2_type, _ = self.sample_object()
        instr = PutNextSameColorInstr(ObjDesc(o1_type, o1_color),
                                      ObjDesc(o2_type, o1_color))
        return {"task": (o1_type, o1_color, o2_type, o1_color), "instrs": instr}
class Level_PickupLoc(LevelGen):
    """
    Pick up an object which may be described using its location. This is a
    single room environment.
    Competencies: PickUp, Loc. No unblocking.
    """

    def __init__(self, seed=None, **kwargs):
        # Many distractors raise the chance of ambiguous locations in the room.
        super().__init__(
            seed=seed,
            num_rows=1,
            num_cols=1,
            num_dists=8,
            action_kinds=['pickup'],
            instr_kinds=['action'],
            locked_room_prob=0,
            locations=True,
            unblocking=False,
            **kwargs
        )
class Level_GoToSeq(LevelGen):
    """
    Sequencing of go-to-object commands.
    Competencies: Maze, GoTo, Seq.
    No locked room, no locations, no unblocking.
    """

    def __init__(self, room_size=8, num_rows=3, num_cols=3, num_dists=18,
                 seed=None, **kwargs):
        super().__init__(
            room_size=room_size,
            num_rows=num_rows,
            num_cols=num_cols,
            num_dists=num_dists,
            seed=seed,
            action_kinds=['goto'],
            locked_room_prob=0,
            locations=False,
            unblocking=False,
            **kwargs
        )


class Level_GoToSeqS5R2(Level_GoToSeq):
    """GoToSeq in a 2x2 grid of size-5 rooms with 4 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=5, num_rows=2,
                         num_cols=2, num_dists=4, **kwargs)
class Level_Synth(LevelGen):
    """
    Union of all instructions from PutNext, Open, Goto and PickUp. The agent
    may need to move objects around. The agent may have to unlock the door,
    but only if it is explicitly referred by the instruction.
    Competencies: Maze, Unblock, Unlock, GoTo, PickUp, PutNext, Open
    """

    def __init__(self, room_size=8, num_rows=3, num_cols=3, num_dists=18,
                 seed=None, **kwargs):
        # Many distractors raise the chance of ambiguous locations in a room.
        super().__init__(
            room_size=room_size,
            num_rows=num_rows,
            num_cols=num_cols,
            num_dists=num_dists,
            seed=seed,
            instr_kinds=['action'],
            locations=False,
            unblocking=True,
            implicit_unlock=False,
            **kwargs
        )


class Level_SynthS5R2(Level_Synth):
    """Synth in a 2x2 grid of size-5 rooms with 7 distractors."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, room_size=5, num_rows=2,
                         num_cols=2, num_dists=7, **kwargs)
class Level_SynthLoc(LevelGen):
    """
    Like Synth, but a significant share of object descriptions involves
    location language like in PickUpLoc. No implicit unlocking.
    Competencies: Maze, Unblock, Unlock, GoTo, PickUp, PutNext, Open, Loc
    """

    def __init__(self, seed=None, **kwargs):
        # Many distractors raise the chance of ambiguous locations in a room.
        super().__init__(
            seed=seed,
            instr_kinds=['action'],
            locations=True,
            unblocking=True,
            implicit_unlock=False,
            **kwargs
        )


class Level_SynthSeq(LevelGen):
    """
    Like SynthLoc, but now with multiple commands, combined just like in
    GoToSeq. No implicit unlocking.
    Competencies: Maze, Unblock, Unlock, GoTo, PickUp, PutNext, Open, Loc, Seq
    """

    def __init__(self, seed=None, **kwargs):
        # Many distractors raise the chance of ambiguous locations in a room.
        super().__init__(
            seed=seed,
            locations=True,
            unblocking=True,
            implicit_unlock=False,
            **kwargs
        )
class Level_MiniBossLevel(LevelGen):
    """Reduced boss level: 2x2 grid, small rooms, occasional locked room."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(
            seed=seed,
            num_cols=2,
            num_rows=2,
            room_size=5,
            num_dists=7,
            locked_room_prob=0.25,
            **kwargs
        )


class Level_BossLevel(LevelGen):
    """Full boss level with the generator defaults."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(seed=seed, **kwargs)


class Level_BossLevelNoUnlock(LevelGen):
    """Boss level without locked rooms or implicit unlocking."""

    def __init__(self, seed=None, **kwargs):
        super().__init__(
            seed=seed,
            locked_room_prob=0,
            implicit_unlock=False,
            **kwargs
        )
# Register the levels defined in this module (passed via globals())
# with the level registry under this module's name.
register_levels(__name__, globals())
| 30.205752
| 99
| 0.598184
| 5,128
| 40,959
| 4.422192
| 0.072933
| 0.03422
| 0.041231
| 0.039688
| 0.833135
| 0.807161
| 0.779997
| 0.769017
| 0.732284
| 0.714292
| 0
| 0.017454
| 0.293586
| 40,959
| 1,355
| 100
| 30.228044
| 0.766296
| 0.098538
| 0
| 0.716075
| 0
| 0
| 0.011642
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.137787
| false
| 0
| 0.002088
| 0.003132
| 0.286013
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3fb930b409e58ae71007bacb0d931810422dc05
| 3,398
|
py
|
Python
|
amazon/python/cut_off_trees_golf_event_675.py
|
Xiaoyu-Xing/algorithms
|
93936aeeef64487285db360b5884e844e0662b8e
|
[
"MIT"
] | null | null | null |
amazon/python/cut_off_trees_golf_event_675.py
|
Xiaoyu-Xing/algorithms
|
93936aeeef64487285db360b5884e844e0662b8e
|
[
"MIT"
] | null | null | null |
amazon/python/cut_off_trees_golf_event_675.py
|
Xiaoyu-Xing/algorithms
|
93936aeeef64487285db360b5884e844e0662b8e
|
[
"MIT"
] | null | null | null |
# BFS between consecutive trees sorted by height.
class Solution:
    def cutOffTree(self, forest):
        """
        Cut every tree in increasing order of height, walking along a
        shortest path between consecutive trees.

        :type forest: List[List[int]]
        :rtype: int  total number of steps, or -1 if any tree is
                     unreachable (0 cells are obstacles)
        """
        height = len(forest)
        if height == 0:
            return 0
        width = len(forest[0])
        if width == 0:
            return 0
        # Collect (value, row, col) for every non-obstacle cell.
        # NOTE(review): cells with value 1 are also kept as "trees";
        # harmless when the grid has 1 only at the start cell -- confirm
        # against the intended problem statement.
        trees = []
        for i in range(height):
            for j in range(width):
                if forest[i][j] > 0:
                    trees.append((forest[i][j], i, j))
        trees.sort(key=lambda x: x[0])
        count = 0
        sr = 0
        sc = 0
        for (_, tr, tc) in trees:
            dist = self.dist(forest, sr, sc, tr, tc)
            if dist < 0:
                return -1  # this tree cannot be reached
            count += dist
            sr, sc = tr, tc
        return count

    def dist(self, forest, sr, sc, tr, tc):
        """BFS shortest-path length from (sr, sc) to (tr, tc), or -1."""
        height = len(forest)
        width = len(forest[0])
        queue = collections.deque()
        queue.append((sr, sc, 0))
        seen = [[False for i in range(width)] for j in range(height)]
        seen[sr][sc] = True
        while queue:
            (nowr, nowc, nowdist) = queue.popleft()
            if nowr == tr and nowc == tc:
                return nowdist
            for nr, nc in ((nowr - 1, nowc), (nowr + 1, nowc),
                           (nowr, nowc - 1), (nowr, nowc + 1)):
                if (0 <= nr < height and 0 <= nc < width
                        and not seen[nr][nc] and forest[nr][nc]):
                    # Bug fix: mark the *neighbor* when it is enqueued.
                    # The original re-marked the current cell, so each
                    # cell could be enqueued many times (the "overtime").
                    seen[nr][nc] = True
                    queue.append((nr, nc, nowdist + 1))
        return -1
# My method, 8s: validity of a cell is checked on dequeue, not enqueue.
class Solution:
    def cutOffTree(self, forest):
        """
        Cut every tree in increasing order of height, walking along a
        shortest path between consecutive trees.

        :type forest: List[List[int]]
        :rtype: int  total number of steps, or -1 if any tree is
                     unreachable (0 cells are obstacles)
        """
        height = len(forest)
        if height == 0:
            return 0
        width = len(forest[0])
        if width == 0:
            return 0
        trees = []
        for i in range(height):
            for j in range(width):
                if forest[i][j] > 0:
                    trees.append((forest[i][j], i, j))
        trees.sort(key=lambda x: x[0])
        count = 0
        sr = 0
        sc = 0
        for (_, tr, tc) in trees:
            dist = self.dist(forest, sr, sc, tr, tc)
            if dist < 0:
                return -1  # this tree cannot be reached
            count += dist
            sr, sc = tr, tc
        return count

    def dist(self, forest, sr, sc, tr, tc):
        """BFS shortest-path length from (sr, sc) to (tr, tc), or -1.

        Neighbors are enqueued unconditionally; bounds, visited and
        obstacle checks happen when a cell is dequeued.
        """
        height = len(forest)
        width = len(forest[0])
        queue = collections.deque()
        queue.append((sr, sc, 0))
        seen = [[False for i in range(width)] for j in range(height)]
        while queue:  # idiomatic truthiness instead of len(queue) != 0
            (nowr, nowc, nowdist) = queue.popleft()
            # Skip out-of-bounds, already-visited, or obstacle cells.
            if (nowr < 0 or nowr >= height or nowc < 0 or nowc >= width
                    or seen[nowr][nowc] or forest[nowr][nowc] <= 0):
                continue
            if nowr == tr and nowc == tc:
                return nowdist
            seen[nowr][nowc] = True
            queue.extend([(nowr + i, nowc + j, nowdist + 1)
                          for i, j in zip([1, -1, 0, 0], [0, 0, 1, -1])])
        return -1
| 31.174312
| 118
| 0.444379
| 428
| 3,398
| 3.523364
| 0.13785
| 0.03183
| 0.03183
| 0.04244
| 0.82626
| 0.774536
| 0.774536
| 0.774536
| 0.738727
| 0.738727
| 0
| 0.025981
| 0.422307
| 3,398
| 108
| 119
| 31.462963
| 0.742231
| 0.099176
| 0
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0
| 0
| 0.225
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f01ec0e1f1f5ab17f6c13ec9e9a9fdff6a860ee
| 5,531
|
py
|
Python
|
test/test_binary_max_heap.py
|
kisliakovsky/structures
|
19969470a7e9b150b077082cc8ca0c2fc9be279e
|
[
"MIT"
] | null | null | null |
test/test_binary_max_heap.py
|
kisliakovsky/structures
|
19969470a7e9b150b077082cc8ca0c2fc9be279e
|
[
"MIT"
] | null | null | null |
test/test_binary_max_heap.py
|
kisliakovsky/structures
|
19969470a7e9b150b077082cc8ca0c2fc9be279e
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from src.heap import Heap, MaxIntKey, Entry
class TestBinaryMaxHeap(TestCase):
    """Tests for the branching-factor-2 max-heap in src.heap."""

    # (key, value) pairs used by every fixture, in push order.
    _PAIRS = [
        (14, 'A'), (5, 'B'), (7, 'C'), (18, 'D'), (42, 'E'),
        (12, 'F'), (11, 'G'), (18, 'H'), (29, 'I'),
    ]

    @staticmethod
    def _entries(pairs):
        """Build a list of heap entries from (key, value) pairs."""
        return [Entry(MaxIntKey(k), v) for k, v in pairs]

    def _filled_heap(self):
        """Return a fresh heap with the fixture entries pushed one at a time."""
        heap = Heap[MaxIntKey, str](2, [])
        for key, value in self._PAIRS:
            heap.push(Entry(MaxIntKey(key), value))
        return heap

    def test_init(self):
        # Heapify-from-list produces a different (but valid) internal
        # layout than pushing one element at a time.
        heap = Heap[MaxIntKey, str](2, self._entries(self._PAIRS))
        self.assertEqual(
            self._entries([
                (42, 'E'), (29, 'I'), (12, 'F'), (18, 'H'), (5, 'B'),
                (7, 'C'), (11, 'G'), (14, 'A'), (18, 'D'),
            ]),
            heap.as_list(),
        )

    def test_push(self):
        heap = self._filled_heap()
        self.assertEqual(
            self._entries([
                (42, 'E'), (29, 'I'), (12, 'F'), (18, 'D'), (14, 'A'),
                (7, 'C'), (11, 'G'), (5, 'B'), (18, 'H'),
            ]),
            heap.as_list(),
        )

    def test_pop(self):
        heap = self._filled_heap()
        # Keys come out in non-increasing order; the two equal keys (18)
        # come out in the order fixed by the heap layout.
        for key, value in [
            (42, 'E'), (29, 'I'), (18, 'H'), (18, 'D'), (14, 'A'),
            (12, 'F'), (11, 'G'), (7, 'C'), (5, 'B'),
        ]:
            self.assertEqual(Entry(MaxIntKey(key), value), heap.pop())

    def test_peek(self):
        heap = self._filled_heap()
        self.assertEqual(Entry(MaxIntKey(42), 'E'), heap.peek())
        # Peeking must leave the heap unchanged.
        self.assertEqual(
            self._entries([
                (42, 'E'), (29, 'I'), (12, 'F'), (18, 'D'), (14, 'A'),
                (7, 'C'), (11, 'G'), (5, 'B'), (18, 'H'),
            ]),
            heap.as_list(),
        )

    def test_change_key(self):
        heap = self._filled_heap()
        # Index 9 is out of range for a 9-element heap.
        with self.assertRaises(IndexError):
            heap.change_key(9, MaxIntKey(20))
        # Raising the last element's key sifts it up to the top.
        heap.change_key(8, MaxIntKey(43))
        self.assertEqual(Entry(MaxIntKey(43), 'H'), heap.peek())

    def test_delete(self):
        heap = self._filled_heap()
        del heap[8]  # delete the last element
        self.assertEqual(
            self._entries([
                (42, 'E'), (29, 'I'), (12, 'F'), (18, 'D'), (14, 'A'),
                (7, 'C'), (11, 'G'), (5, 'B'),
            ]),
            heap.as_list(),
        )

    def test_is_empty(self):
        heap = Heap[MaxIntKey, str](2, [])
        self.assertTrue(heap.is_empty())
        heap.push(Entry(MaxIntKey(14), 'A'))
        self.assertFalse(heap.is_empty())
| 38.409722
| 64
| 0.527029
| 668
| 5,531
| 4.338323
| 0.080838
| 0.487923
| 0.206349
| 0.349206
| 0.883368
| 0.864044
| 0.728433
| 0.702208
| 0.702208
| 0.702208
| 0
| 0.048115
| 0.271018
| 5,531
| 143
| 65
| 38.678322
| 0.670635
| 0
| 0
| 0.759399
| 0
| 0
| 0.018261
| 0
| 0
| 0
| 0
| 0
| 0.135338
| 1
| 0.052632
| false
| 0
| 0.015038
| 0
| 0.075188
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
7f02b14f33fc9149193ced91492fc6be6486074a
| 11,489
|
py
|
Python
|
tests/commands/test_destroy.py
|
riddopic/opta
|
25fa6435fdc7e2ea9c7963ed74100fffb0743063
|
[
"Apache-2.0"
] | null | null | null |
tests/commands/test_destroy.py
|
riddopic/opta
|
25fa6435fdc7e2ea9c7963ed74100fffb0743063
|
[
"Apache-2.0"
] | null | null | null |
tests/commands/test_destroy.py
|
riddopic/opta
|
25fa6435fdc7e2ea9c7963ed74100fffb0743063
|
[
"Apache-2.0"
] | null | null | null |
import os
from click.testing import CliRunner
from pytest_mock import MockFixture
from opta.commands.destroy import destroy
from opta.constants import TF_PLAN_PATH
from opta.exceptions import UserErrors
from opta.layer import Layer
from opta.module import Module
from tests.util import get_call_args
def _dummy_data_path(filename):
    """Absolute path to a fixture config under tests/fixtures/dummy_data.

    Resolved relative to the current working directory (the repo root
    when the suite is run from the checkout).
    """
    return os.path.join(os.getcwd(), "tests", "fixtures", "dummy_data", filename)


# Environment (parent) layer config.
FAKE_ENV_CONFIG = _dummy_data_path("dummy_config_parent.yaml")
# Service layer config declaring a single environment.
FAKE_SERVICE_CONFIG = _dummy_data_path("dummy_config1.yaml")
# Service layer config declaring two environments.
FAKE_SERVICE_CONFIG_MULTIPLE_ENV = _dummy_data_path("dummy_config_2_env.yml")
def test_destroy_env_with_children(mocker: MockFixture) -> None:
    """Destroying an env layer that still has child layers must fail
    with UserErrors before any generation runs."""
    for target in (
        "opta.commands.destroy.opta_acquire_lock",
        "opta.commands.destroy.amplitude_client.send_event",
        "opta.commands.destroy.Terraform.init",
        "opta.commands.destroy.Layer.verify_cloud_credentials",
        "opta.commands.destroy.opta_release_lock",
    ):
        mocker.patch(target)
    mocker.patch(
        "opta.commands.destroy.Terraform.tf_lock_details",
        return_value=(False, ""),
    )
    mocker.patch(
        "opta.commands.destroy.Terraform.download_state", return_value=True
    )
    mocker.patch(
        "opta.commands.destroy._aws_get_configs", return_value=["a", "b"]
    )
    mocked_gen_all = mocker.patch("opta.commands.destroy.gen_all")

    cli = CliRunner()
    outcome = cli.invoke(destroy, ["--config", FAKE_ENV_CONFIG])

    assert outcome.exit_code == 1
    assert isinstance(outcome.exception, UserErrors)
    assert not mocked_gen_all.called
def test_destroy_env_without_children(mocker: MockFixture) -> None:
    """Happy path: destroying an env layer with no children runs one
    targeted destroy plan+apply per module, in reverse module order,
    then deletes the state storage."""
    mocker.patch("opta.commands.destroy.opta_acquire_lock")
    mocked_os_path_exists = mocker.patch("opta.utils.os.path.exists")
    mocked_os_path_exists.return_value = True
    mock_modules = [mocker.Mock(spec=Module) for _ in range(3)]
    for i, module in enumerate(mock_modules):
        module.name = f"fake_module_{i}"
    mock_layer = mocker.Mock(spec=Layer)
    mock_layer.name = "dummy-parent"
    mock_layer.cloud = "aws"
    mock_layer.modules = mock_modules
    mocker.patch("opta.commands.destroy.Layer.load_from_yaml", return_value=mock_layer)
    mocker.patch(
        "opta.commands.destroy.Terraform.tf_lock_details", return_value=(False, "")
    )
    mocker.patch("opta.commands.destroy._fetch_children_layers", return_value=None)
    mocker.patch("opta.commands.destroy.click.confirm", return_value=True)
    mocker.patch("opta.commands.destroy.amplitude_client.send_event")
    # NOTE(review): Terraform.init is patched again below with a handle;
    # this first patch is redundant but kept for parity with the suite.
    mocker.patch("opta.commands.destroy.Terraform.init")
    mocker.patch(
        "opta.commands.destroy.Terraform.get_existing_modules", return_value={"base"}
    )
    mocker.patch("opta.commands.destroy.Terraform.download_state", return_value=True)
    mocker.patch("opta.commands.destroy.Layer.verify_cloud_credentials")
    mocker.patch(
        "opta.commands.destroy._aws_get_configs", return_value=[],
    )
    mock_terraform_init = mocker.patch(
        "opta.commands.destroy.Terraform.init", return_value=None
    )
    mock_terraform_refresh = mocker.patch(
        "opta.commands.destroy.Terraform.refresh", return_value=None
    )
    mock_terraform_plan = mocker.patch(
        "opta.commands.destroy.Terraform.plan", return_value=None
    )
    mock_terraform_plan_displayer = mocker.patch(
        "opta.commands.destroy.PlanDisplayer.display", return_value=None
    )
    mock_terraform_apply = mocker.patch(
        "opta.commands.destroy.Terraform.apply", return_value=None
    )
    mock_terraform_delete_storage_state = mocker.patch(
        "opta.commands.destroy.Terraform.delete_state_storage", return_value=None
    )
    mocker.patch(
        "opta.core.terraform.Terraform.get_existing_modules",
        return_value={"fake_module_2", "fake_module_1", "fake_module_0"},
    )
    mocked_gen_all = mocker.patch("opta.commands.destroy.gen_all")
    mocker.patch("opta.commands.destroy.opta_release_lock")
    runner = CliRunner()
    result = runner.invoke(destroy, ["--config", FAKE_ENV_CONFIG])
    assert result.exit_code == 0
    mock_terraform_init.assert_called_once_with(False, "-reconfigure", layer=mock_layer)
    mock_terraform_refresh.assert_called_once_with(mock_layer)
    # Modules are destroyed in reverse order: one targeted plan each.
    # (The original wrapped each target in *list([...]); a plain
    # positional argument builds an equal mock call object.)
    mock_terraform_plan.assert_has_calls(
        [
            mocker.call(
                "-lock=false",
                "-input=false",
                "-destroy",
                f"-out={TF_PLAN_PATH}",
                f"-target=module.fake_module_{i}",
                layer=mock_layer,
            )
            for i in (2, 1, 0)
        ]
    )
    mock_terraform_plan_displayer.assert_has_calls(
        [mocker.call(detailed_plan=False)] * 3
    )
    mock_terraform_apply.assert_has_calls(
        [mocker.call(mock_layer, TF_PLAN_PATH, no_init=True, quiet=False)] * 3
    )
    mock_terraform_delete_storage_state.assert_called_once_with(mock_layer)
    args = get_call_args(mocked_gen_all)
    assert len(args) == 1
    assert args[0].name == "dummy-parent"
def test_destroy_service(mocker: MockFixture) -> None:
    """Destroying a service layer runs one targeted destroy plan+apply
    per module, in reverse module order, then deletes state storage."""
    mocker.patch("opta.commands.destroy.opta_acquire_lock")
    mocked_os_path_exists = mocker.patch("opta.utils.os.path.exists")
    mocked_os_path_exists.return_value = True
    mock_modules = [mocker.Mock(spec=Module) for _ in range(3)]
    for i, module in enumerate(mock_modules):
        module.name = f"fake_module_{i}"
    mock_layer = mocker.Mock(spec=Layer)
    mock_layer.name = "dummy-config"
    mock_layer.cloud = "aws"
    mock_layer.modules = mock_modules
    mocker.patch("opta.commands.destroy.Layer.load_from_yaml", return_value=mock_layer)
    mocker.patch(
        "opta.commands.destroy.Terraform.tf_lock_details", return_value=(False, "")
    )
    mocker.patch("opta.commands.destroy._fetch_children_layers", return_value=None)
    mocker.patch("opta.commands.destroy.click.confirm", return_value=True)
    mocker.patch("opta.commands.destroy.amplitude_client.send_event")
    # NOTE(review): Terraform.init is patched again below with a handle;
    # this first patch is redundant but kept for parity with the suite.
    mocker.patch("opta.commands.destroy.Terraform.init")
    mocker.patch(
        "opta.commands.destroy.Terraform.get_existing_modules", return_value={"base"}
    )
    mocker.patch("opta.commands.destroy.Terraform.download_state", return_value=True)
    mocker.patch("opta.commands.destroy.Layer.verify_cloud_credentials")
    mocker.patch(
        "opta.commands.destroy._aws_get_configs", return_value=[],
    )
    mock_terraform_init = mocker.patch(
        "opta.commands.destroy.Terraform.init", return_value=None
    )
    mock_terraform_refresh = mocker.patch(
        "opta.commands.destroy.Terraform.refresh", return_value=None
    )
    mock_terraform_plan = mocker.patch(
        "opta.commands.destroy.Terraform.plan", return_value=None
    )
    mock_terraform_plan_displayer = mocker.patch(
        "opta.commands.destroy.PlanDisplayer.display", return_value=None
    )
    mock_terraform_apply = mocker.patch(
        "opta.commands.destroy.Terraform.apply", return_value=None
    )
    mock_terraform_delete_storage_state = mocker.patch(
        "opta.commands.destroy.Terraform.delete_state_storage", return_value=None
    )
    mocker.patch(
        "opta.core.terraform.Terraform.get_existing_modules",
        return_value={"fake_module_2", "fake_module_1", "fake_module_0"},
    )
    mocked_gen_all = mocker.patch("opta.commands.destroy.gen_all")
    mocker.patch("opta.commands.destroy.opta_release_lock")
    runner = CliRunner()
    # NOTE(review): this service test invokes destroy with the *env*
    # config fixture; harmless because Layer.load_from_yaml is mocked,
    # but FAKE_SERVICE_CONFIG looks intended -- confirm.
    result = runner.invoke(destroy, ["--config", FAKE_ENV_CONFIG])
    assert result.exit_code == 0
    mock_terraform_init.assert_called_once_with(False, "-reconfigure", layer=mock_layer)
    mock_terraform_refresh.assert_called_once_with(mock_layer)
    # Modules are destroyed in reverse order: one targeted plan each.
    # (The original wrapped each target in *list([...]); a plain
    # positional argument builds an equal mock call object.)
    mock_terraform_plan.assert_has_calls(
        [
            mocker.call(
                "-lock=false",
                "-input=false",
                "-destroy",
                f"-out={TF_PLAN_PATH}",
                f"-target=module.fake_module_{i}",
                layer=mock_layer,
            )
            for i in (2, 1, 0)
        ]
    )
    mock_terraform_plan_displayer.assert_has_calls(
        [mocker.call(detailed_plan=False)] * 3
    )
    mock_terraform_apply.assert_has_calls(
        [mocker.call(mock_layer, TF_PLAN_PATH, no_init=True, quiet=False)] * 3
    )
    mock_terraform_delete_storage_state.assert_called_once_with(mock_layer)
    args = get_call_args(mocked_gen_all)
    assert len(args) == 1
    assert args[0].name == "dummy-config"
def test_destroy_service_single_env_wrong_input(mocker: MockFixture) -> None:
    """Passing an --env that does not match the single env declared in
    the service YAML must fail with exit code 1."""
    mocker.patch("opta.commands.destroy.amplitude_client.send_event")
    mocker.patch("opta.commands.destroy.Terraform.init")
    mocker.patch("opta.commands.destroy.Terraform.download_state", return_value=True)
    mocker.patch("opta.commands.destroy.Layer.verify_cloud_credentials")
    mocker.patch(
        "opta.commands.destroy._aws_get_configs", return_value=[],
    )
    runner = CliRunner()
    # Actual env present in the service YAML is "dummy-env".
    # (The original used a bare string literal here -- a no-op
    # expression statement -- where a comment was intended.)
    result = runner.invoke(destroy, ["--config", FAKE_SERVICE_CONFIG, "--env", "dummy"])
    assert result.exit_code == 1
def test_destroy_service_multiple_env_wrong_input(mocker: MockFixture) -> None:
    """Passing an --env that matches none of the envs declared in the
    service YAML must fail with exit code 1."""
    mocker.patch("opta.commands.destroy.amplitude_client.send_event")
    mocker.patch("opta.commands.destroy.Terraform.init")
    mocker.patch("opta.commands.destroy.Terraform.download_state", return_value=True)
    mocker.patch("opta.commands.destroy.Layer.verify_cloud_credentials")
    mocker.patch(
        "opta.commands.destroy._aws_get_configs", return_value=[],
    )
    runner = CliRunner()
    # Actual envs present in the service YAML are dummy-env and
    # dummy-env-2.  (The original used a bare string literal here -- a
    # no-op expression statement -- where a comment was intended.)
    result = runner.invoke(
        destroy, ["--config", FAKE_SERVICE_CONFIG_MULTIPLE_ENV, "--env", "dummy"]
    )
    assert result.exit_code == 1
| 35.903125
| 88
| 0.671077
| 1,389
| 11,489
| 5.263499
| 0.094312
| 0.09178
| 0.125154
| 0.179319
| 0.925455
| 0.919984
| 0.919984
| 0.91205
| 0.898919
| 0.898919
| 0
| 0.002856
| 0.20759
| 11,489
| 319
| 89
| 36.015674
| 0.800198
| 0
| 0
| 0.709434
| 0
| 0
| 0.299798
| 0.2432
| 0
| 0
| 0
| 0
| 0.086792
| 1
| 0.018868
| false
| 0
| 0.033962
| 0
| 0.05283
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7f246736e98c2f2f1ca088eb30504fcdaa5dd20d
| 7,915
|
py
|
Python
|
test/regression/daily/systest_pte.py
|
drakejund/contract
|
e1dbfcff94f348a0040062fd04c48b0e42b64762
|
[
"Apache-2.0"
] | 172
|
2017-10-12T07:56:32.000Z
|
2022-03-10T11:08:02.000Z
|
test/regression/daily/systest_pte.py
|
Cielo2017/hyperledger-fabric-gm
|
40f2d1d03a96872b52cf6c5ba8a5c634e36089a6
|
[
"Apache-2.0"
] | 17
|
2019-01-08T07:21:18.000Z
|
2020-11-17T03:46:27.000Z
|
test/regression/daily/systest_pte.py
|
Cielo2017/hyperledger-fabric-gm
|
40f2d1d03a96872b52cf6c5ba8a5c634e36089a6
|
[
"Apache-2.0"
] | 89
|
2017-09-14T04:38:56.000Z
|
2021-05-21T17:24:48.000Z
|
# Copyright IBM Corp. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
######################################################################
# To execute:
# Install: sudo apt-get install python python-pytest
# Run on command line: py.test -v --junitxml results.xml ./systest_pte.py
import unittest
import subprocess
# Marker emitted by the PTE test scripts on success (PEP 8 spacing).
TEST_PASS_STRING = "RESULT=PASS"
######################################################################
### LEVELDB
######################################################################
class Perf_Stress_LevelDB(unittest.TestCase):
    """PTE performance/stress tests against a LevelDB-backed network.

    Each test launches a network via a placeholder shell script and
    asserts that its output contains RESULT=PASS.  All tests are
    currently skipped.
    """

    def _run_placeholder(self):
        """Run the placeholder script and assert the PASS marker.

        Replace TestPlaceholder.sh with the actual test name, e.g.
        ../../tools/PTE/tests/runSkeletonQueriesLevel.sh
        """
        result = subprocess.check_output("./TestPlaceholder.sh", shell=True)
        # check_output returns bytes on Python 3; decode so the
        # substring assertion works on both Python 2 and 3 (the
        # original assertIn(str, bytes) raises TypeError on Python 3).
        if isinstance(result, bytes):
            result = result.decode()
        self.assertIn(TEST_PASS_STRING, result)

    @unittest.skip("skipping")
    def test_FAB3808_TPS_Queries_1_Thread_TinyNtwk(self):
        '''
        Tiny Network: 1 Ord, 1 KB, 1 ZK, 2 Org, 2 Peers, 1 Chan, 1 CC, 2 thrds
        Launch tiny network, use PTE in STRESS mode to continuously
        send 10000 query transactions concurrently to 1 peer in both orgs,
        calculate tps, and remove network and cleanup
        '''
        self._run_placeholder()

    @unittest.skip("skipping")
    def test_FAB3811_TPS_Invokes_1_Thread_TinyNtwk(self):
        '''
        Tiny Network: 1 Ord, 1 KB, 1 ZK, 2 Org, 2 Peers, 1 Chan, 1 CC, 2 thrds
        Launch tiny network, use PTE in STRESS mode to continuously
        send 10000 query transactions concurrently to 1 peer in both orgs,
        query the ledger to ensure the last transaction was written,
        calculate tps, remove network and cleanup
        '''
        self._run_placeholder()

    @unittest.skip("skipping")
    def test_FAB3833_TPS_Queries_8_Thread_TinyNtwk(self):
        '''
        Tiny Network: 1 Ord, 1 KB, 1 ZK, 2 Org, 2 Peers, 1 Chan, 1 CC, 2 thrds
        Launch tiny network, use PTE in STRESS mode to continuously
        send 10000 query transactions concurrently to 1 peer in both orgs,
        calculate tps, and remove network and cleanup
        '''
        self._run_placeholder()

    @unittest.skip("skipping")
    def test_FAB3835_TPS_Invokes_8_Thread_TinyNtwk(self):
        '''
        Tiny Network: 1 Ord, 1 KB, 1 ZK, 2 Org, 2 Peers, 1 Chan, 1 CC, 2 thrds
        Launch tiny network, use PTE in STRESS mode to continuously
        send 10000 query transactions concurrently to 1 peer in both orgs,
        query the ledger to ensure the last transaction was written,
        calculate tps, remove network and cleanup
        '''
        self._run_placeholder()
######################################################################
### COUCHDB
######################################################################
class Perf_Stress_CouchDB(unittest.TestCase):
    """PTE performance/stress tests against a CouchDB-backed network.

    Each test launches a network via a placeholder shell script and
    asserts that its output contains RESULT=PASS.  All tests are
    currently skipped.
    """

    def _run_placeholder(self):
        """Run the placeholder script and assert the PASS marker.

        Replace TestPlaceholder.sh with the actual test name, e.g.
        ../../tools/PTE/tests/runSkeletonQueriesCouch.sh
        """
        result = subprocess.check_output("./TestPlaceholder.sh", shell=True)
        # check_output returns bytes on Python 3; decode so the
        # substring assertion works on both Python 2 and 3 (the
        # original assertIn(str, bytes) raises TypeError on Python 3).
        if isinstance(result, bytes):
            result = result.decode()
        self.assertIn(TEST_PASS_STRING, result)

    @unittest.skip("skipping")
    def test_FAB3807_TPS_Queries_1_Thread_TinyNtwk(self):
        '''
        Tiny Network: 1 Ord, 1 KB, 1 ZK, 2 Org, 2 Peers, 1 Chan, 1 CC, 2 thrds
        Launch tiny network, use PTE in STRESS mode to continuously
        send 10000 query transactions concurrently to 1 peer in both orgs,
        calculate tps, and remove network and cleanup
        '''
        self._run_placeholder()

    @unittest.skip("skipping")
    def test_FAB3810_TPS_Invokes_1_Thread_TinyNtwk(self):
        '''
        Tiny Network: 1 Ord, 1 KB, 1 ZK, 2 Org, 2 Peers, 1 Chan, 1 CC, 2 thrds
        Launch tiny network, use PTE in STRESS mode to continuously
        send 10000 query transactions concurrently to 1 peer in both orgs,
        query the ledger to ensure the last transaction was written,
        calculate tps, remove network and cleanup
        '''
        self._run_placeholder()

    @unittest.skip("skipping")
    def test_FAB3832_TPS_Queries_8_Thread_TinyNtwk(self):
        '''
        Tiny Network: 1 Ord, 1 KB, 1 ZK, 2 Org, 2 Peers, 1 Chan, 1 CC, 2 thrds
        Launch tiny network, use PTE in STRESS mode to continuously
        send 10000 query transactions concurrently to 1 peer in both orgs,
        calculate tps, and remove network and cleanup
        '''
        self._run_placeholder()

    @unittest.skip("skipping")
    def test_FAB3834_TPS_Invokes_8_Thread_TinyNtwk(self):
        '''
        Tiny Network: 1 Ord, 1 KB, 1 ZK, 2 Org, 2 Peers, 1 Chan, 1 CC, 2 thrds
        Launch tiny network, use PTE in STRESS mode to continuously
        send 10000 query transactions concurrently to 1 peer in both orgs,
        query the ledger to ensure the last transaction was written,
        calculate tps, remove network and cleanup
        '''
        self._run_placeholder()

    @unittest.skip("skipping")
    def test_FAB3813_Baseline_StandardNtwk_8_Thread(self):
        '''
        "Standard Network": 2 Orderers, 3 KafkaBrokers, 3 ZooKeepers,
        2 Certificate Authorities (CAs - 1 per Org), 2 Organizations,
        2 Peers per Org, 4 Peers, 2 Channels, 2 ChainCodes, 8 total threads.
        Launch network, use PTE stress mode to send 10000 invoke transactions
        concurrently to a peer in each org on all channels on all chaincodes,
        query the ledger for each to ensure the last transaction was written,
        calculate tps, remove network and cleanup
        '''
        self._run_placeholder()

    @unittest.skip("skipping")
    def test_FAB3814_Payload_1Meg(self):
        '''
        Standard Network: 2 Orderers, 3 KafkaBrokers, 3 ZooKeepers,
        2 Certificate Authorities (CAs - 1 per Org), 2 Organizations,
        2 Peers per Org, 4 Peers, 2 Channels, 2 ChainCodes, 8 total threads.
        Launch network, use PTE stress mode to send 10000 invoke transactions
        concurrently to a peer in each org on all channels on all chaincodes,
        query the ledger for each to ensure the last transaction was written,
        calculate tps, remove network and cleanup
        '''
        self._run_placeholder()

    @unittest.skip("skipping")
    def test_FAB3816_GossipStress_10_PeersPerOrg(self):
        '''
        Standard Network plus extra peers: 2 Orderers, 3 KafkaBrokers, 3 ZKs,
        2 Certificate Authorities (CAs - 1 per Org), 2 Organizations,
        10 Peers per Org, 4 Peers, 2 Channels, 2 ChainCodes, 8 total threads.
        Launch network, use PTE stress mode to send 10000 invoke transactions
        concurrently to a peer in each org on all channels on all chaincodes,
        query the ledger for each to ensure the last transaction was written,
        calculate tps, remove network and cleanup
        '''
        self._run_placeholder()
| 46.558824
| 78
| 0.650158
| 1,017
| 7,915
| 4.961652
| 0.143559
| 0.034879
| 0.033294
| 0.047562
| 0.909631
| 0.905073
| 0.905073
| 0.88803
| 0.88803
| 0.878716
| 0
| 0.036026
| 0.231965
| 7,915
| 169
| 79
| 46.83432
| 0.794045
| 0.533923
| 0
| 0.673469
| 0
| 0
| 0.118323
| 0
| 0
| 0
| 0
| 0
| 0.22449
| 1
| 0.22449
| false
| 0.244898
| 0.040816
| 0
| 0.306122
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
6165d879fef25868f93117273e33aa9df8de6019
| 56,696
|
py
|
Python
|
arelle/plugin/cssfPlausibilityChecks.py
|
gmongelli/Arelle
|
cdc7c0589f738f7588ffa483855d4b1f0c2dbd16
|
[
"Apache-2.0"
] | null | null | null |
arelle/plugin/cssfPlausibilityChecks.py
|
gmongelli/Arelle
|
cdc7c0589f738f7588ffa483855d4b1f0c2dbd16
|
[
"Apache-2.0"
] | null | null | null |
arelle/plugin/cssfPlausibilityChecks.py
|
gmongelli/Arelle
|
cdc7c0589f738f7588ffa483855d4b1f0c2dbd16
|
[
"Apache-2.0"
] | null | null | null |
'''
Created on Jan 4, 2015
@author: Gregorio Mongelli (Acsone S. A.)
(c) Copyright 2015 Acsone S. A., All rights reserved.
'''
import os, sys, time, traceback
from arelle import ModelDocument, RenderingEvaluator
from arelle.ModelDocument import Type
from arelle.XbrlConst import assertionSet
from arelle.FileSource import openFileSource
from arelle.Locale import format_string
linkbaseReferences = {'aset-c_01.00.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/aset-c_01.00.xml',
'aset-c_02.00_c_04.00.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/aset-c_02.00_c_04.00.xml',
'aset-c_03.00.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/aset-c_03.00.xml',
'aset-c_04.00.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/aset-c_04.00.xml',
'aset-c_05.01.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/aset-c_05.01.xml',
'aset-c_26.00.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/aset-c_26.00.xml',
'cssf-find-prec.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/cssf-find-prec.xml',
'vr_cssf001_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf001_m-err-en.xml',
'vr_cssf001_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf001_m-lab-en.xml',
'vr_cssf002_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf002_m-err-en.xml',
'vr_cssf002_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf002_m-lab-en.xml',
'vr_cssf003_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf003_m-err-en.xml',
'vr_cssf003_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf003_m-lab-en.xml',
'vr_cssf007_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf007_p-err-en.xml',
'vr_cssf007_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf007_p-lab-en.xml',
'vr_cssf008_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf008_p-err-en.xml',
'vr_cssf008_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf008_p-lab-en.xml',
'vr_cssf009_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf009_p-err-en.xml',
'vr_cssf009_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf009_p-lab-en.xml',
'vr_cssf010_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf010_m-err-en.xml',
'vr_cssf010_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf010_m-lab-en.xml',
'vr_cssf011_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf011_m-err-en.xml',
'vr_cssf011_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf011_m-lab-en.xml',
'vr_cssf012_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf012_m-err-en.xml',
'vr_cssf012_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf012_m-lab-en.xml',
'vr_cssf013_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf013_m-err-en.xml',
'vr_cssf013_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf013_m-lab-en.xml',
'vr_cssf014_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf014_m-err-en.xml',
'vr_cssf014_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf014_m-lab-en.xml',
'vr_cssf015_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf015_m-err-en.xml',
'vr_cssf015_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf015_m-lab-en.xml',
'vr_cssf016_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf016_m-err-en.xml',
'vr_cssf016_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf016_m-lab-en.xml',
'vr_cssf017aa_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017aa_m-err-en.xml',
'vr_cssf017aa_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017aa_m-lab-en.xml',
'vr_cssf017ab_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ab_m-err-en.xml',
'vr_cssf017ab_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ab_m-lab-en.xml',
'vr_cssf017ac_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ac_m-err-en.xml',
'vr_cssf017ac_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ac_m-lab-en.xml',
'vr_cssf017ad_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ad_m-err-en.xml',
'vr_cssf017ad_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ad_m-lab-en.xml',
'vr_cssf017ae_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ae_m-err-en.xml',
'vr_cssf017ae_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ae_m-lab-en.xml',
'vr_cssf017af_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017af_m-err-en.xml',
'vr_cssf017af_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017af_m-lab-en.xml',
'vr_cssf017ag_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ag_m-err-en.xml',
'vr_cssf017ag_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ag_m-lab-en.xml',
'vr_cssf017ah_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ah_m-err-en.xml',
'vr_cssf017ah_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ah_m-lab-en.xml',
'vr_cssf017ai_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ai_m-err-en.xml',
'vr_cssf017ai_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ai_m-lab-en.xml',
'vr_cssf017aj_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017aj_m-err-en.xml',
'vr_cssf017aj_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017aj_m-lab-en.xml',
'vr_cssf017ak_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ak_m-err-en.xml',
'vr_cssf017ak_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ak_m-lab-en.xml',
'vr_cssf017al_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017al_m-err-en.xml',
'vr_cssf017al_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017al_m-lab-en.xml',
'vr_cssf017am_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017am_m-err-en.xml',
'vr_cssf017am_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017am_m-lab-en.xml',
'vr_cssf017an_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017an_m-err-en.xml',
'vr_cssf017an_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017an_m-lab-en.xml',
'vr_cssf017ao_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ao_m-err-en.xml',
'vr_cssf017ao_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ao_m-lab-en.xml',
'vr_cssf017ap_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ap_m-err-en.xml',
'vr_cssf017ap_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ap_m-lab-en.xml',
'vr_cssf017aq_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017aq_m-err-en.xml',
'vr_cssf017aq_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017aq_m-lab-en.xml',
'vr_cssf017ar_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ar_m-err-en.xml',
'vr_cssf017ar_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ar_m-lab-en.xml',
'vr_cssf017as_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017as_m-err-en.xml',
'vr_cssf017as_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017as_m-lab-en.xml',
'vr_cssf017at_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017at_m-err-en.xml',
'vr_cssf017at_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017at_m-lab-en.xml',
'vr_cssf017au_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017au_m-err-en.xml',
'vr_cssf017au_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017au_m-lab-en.xml',
'vr_cssf017av_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017av_m-err-en.xml',
'vr_cssf017av_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017av_m-lab-en.xml',
'vr_cssf017aw_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017aw_m-err-en.xml',
'vr_cssf017aw_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017aw_m-lab-en.xml',
'vr_cssf017ax_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ax_m-err-en.xml',
'vr_cssf017ax_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ax_m-lab-en.xml',
'vr_cssf017ay_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ay_m-err-en.xml',
'vr_cssf017ay_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ay_m-lab-en.xml',
'vr_cssf017az_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017az_m-err-en.xml',
'vr_cssf017az_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017az_m-lab-en.xml',
'vr_cssf017ba_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ba_m-err-en.xml',
'vr_cssf017ba_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017ba_m-lab-en.xml',
'vr_cssf017bb_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bb_m-err-en.xml',
'vr_cssf017bb_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bb_m-lab-en.xml',
'vr_cssf017bc_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bc_m-err-en.xml',
'vr_cssf017bc_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bc_m-lab-en.xml',
'vr_cssf017bd_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bd_m-err-en.xml',
'vr_cssf017bd_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bd_m-lab-en.xml',
'vr_cssf017be_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017be_m-err-en.xml',
'vr_cssf017be_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017be_m-lab-en.xml',
'vr_cssf017bf_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bf_m-err-en.xml',
'vr_cssf017bf_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bf_m-lab-en.xml',
'vr_cssf017bg_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bg_m-err-en.xml',
'vr_cssf017bg_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bg_m-lab-en.xml',
'vr_cssf017bh_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bh_m-err-en.xml',
'vr_cssf017bh_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bh_m-lab-en.xml',
'vr_cssf017bi_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bi_m-err-en.xml',
'vr_cssf017bi_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bi_m-lab-en.xml',
'vr_cssf017bj_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bj_m-err-en.xml',
'vr_cssf017bj_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bj_m-lab-en.xml',
'vr_cssf017bk_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bk_m-err-en.xml',
'vr_cssf017bk_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bk_m-lab-en.xml',
'vr_cssf017bl_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bl_m-err-en.xml',
'vr_cssf017bl_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bl_m-lab-en.xml',
'vr_cssf017bm_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bm_m-err-en.xml',
'vr_cssf017bm_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bm_m-lab-en.xml',
'vr_cssf017bn_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bn_m-err-en.xml',
'vr_cssf017bn_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bn_m-lab-en.xml',
'vr_cssf017bo_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bo_m-err-en.xml',
'vr_cssf017bo_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bo_m-lab-en.xml',
'vr_cssf017bp_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bp_m-err-en.xml',
'vr_cssf017bp_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bp_m-lab-en.xml',
'vr_cssf017bq_m-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bq_m-err-en.xml',
'vr_cssf017bq_m-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf017bq_m-lab-en.xml',
'vr_cssf018a0_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a0_p-err-en.xml',
'vr_cssf018a0_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a0_p-lab-en.xml',
'vr_cssf018a1_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a1_p-err-en.xml',
'vr_cssf018a1_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a1_p-lab-en.xml',
'vr_cssf018a2_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a2_p-err-en.xml',
'vr_cssf018a2_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a2_p-lab-en.xml',
'vr_cssf018a3_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a3_p-err-en.xml',
'vr_cssf018a3_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a3_p-lab-en.xml',
'vr_cssf018a4_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a4_p-err-en.xml',
'vr_cssf018a4_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a4_p-lab-en.xml',
'vr_cssf018a5_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a5_p-err-en.xml',
'vr_cssf018a5_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a5_p-lab-en.xml',
'vr_cssf018a6_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a6_p-err-en.xml',
'vr_cssf018a6_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a6_p-lab-en.xml',
'vr_cssf018a7_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a7_p-err-en.xml',
'vr_cssf018a7_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a7_p-lab-en.xml',
'vr_cssf018a8_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a8_p-err-en.xml',
'vr_cssf018a8_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a8_p-lab-en.xml',
'vr_cssf018a9_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a9_p-err-en.xml',
'vr_cssf018a9_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018a9_p-lab-en.xml',
'vr_cssf018aa_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018aa_p-err-en.xml',
'vr_cssf018aa_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018aa_p-lab-en.xml',
'vr_cssf018ab_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ab_p-err-en.xml',
'vr_cssf018ab_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ab_p-lab-en.xml',
'vr_cssf018ac_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ac_p-err-en.xml',
'vr_cssf018ac_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ac_p-lab-en.xml',
'vr_cssf018ae_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ae_p-err-en.xml',
'vr_cssf018ae_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ae_p-lab-en.xml',
'vr_cssf018af_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018af_p-err-en.xml',
'vr_cssf018af_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018af_p-lab-en.xml',
'vr_cssf018ah_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ah_p-err-en.xml',
'vr_cssf018ah_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ah_p-lab-en.xml',
'vr_cssf018ai_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ai_p-err-en.xml',
'vr_cssf018ai_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ai_p-lab-en.xml',
'vr_cssf018aj_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018aj_p-err-en.xml',
'vr_cssf018aj_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018aj_p-lab-en.xml',
'vr_cssf018ak_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ak_p-err-en.xml',
'vr_cssf018ak_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ak_p-lab-en.xml',
'vr_cssf018al_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018al_p-err-en.xml',
'vr_cssf018al_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018al_p-lab-en.xml',
'vr_cssf018am_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018am_p-err-en.xml',
'vr_cssf018am_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018am_p-lab-en.xml',
'vr_cssf018an_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018an_p-err-en.xml',
'vr_cssf018an_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018an_p-lab-en.xml',
'vr_cssf018ao_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ao_p-err-en.xml',
'vr_cssf018ao_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ao_p-lab-en.xml',
'vr_cssf018ap_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ap_p-err-en.xml',
'vr_cssf018ap_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ap_p-lab-en.xml',
'vr_cssf018aq_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018aq_p-err-en.xml',
'vr_cssf018aq_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018aq_p-lab-en.xml',
'vr_cssf018ar_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ar_p-err-en.xml',
'vr_cssf018ar_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ar_p-lab-en.xml',
'vr_cssf018as_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018as_p-err-en.xml',
'vr_cssf018as_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018as_p-lab-en.xml',
'vr_cssf018at_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018at_p-err-en.xml',
'vr_cssf018at_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018at_p-lab-en.xml',
'vr_cssf018au_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018au_p-err-en.xml',
'vr_cssf018au_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018au_p-lab-en.xml',
'vr_cssf018av_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018av_p-err-en.xml',
'vr_cssf018av_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018av_p-lab-en.xml',
'vr_cssf018aw_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018aw_p-err-en.xml',
'vr_cssf018aw_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018aw_p-lab-en.xml',
'vr_cssf018ax_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ax_p-err-en.xml',
'vr_cssf018ax_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ax_p-lab-en.xml',
'vr_cssf018ay_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ay_p-err-en.xml',
'vr_cssf018ay_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ay_p-lab-en.xml',
'vr_cssf018b1_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018b1_p-err-en.xml',
'vr_cssf018b1_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018b1_p-lab-en.xml',
'vr_cssf018b2_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018b2_p-err-en.xml',
'vr_cssf018b2_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018b2_p-lab-en.xml',
'vr_cssf018b3_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018b3_p-err-en.xml',
'vr_cssf018b3_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018b3_p-lab-en.xml',
'vr_cssf018b7_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018b7_p-err-en.xml',
'vr_cssf018b7_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018b7_p-lab-en.xml',
'vr_cssf018b8_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018b8_p-err-en.xml',
'vr_cssf018b8_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018b8_p-lab-en.xml',
'vr_cssf018b9_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018b9_p-err-en.xml',
'vr_cssf018b9_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018b9_p-lab-en.xml',
'vr_cssf018bb_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bb_p-err-en.xml',
'vr_cssf018bb_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bb_p-lab-en.xml',
'vr_cssf018bc_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bc_p-err-en.xml',
'vr_cssf018bc_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bc_p-lab-en.xml',
'vr_cssf018bd_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bd_p-err-en.xml',
'vr_cssf018bd_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bd_p-lab-en.xml',
'vr_cssf018be_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018be_p-err-en.xml',
'vr_cssf018be_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018be_p-lab-en.xml',
'vr_cssf018bf_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bf_p-err-en.xml',
'vr_cssf018bf_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bf_p-lab-en.xml',
'vr_cssf018bh_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bh_p-err-en.xml',
'vr_cssf018bh_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bh_p-lab-en.xml',
'vr_cssf018bi_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bi_p-err-en.xml',
'vr_cssf018bi_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bi_p-lab-en.xml',
'vr_cssf018bj_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bj_p-err-en.xml',
'vr_cssf018bj_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bj_p-lab-en.xml',
'vr_cssf018bk_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bk_p-err-en.xml',
'vr_cssf018bk_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bk_p-lab-en.xml',
'vr_cssf018bl_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bl_p-err-en.xml',
'vr_cssf018bl_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bl_p-lab-en.xml',
'vr_cssf018bm_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bm_p-err-en.xml',
'vr_cssf018bm_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bm_p-lab-en.xml',
'vr_cssf018bn_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bn_p-err-en.xml',
'vr_cssf018bn_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bn_p-lab-en.xml',
'vr_cssf018bo_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bo_p-err-en.xml',
'vr_cssf018bo_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bo_p-lab-en.xml',
'vr_cssf018bp_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bp_p-err-en.xml',
'vr_cssf018bp_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bp_p-lab-en.xml',
'vr_cssf018bq_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bq_p-err-en.xml',
'vr_cssf018bq_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bq_p-lab-en.xml',
'vr_cssf018br_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018br_p-err-en.xml',
'vr_cssf018br_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018br_p-lab-en.xml',
'vr_cssf018bs_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bs_p-err-en.xml',
'vr_cssf018bs_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bs_p-lab-en.xml',
'vr_cssf018bt_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bt_p-err-en.xml',
'vr_cssf018bt_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bt_p-lab-en.xml',
'vr_cssf018bu_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bu_p-err-en.xml',
'vr_cssf018bu_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bu_p-lab-en.xml',
'vr_cssf018bv_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bv_p-err-en.xml',
'vr_cssf018bv_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bv_p-lab-en.xml',
'vr_cssf018bw_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bw_p-err-en.xml',
'vr_cssf018bw_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018bw_p-lab-en.xml',
'vr_cssf018by_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018by_p-err-en.xml',
'vr_cssf018by_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018by_p-lab-en.xml',
'vr_cssf018c3_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018c3_p-err-en.xml',
'vr_cssf018c3_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018c3_p-lab-en.xml',
'vr_cssf018c7_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018c7_p-err-en.xml',
'vr_cssf018c7_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018c7_p-lab-en.xml',
'vr_cssf018cb_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cb_p-err-en.xml',
'vr_cssf018cb_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cb_p-lab-en.xml',
'vr_cssf018cc_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cc_p-err-en.xml',
'vr_cssf018cc_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cc_p-lab-en.xml',
'vr_cssf018ce_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ce_p-err-en.xml',
'vr_cssf018ce_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ce_p-lab-en.xml',
'vr_cssf018cf_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cf_p-err-en.xml',
'vr_cssf018cf_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cf_p-lab-en.xml',
'vr_cssf018cg_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cg_p-err-en.xml',
'vr_cssf018cg_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cg_p-lab-en.xml',
'vr_cssf018ch_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ch_p-err-en.xml',
'vr_cssf018ch_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ch_p-lab-en.xml',
'vr_cssf018ci_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ci_p-err-en.xml',
'vr_cssf018ci_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ci_p-lab-en.xml',
'vr_cssf018cj_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cj_p-err-en.xml',
'vr_cssf018cj_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cj_p-lab-en.xml',
'vr_cssf018ck_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ck_p-err-en.xml',
'vr_cssf018ck_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ck_p-lab-en.xml',
'vr_cssf018cl_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cl_p-err-en.xml',
'vr_cssf018cl_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cl_p-lab-en.xml',
'vr_cssf018cm_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cm_p-err-en.xml',
'vr_cssf018cm_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cm_p-lab-en.xml',
'vr_cssf018cn_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cn_p-err-en.xml',
'vr_cssf018cn_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cn_p-lab-en.xml',
'vr_cssf018co_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018co_p-err-en.xml',
'vr_cssf018co_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018co_p-lab-en.xml',
'vr_cssf018cp_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cp_p-err-en.xml',
'vr_cssf018cp_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cp_p-lab-en.xml',
'vr_cssf018cq_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cq_p-err-en.xml',
'vr_cssf018cq_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cq_p-lab-en.xml',
'vr_cssf018cr_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cr_p-err-en.xml',
'vr_cssf018cr_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cr_p-lab-en.xml',
'vr_cssf018cs_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cs_p-err-en.xml',
'vr_cssf018cs_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cs_p-lab-en.xml',
'vr_cssf018ct_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ct_p-err-en.xml',
'vr_cssf018ct_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ct_p-lab-en.xml',
'vr_cssf018cu_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cu_p-err-en.xml',
'vr_cssf018cu_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cu_p-lab-en.xml',
'vr_cssf018cv_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cv_p-err-en.xml',
'vr_cssf018cv_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cv_p-lab-en.xml',
'vr_cssf018cw_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cw_p-err-en.xml',
'vr_cssf018cw_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cw_p-lab-en.xml',
'vr_cssf018cy_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cy_p-err-en.xml',
'vr_cssf018cy_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018cy_p-lab-en.xml',
'vr_cssf018d7_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018d7_p-err-en.xml',
'vr_cssf018d7_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018d7_p-lab-en.xml',
'vr_cssf018d8_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018d8_p-err-en.xml',
'vr_cssf018d8_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018d8_p-lab-en.xml',
'vr_cssf018da_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018da_p-err-en.xml',
'vr_cssf018da_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018da_p-lab-en.xml',
'vr_cssf018db_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018db_p-err-en.xml',
'vr_cssf018db_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018db_p-lab-en.xml',
'vr_cssf018dd_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dd_p-err-en.xml',
'vr_cssf018dd_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dd_p-lab-en.xml',
'vr_cssf018de_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018de_p-err-en.xml',
'vr_cssf018de_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018de_p-lab-en.xml',
'vr_cssf018dg_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dg_p-err-en.xml',
'vr_cssf018dg_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dg_p-lab-en.xml',
'vr_cssf018dh_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dh_p-err-en.xml',
'vr_cssf018dh_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dh_p-lab-en.xml',
'vr_cssf018di_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018di_p-err-en.xml',
'vr_cssf018di_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018di_p-lab-en.xml',
'vr_cssf018dj_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dj_p-err-en.xml',
'vr_cssf018dj_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dj_p-lab-en.xml',
'vr_cssf018dk_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dk_p-err-en.xml',
'vr_cssf018dk_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dk_p-lab-en.xml',
'vr_cssf018dl_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dl_p-err-en.xml',
'vr_cssf018dl_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dl_p-lab-en.xml',
'vr_cssf018dm_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dm_p-err-en.xml',
'vr_cssf018dm_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dm_p-lab-en.xml',
'vr_cssf018dn_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dn_p-err-en.xml',
'vr_cssf018dn_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dn_p-lab-en.xml',
'vr_cssf018do_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018do_p-err-en.xml',
'vr_cssf018do_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018do_p-lab-en.xml',
'vr_cssf018dp_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dp_p-err-en.xml',
'vr_cssf018dp_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dp_p-lab-en.xml',
'vr_cssf018dq_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dq_p-err-en.xml',
'vr_cssf018dq_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dq_p-lab-en.xml',
'vr_cssf018dr_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dr_p-err-en.xml',
'vr_cssf018dr_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dr_p-lab-en.xml',
'vr_cssf018ds_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ds_p-err-en.xml',
'vr_cssf018ds_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018ds_p-lab-en.xml',
'vr_cssf018dt_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dt_p-err-en.xml',
'vr_cssf018dt_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dt_p-lab-en.xml',
'vr_cssf018du_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018du_p-err-en.xml',
'vr_cssf018du_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018du_p-lab-en.xml',
'vr_cssf018dv_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dv_p-err-en.xml',
'vr_cssf018dv_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dv_p-lab-en.xml',
'vr_cssf018dw_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dw_p-err-en.xml',
'vr_cssf018dw_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dw_p-lab-en.xml',
'vr_cssf018dy_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dy_p-err-en.xml',
'vr_cssf018dy_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf018dy_p-lab-en.xml',
'vr_cssf019_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf019_p-err-en.xml',
'vr_cssf019_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf019_p-lab-en.xml',
'vr_cssf020_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf020_p-err-en.xml',
'vr_cssf020_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf020_p-lab-en.xml',
'vr_cssf021_p-err-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf021_p-err-en.xml',
'vr_cssf021_p-lab-en.xml': 'http://www.cssf.lu/lu/fr/xbrl/crr/fws/corep/its-2013-02/2015-02-12/val/vr_cssf021_p-lab-en.xml'
}
# Id of one assertion contributed by the CSSF linkbases; its presence in the
# loaded DTS is used as a marker that the CSSF checks were already loaded.
sampleCssfID = 'cssfC_03.00'
def loadXML(filesource, selectTopView, reloadViews, modelXbrl, controller):
    """Load the document at filesource.url into modelXbrl and update the UI.

    Intended to run on a background thread: progress and errors are reported
    through the controller's log/status, and the view refresh is queued onto
    the UI thread.

    :param filesource: FileSource whose .url points at the document to load.
    :param selectTopView: forwarded to controller.showLoadedXbrl when views
        are rebuilt.
    :param reloadViews: when True, clear the cached relationship sets before
        loading and rebuild the views afterwards.
    :param modelXbrl: model to load the document into (may be None; nothing
        is loaded then and the failure branch logs the elapsed time).
    :param controller: the Arelle (GUI) controller object.
    """
    startedAt = time.time()
    # These are invariants; no need to compute them inside the try block.
    action = _("imported")
    profileStat = "import"
    try:
        if modelXbrl is not None:
            ModelDocument.load(modelXbrl, filesource.url)
            if reloadViews:
                modelXbrl.relationshipSets.clear()  # relationships have to be re-cached
    except ModelDocument.LoadingException:
        controller.showStatus(_("Loading terminated, unrecoverable error"), 20000)
        return
    except Exception as err:
        # join() the traceback frames: traceback.format_tb() returns a list,
        # which would otherwise be rendered with its repr in the message
        msg = _("Exception loading {0}: {1}, at {2}").format(
            filesource.url,
            err,
            "".join(traceback.format_tb(sys.exc_info()[2])))
        # not sure if message box can be shown from background thread
        # tkinter.messagebox.showwarning(_("Exception loading"), msg, parent=self.parent)
        controller.addToLog(msg)
        controller.showStatus(_("Loading terminated, unrecoverable error"), 20000)
        return
    if modelXbrl is not None and modelXbrl.modelDocument is not None:
        statTime = time.time() - startedAt
        modelXbrl.profileStat(profileStat, statTime)
        controller.addToLog(format_string(controller.modelManager.locale,
                                          _("%s %s in %.2f secs"),
                                          (action, filesource.url, statTime)))
        if reloadViews:
            if modelXbrl.hasTableRendering:
                controller.showStatus(_("Initializing table rendering"))
                RenderingEvaluator.init(modelXbrl)
            controller.showStatus(_("CSSF files {0}, preparing views").format(action))
            controller.waitForUiThreadQueue()  # force status update
            controller.uiThreadQueue.put((controller.showLoadedXbrl,
                                          [modelXbrl, True, selectTopView]))
    else:
        controller.addToLog(format_string(controller.modelManager.locale,
                                          _("not successfully %s in %.2f secs"),
                                          (action, time.time() - startedAt)))
def fileOpenURL(filename, modelXbrl, controller, selectTopView=False, reloadViews=False):
    """Open filename (plain file, URL, or archive) and load it into modelXbrl.

    :param filename: path or URL of the document; falsy values are ignored.
    :raises FileNotFoundError: if filename is an archive but no entry inside
        the archive has been selected.
    """
    if not filename:
        return
    # check for archive files
    filesource = openFileSource(filename, controller,
                                checkIfXmlIsEis=controller.modelManager.disclosureSystem and
                                controller.modelManager.disclosureSystem.EFM)
    if filesource.isArchive and not filesource.selection:
        # include the offending name so the error is diagnosable from the log
        raise FileNotFoundError(filename)
    loadXML(filesource, selectTopView, reloadViews, modelXbrl, controller)
def loadAllCSSFFiles(controller):
    """Load every CSSF validation linkbase listed in linkbaseReferences.

    Skips the work entirely if the checks are already present in the loaded
    DTS. The (expensive) view rebuild is triggered only once, when the last
    linkbase is loaded.
    """
    if controller.modelManager is None or controller.modelManager.modelXbrl is None:
        controller.addToLog(_("No DTS loaded."))
        return
    modelXbrl = controller.getModelXbrl()
    currentAssertionSet = modelXbrl.relationshipSet(assertionSet)
    # avoid reloading the linkbases if they have already been loaded once
    if any(obj.id == sampleCssfID for obj in currentAssertionSet.fromModelObjects()):
        controller.addToLog(_("CSSF checks already loaded."))
        return
    lastReference = len(linkbaseReferences) - 1
    # iterate the URLs directly instead of keys() plus a per-key lookup
    for i, url in enumerate(linkbaseReferences.values()):
        # only the final load rebuilds the views
        fileOpenURL(url, modelXbrl, controller, reloadViews=(i == lastReference))
def identifyFileType(modelXbrl, rootNode, filepath):
    """Plugin hook: classify documents whose base name matches a known CSSF
    linkbase as LINKBASE, everything else as UnknownXML."""
    docType = (Type.LINKBASE
               if os.path.basename(filepath) in linkbaseReferences
               else Type.UnknownXML)
    return (docType, ModelDocument.ModelDocument, rootNode)
def cssfToolsMenuExtender(cntlr, menu):
    """Plugin hook: add the "Load CSSF checks" entry to the Tools menu."""
    def _loadChecks():
        loadAllCSSFFiles(cntlr)
    menu.add_command(label=_("Load CSSF checks"),
                     underline=0,
                     command=_loadChecks)
__pluginInfo__ = {
    # Do not use _( ) in pluginInfo itself (it is applied later, after loading).
    'name': 'CSSF plausibility checks',
    'version': '1.3',
    'description': '''CSSF plausibility check in conformance with http://www.cssf.lu/fileadmin/files/Reporting_legal/Recueil_banques/CSSF_Plausibility_checks_Clean_version_220515.pdf.''',
    'license': 'Apache-2',
    'author': 'Acsone S. A.',
    'copyright': '(c) Copyright Acsone S. A., All rights reserved.',
    # classes of mount points (required)
    # hook that recognizes CSSF linkbase files by base name during loading
    'ModelDocument.IdentifyType': identifyFileType,
    # hook that adds the "Load CSSF checks" entry to the Tools menu
    'CntlrWinMain.Menu.Tools': cssfToolsMenuExtender,
}
| 122.984816
| 187
| 0.636712
| 10,430
| 56,696
| 3.327421
| 0.039118
| 0.096816
| 0.109033
| 0.128858
| 0.917476
| 0.912577
| 0.725083
| 0.72174
| 0.717332
| 0.717332
| 0
| 0.148212
| 0.16756
| 56,696
| 460
| 188
| 123.252174
| 0.587126
| 0.010071
| 0
| 0.034404
| 0
| 0.788991
| 0.747135
| 0.149869
| 0
| 0
| 0
| 0
| 0.006881
| 1
| 0.011468
| false
| 0
| 0.018349
| 0
| 0.043578
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
617295639f393bff3296fcf1670a98992e6620f6
| 29,884
|
py
|
Python
|
sdk/python/pulumi_aws/route53/resolver_firewall_rule.py
|
alexbowers/pulumi-aws
|
7dbdb03b1e4f7c0d51d5b5d17233ff4465c3eff5
|
[
"ECL-2.0",
"Apache-2.0"
] | 260
|
2018-06-18T14:57:00.000Z
|
2022-03-29T11:41:03.000Z
|
sdk/python/pulumi_aws/route53/resolver_firewall_rule.py
|
alexbowers/pulumi-aws
|
7dbdb03b1e4f7c0d51d5b5d17233ff4465c3eff5
|
[
"ECL-2.0",
"Apache-2.0"
] | 1,154
|
2018-06-19T20:38:20.000Z
|
2022-03-31T19:48:16.000Z
|
sdk/python/pulumi_aws/route53/resolver_firewall_rule.py
|
alexbowers/pulumi-aws
|
7dbdb03b1e4f7c0d51d5b5d17233ff4465c3eff5
|
[
"ECL-2.0",
"Apache-2.0"
] | 115
|
2018-06-28T03:20:27.000Z
|
2022-03-29T11:41:06.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['ResolverFirewallRuleArgs', 'ResolverFirewallRule']
@pulumi.input_type
class ResolverFirewallRuleArgs:
    # NOTE: Pulumi-generated input type — the pulumi.set/pulumi.get string keys
    # and the @pulumi.getter(name=...) camelCase names form the wire protocol
    # and must stay in sync with the AWS provider schema.
    def __init__(__self__, *,
                 action: pulumi.Input[str],
                 firewall_domain_list_id: pulumi.Input[str],
                 firewall_rule_group_id: pulumi.Input[str],
                 priority: pulumi.Input[int],
                 block_override_dns_type: Optional[pulumi.Input[str]] = None,
                 block_override_domain: Optional[pulumi.Input[str]] = None,
                 block_override_ttl: Optional[pulumi.Input[int]] = None,
                 block_response: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a ResolverFirewallRule resource.
        :param pulumi.Input[str] action: The action that DNS Firewall should take on a DNS query when it matches one of the domains in the rule's domain list. Valid values: `ALLOW`, `BLOCK`, `ALERT`.
        :param pulumi.Input[str] firewall_domain_list_id: The ID of the domain list that you want to use in the rule.
        :param pulumi.Input[str] firewall_rule_group_id: The unique identifier of the firewall rule group where you want to create the rule.
        :param pulumi.Input[int] priority: The setting that determines the processing order of the rule in the rule group. DNS Firewall processes the rules in a rule group by order of priority, starting from the lowest setting.
        :param pulumi.Input[str] block_override_dns_type: The DNS record's type. This determines the format of the record value that you provided in BlockOverrideDomain. Valid values: `CNAME`.
        :param pulumi.Input[str] block_override_domain: The custom DNS record to send back in response to the query.
        :param pulumi.Input[int] block_override_ttl: The recommended amount of time, in seconds, for the DNS resolver or web browser to cache the provided override record. Minimum value of 0. Maximum value of 604800.
        :param pulumi.Input[str] block_response: The way that you want DNS Firewall to block the request. Valid values: `NODATA`, `NXDOMAIN`, `OVERRIDE`.
        :param pulumi.Input[str] name: A name that lets you identify the rule, to manage and use it.
        """
        pulumi.set(__self__, "action", action)
        pulumi.set(__self__, "firewall_domain_list_id", firewall_domain_list_id)
        pulumi.set(__self__, "firewall_rule_group_id", firewall_rule_group_id)
        pulumi.set(__self__, "priority", priority)
        # optional arguments are only recorded when explicitly provided
        if block_override_dns_type is not None:
            pulumi.set(__self__, "block_override_dns_type", block_override_dns_type)
        if block_override_domain is not None:
            pulumi.set(__self__, "block_override_domain", block_override_domain)
        if block_override_ttl is not None:
            pulumi.set(__self__, "block_override_ttl", block_override_ttl)
        if block_response is not None:
            pulumi.set(__self__, "block_response", block_response)
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def action(self) -> pulumi.Input[str]:
        """
        The action that DNS Firewall should take on a DNS query when it matches one of the domains in the rule's domain list. Valid values: `ALLOW`, `BLOCK`, `ALERT`.
        """
        return pulumi.get(self, "action")

    @action.setter
    def action(self, value: pulumi.Input[str]):
        pulumi.set(self, "action", value)

    @property
    @pulumi.getter(name="firewallDomainListId")
    def firewall_domain_list_id(self) -> pulumi.Input[str]:
        """
        The ID of the domain list that you want to use in the rule.
        """
        return pulumi.get(self, "firewall_domain_list_id")

    @firewall_domain_list_id.setter
    def firewall_domain_list_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "firewall_domain_list_id", value)

    @property
    @pulumi.getter(name="firewallRuleGroupId")
    def firewall_rule_group_id(self) -> pulumi.Input[str]:
        """
        The unique identifier of the firewall rule group where you want to create the rule.
        """
        return pulumi.get(self, "firewall_rule_group_id")

    @firewall_rule_group_id.setter
    def firewall_rule_group_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "firewall_rule_group_id", value)

    @property
    @pulumi.getter
    def priority(self) -> pulumi.Input[int]:
        """
        The setting that determines the processing order of the rule in the rule group. DNS Firewall processes the rules in a rule group by order of priority, starting from the lowest setting.
        """
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: pulumi.Input[int]):
        pulumi.set(self, "priority", value)

    @property
    @pulumi.getter(name="blockOverrideDnsType")
    def block_override_dns_type(self) -> Optional[pulumi.Input[str]]:
        """
        The DNS record's type. This determines the format of the record value that you provided in BlockOverrideDomain. Valid values: `CNAME`.
        """
        return pulumi.get(self, "block_override_dns_type")

    @block_override_dns_type.setter
    def block_override_dns_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "block_override_dns_type", value)

    @property
    @pulumi.getter(name="blockOverrideDomain")
    def block_override_domain(self) -> Optional[pulumi.Input[str]]:
        """
        The custom DNS record to send back in response to the query.
        """
        return pulumi.get(self, "block_override_domain")

    @block_override_domain.setter
    def block_override_domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "block_override_domain", value)

    @property
    @pulumi.getter(name="blockOverrideTtl")
    def block_override_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The recommended amount of time, in seconds, for the DNS resolver or web browser to cache the provided override record. Minimum value of 0. Maximum value of 604800.
        """
        return pulumi.get(self, "block_override_ttl")

    @block_override_ttl.setter
    def block_override_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "block_override_ttl", value)

    @property
    @pulumi.getter(name="blockResponse")
    def block_response(self) -> Optional[pulumi.Input[str]]:
        """
        The way that you want DNS Firewall to block the request. Valid values: `NODATA`, `NXDOMAIN`, `OVERRIDE`.
        """
        return pulumi.get(self, "block_response")

    @block_response.setter
    def block_response(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "block_response", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        A name that lets you identify the rule, to manage and use it.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class _ResolverFirewallRuleState:
    # NOTE: Pulumi-generated state type — every field is optional because
    # state lookups may return partial data; string keys must match the
    # AWS provider schema.
    def __init__(__self__, *,
                 action: Optional[pulumi.Input[str]] = None,
                 block_override_dns_type: Optional[pulumi.Input[str]] = None,
                 block_override_domain: Optional[pulumi.Input[str]] = None,
                 block_override_ttl: Optional[pulumi.Input[int]] = None,
                 block_response: Optional[pulumi.Input[str]] = None,
                 firewall_domain_list_id: Optional[pulumi.Input[str]] = None,
                 firewall_rule_group_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None):
        """
        Input properties used for looking up and filtering ResolverFirewallRule resources.
        :param pulumi.Input[str] action: The action that DNS Firewall should take on a DNS query when it matches one of the domains in the rule's domain list. Valid values: `ALLOW`, `BLOCK`, `ALERT`.
        :param pulumi.Input[str] block_override_dns_type: The DNS record's type. This determines the format of the record value that you provided in BlockOverrideDomain. Valid values: `CNAME`.
        :param pulumi.Input[str] block_override_domain: The custom DNS record to send back in response to the query.
        :param pulumi.Input[int] block_override_ttl: The recommended amount of time, in seconds, for the DNS resolver or web browser to cache the provided override record. Minimum value of 0. Maximum value of 604800.
        :param pulumi.Input[str] block_response: The way that you want DNS Firewall to block the request. Valid values: `NODATA`, `NXDOMAIN`, `OVERRIDE`.
        :param pulumi.Input[str] firewall_domain_list_id: The ID of the domain list that you want to use in the rule.
        :param pulumi.Input[str] firewall_rule_group_id: The unique identifier of the firewall rule group where you want to create the rule.
        :param pulumi.Input[str] name: A name that lets you identify the rule, to manage and use it.
        :param pulumi.Input[int] priority: The setting that determines the processing order of the rule in the rule group. DNS Firewall processes the rules in a rule group by order of priority, starting from the lowest setting.
        """
        # only record the fields that were explicitly provided
        if action is not None:
            pulumi.set(__self__, "action", action)
        if block_override_dns_type is not None:
            pulumi.set(__self__, "block_override_dns_type", block_override_dns_type)
        if block_override_domain is not None:
            pulumi.set(__self__, "block_override_domain", block_override_domain)
        if block_override_ttl is not None:
            pulumi.set(__self__, "block_override_ttl", block_override_ttl)
        if block_response is not None:
            pulumi.set(__self__, "block_response", block_response)
        if firewall_domain_list_id is not None:
            pulumi.set(__self__, "firewall_domain_list_id", firewall_domain_list_id)
        if firewall_rule_group_id is not None:
            pulumi.set(__self__, "firewall_rule_group_id", firewall_rule_group_id)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)

    @property
    @pulumi.getter
    def action(self) -> Optional[pulumi.Input[str]]:
        """
        The action that DNS Firewall should take on a DNS query when it matches one of the domains in the rule's domain list. Valid values: `ALLOW`, `BLOCK`, `ALERT`.
        """
        return pulumi.get(self, "action")

    @action.setter
    def action(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "action", value)

    @property
    @pulumi.getter(name="blockOverrideDnsType")
    def block_override_dns_type(self) -> Optional[pulumi.Input[str]]:
        """
        The DNS record's type. This determines the format of the record value that you provided in BlockOverrideDomain. Valid values: `CNAME`.
        """
        return pulumi.get(self, "block_override_dns_type")

    @block_override_dns_type.setter
    def block_override_dns_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "block_override_dns_type", value)

    @property
    @pulumi.getter(name="blockOverrideDomain")
    def block_override_domain(self) -> Optional[pulumi.Input[str]]:
        """
        The custom DNS record to send back in response to the query.
        """
        return pulumi.get(self, "block_override_domain")

    @block_override_domain.setter
    def block_override_domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "block_override_domain", value)

    @property
    @pulumi.getter(name="blockOverrideTtl")
    def block_override_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The recommended amount of time, in seconds, for the DNS resolver or web browser to cache the provided override record. Minimum value of 0. Maximum value of 604800.
        """
        return pulumi.get(self, "block_override_ttl")

    @block_override_ttl.setter
    def block_override_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "block_override_ttl", value)

    @property
    @pulumi.getter(name="blockResponse")
    def block_response(self) -> Optional[pulumi.Input[str]]:
        """
        The way that you want DNS Firewall to block the request. Valid values: `NODATA`, `NXDOMAIN`, `OVERRIDE`.
        """
        return pulumi.get(self, "block_response")

    @block_response.setter
    def block_response(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "block_response", value)

    @property
    @pulumi.getter(name="firewallDomainListId")
    def firewall_domain_list_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the domain list that you want to use in the rule.
        """
        return pulumi.get(self, "firewall_domain_list_id")

    @firewall_domain_list_id.setter
    def firewall_domain_list_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "firewall_domain_list_id", value)

    @property
    @pulumi.getter(name="firewallRuleGroupId")
    def firewall_rule_group_id(self) -> Optional[pulumi.Input[str]]:
        """
        The unique identifier of the firewall rule group where you want to create the rule.
        """
        return pulumi.get(self, "firewall_rule_group_id")

    @firewall_rule_group_id.setter
    def firewall_rule_group_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "firewall_rule_group_id", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        A name that lets you identify the rule, to manage and use it.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        The setting that determines the processing order of the rule in the rule group. DNS Firewall processes the rules in a rule group by order of priority, starting from the lowest setting.
        """
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)
class ResolverFirewallRule(pulumi.CustomResource):
    # Overload: construct from individual keyword arguments.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 action: Optional[pulumi.Input[str]] = None,
                 block_override_dns_type: Optional[pulumi.Input[str]] = None,
                 block_override_domain: Optional[pulumi.Input[str]] = None,
                 block_override_ttl: Optional[pulumi.Input[int]] = None,
                 block_response: Optional[pulumi.Input[str]] = None,
                 firewall_domain_list_id: Optional[pulumi.Input[str]] = None,
                 firewall_rule_group_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        """
        Provides a Route 53 Resolver DNS Firewall rule resource.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_aws as aws

        example_resolver_firewall_domain_list = aws.route53.ResolverFirewallDomainList("exampleResolverFirewallDomainList",
            domains=["example.com"],
            tags={})
        example_resolver_firewall_rule_group = aws.route53.ResolverFirewallRuleGroup("exampleResolverFirewallRuleGroup", tags={})
        example_resolver_firewall_rule = aws.route53.ResolverFirewallRule("exampleResolverFirewallRule",
            action="BLOCK",
            block_override_dns_type="CNAME",
            block_override_domain="example.com",
            block_override_ttl=1,
            block_response="OVERRIDE",
            firewall_domain_list_id=example_resolver_firewall_domain_list.id,
            firewall_rule_group_id=example_resolver_firewall_rule_group.id,
            priority=100)
        ```

        ## Import

        Route 53 Resolver DNS Firewall rules can be imported using the Route 53 Resolver DNS Firewall rule group ID and domain list ID separated by ':', e.g.

        ```sh
        $ pulumi import aws:route53/resolverFirewallRule:ResolverFirewallRule example rslvr-frg-0123456789abcdef:rslvr-fdl-0123456789abcdef
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] action: The action that DNS Firewall should take on a DNS query when it matches one of the domains in the rule's domain list. Valid values: `ALLOW`, `BLOCK`, `ALERT`.
        :param pulumi.Input[str] block_override_dns_type: The DNS record's type. This determines the format of the record value that you provided in BlockOverrideDomain. Valid values: `CNAME`.
        :param pulumi.Input[str] block_override_domain: The custom DNS record to send back in response to the query.
        :param pulumi.Input[int] block_override_ttl: The recommended amount of time, in seconds, for the DNS resolver or web browser to cache the provided override record. Minimum value of 0. Maximum value of 604800.
        :param pulumi.Input[str] block_response: The way that you want DNS Firewall to block the request. Valid values: `NODATA`, `NXDOMAIN`, `OVERRIDE`.
        :param pulumi.Input[str] firewall_domain_list_id: The ID of the domain list that you want to use in the rule.
        :param pulumi.Input[str] firewall_rule_group_id: The unique identifier of the firewall rule group where you want to create the rule.
        :param pulumi.Input[str] name: A name that lets you identify the rule, to manage and use it.
        :param pulumi.Input[int] priority: The setting that determines the processing order of the rule in the rule group. DNS Firewall processes the rules in a rule group by order of priority, starting from the lowest setting.
        """
        ...
@overload
def __init__(__self__,
             resource_name: str,
             args: ResolverFirewallRuleArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Provides a Route 53 Resolver DNS Firewall rule resource.
    ## Example Usage
    ```python
    import pulumi
    import pulumi_aws as aws
    example_resolver_firewall_domain_list = aws.route53.ResolverFirewallDomainList("exampleResolverFirewallDomainList",
    domains=["example.com"],
    tags={})
    example_resolver_firewall_rule_group = aws.route53.ResolverFirewallRuleGroup("exampleResolverFirewallRuleGroup", tags={})
    example_resolver_firewall_rule = aws.route53.ResolverFirewallRule("exampleResolverFirewallRule",
    action="BLOCK",
    block_override_dns_type="CNAME",
    block_override_domain="example.com",
    block_override_ttl=1,
    block_response="OVERRIDE",
    firewall_domain_list_id=example_resolver_firewall_domain_list.id,
    firewall_rule_group_id=example_resolver_firewall_rule_group.id,
    priority=100)
    ```
    ## Import
    Route 53 Resolver DNS Firewall rules can be imported using the Route 53 Resolver DNS Firewall rule group ID and domain list ID separated by ':', e.g.
    ```sh
    $ pulumi import aws:route53/resolverFirewallRule:ResolverFirewallRule example rslvr-frg-0123456789abcdef:rslvr-fdl-0123456789abcdef
    ```
    :param str resource_name: The name of the resource.
    :param ResolverFirewallRuleArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    # Typing-only overload stub: the runtime implementation lives in
    # `_internal_init`, dispatched to by the real `__init__` below.
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """Dispatch to ``_internal_init`` from either overload form.

    Accepts either a single ``ResolverFirewallRuleArgs`` bundle or the
    individual keyword arguments, normalising both into one call.
    """
    resource_args, opts = _utilities.get_resource_args_opts(ResolverFirewallRuleArgs, pulumi.ResourceOptions, *args, **kwargs)
    if resource_args is None:
        # Keyword-argument form: forward everything unchanged.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # Args-object form: unpack its fields as keyword arguments.
        __self__._internal_init(resource_name, opts, **resource_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   action: Optional[pulumi.Input[str]] = None,
                   block_override_dns_type: Optional[pulumi.Input[str]] = None,
                   block_override_domain: Optional[pulumi.Input[str]] = None,
                   block_override_ttl: Optional[pulumi.Input[int]] = None,
                   block_response: Optional[pulumi.Input[str]] = None,
                   firewall_domain_list_id: Optional[pulumi.Input[str]] = None,
                   firewall_rule_group_id: Optional[pulumi.Input[str]] = None,
                   name: Optional[pulumi.Input[str]] = None,
                   priority: Optional[pulumi.Input[int]] = None,
                   __props__=None):
    """Shared implementation behind both ``__init__`` overloads.

    Validates resource options, builds the property bag (raising
    ``TypeError`` for required properties missing on a fresh resource),
    and registers the resource with the Pulumi engine.
    """
    if opts is None:
        opts = pulumi.ResourceOptions()
    elif not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        # Creating (not looking up) the resource: assemble the props bag.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = ResolverFirewallRuleArgs.__new__(ResolverFirewallRuleArgs)
        required = {'action', 'firewall_domain_list_id', 'firewall_rule_group_id', 'priority'}
        # Order matters: properties are checked/assigned in declaration order,
        # matching the generated original.
        for prop, value in (('action', action),
                            ('block_override_dns_type', block_override_dns_type),
                            ('block_override_domain', block_override_domain),
                            ('block_override_ttl', block_override_ttl),
                            ('block_response', block_response),
                            ('firewall_domain_list_id', firewall_domain_list_id),
                            ('firewall_rule_group_id', firewall_rule_group_id),
                            ('name', name),
                            ('priority', priority)):
            if prop in required and value is None and not opts.urn:
                raise TypeError(f"Missing required property '{prop}'")
            __props__.__dict__[prop] = value
    super(ResolverFirewallRule, __self__).__init__(
        'aws:route53/resolverFirewallRule:ResolverFirewallRule',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        action: Optional[pulumi.Input[str]] = None,
        block_override_dns_type: Optional[pulumi.Input[str]] = None,
        block_override_domain: Optional[pulumi.Input[str]] = None,
        block_override_ttl: Optional[pulumi.Input[int]] = None,
        block_response: Optional[pulumi.Input[str]] = None,
        firewall_domain_list_id: Optional[pulumi.Input[str]] = None,
        firewall_rule_group_id: Optional[pulumi.Input[str]] = None,
        name: Optional[pulumi.Input[str]] = None,
        priority: Optional[pulumi.Input[int]] = None) -> 'ResolverFirewallRule':
    """
    Get an existing ResolverFirewallRule resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.
    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] action: The action that DNS Firewall should take on a DNS query when it matches one of the domains in the rule's domain list. Valid values: `ALLOW`, `BLOCK`, `ALERT`.
    :param pulumi.Input[str] block_override_dns_type: The DNS record's type. This determines the format of the record value that you provided in BlockOverrideDomain. Valid values: `CNAME`.
    :param pulumi.Input[str] block_override_domain: The custom DNS record to send back in response to the query.
    :param pulumi.Input[int] block_override_ttl: The recommended amount of time, in seconds, for the DNS resolver or web browser to cache the provided override record. Minimum value of 0. Maximum value of 604800.
    :param pulumi.Input[str] block_response: The way that you want DNS Firewall to block the request. Valid values: `NODATA`, `NXDOMAIN`, `OVERRIDE`.
    :param pulumi.Input[str] firewall_domain_list_id: The ID of the domain list that you want to use in the rule.
    :param pulumi.Input[str] firewall_rule_group_id: The unique identifier of the firewall rule group where you want to create the rule.
    :param pulumi.Input[str] name: A name that lets you identify the rule, to manage and use it.
    :param pulumi.Input[int] priority: The setting that determines the processing order of the rule in the rule group. DNS Firewall processes the rules in a rule group by order of priority, starting from the lowest setting.
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    __props__ = _ResolverFirewallRuleState.__new__(_ResolverFirewallRuleState)
    # Seed the state bag with every supplied qualifier in one pass.
    __props__.__dict__.update({
        'action': action,
        'block_override_dns_type': block_override_dns_type,
        'block_override_domain': block_override_domain,
        'block_override_ttl': block_override_ttl,
        'block_response': block_response,
        'firewall_domain_list_id': firewall_domain_list_id,
        'firewall_rule_group_id': firewall_rule_group_id,
        'name': name,
        'priority': priority,
    })
    return ResolverFirewallRule(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def action(self) -> pulumi.Output[str]:
    """The action DNS Firewall takes on a query matching the rule's domain list. Valid values: `ALLOW`, `BLOCK`, `ALERT`."""
    return pulumi.get(self, "action")
@property
@pulumi.getter(name="blockOverrideDnsType")
def block_override_dns_type(self) -> pulumi.Output[Optional[str]]:
    """The DNS record's type, determining the format of the value in BlockOverrideDomain. Valid values: `CNAME`."""
    return pulumi.get(self, "block_override_dns_type")
@property
@pulumi.getter(name="blockOverrideDomain")
def block_override_domain(self) -> pulumi.Output[Optional[str]]:
    """The custom DNS record to send back in response to the query."""
    return pulumi.get(self, "block_override_domain")
@property
@pulumi.getter(name="blockOverrideTtl")
def block_override_ttl(self) -> pulumi.Output[Optional[int]]:
    """Recommended caching time, in seconds, for the override record (0 to 604800)."""
    return pulumi.get(self, "block_override_ttl")
@property
@pulumi.getter(name="blockResponse")
def block_response(self) -> pulumi.Output[Optional[str]]:
    """How DNS Firewall blocks the request. Valid values: `NODATA`, `NXDOMAIN`, `OVERRIDE`."""
    return pulumi.get(self, "block_response")
@property
@pulumi.getter(name="firewallDomainListId")
def firewall_domain_list_id(self) -> pulumi.Output[str]:
    """The ID of the domain list used in the rule."""
    return pulumi.get(self, "firewall_domain_list_id")
@property
@pulumi.getter(name="firewallRuleGroupId")
def firewall_rule_group_id(self) -> pulumi.Output[str]:
    """The unique identifier of the firewall rule group containing the rule."""
    return pulumi.get(self, "firewall_rule_group_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """A name identifying the rule, for management and use."""
    return pulumi.get(self, "name")
@property
@pulumi.getter
def priority(self) -> pulumi.Output[int]:
    """Processing order of the rule within its group; lower values are processed first."""
    return pulumi.get(self, "priority")
| 50.996587
| 227
| 0.677587
| 3,788
| 29,884
| 5.116156
| 0.058342
| 0.068679
| 0.067183
| 0.0613
| 0.902993
| 0.889216
| 0.878638
| 0.861404
| 0.855573
| 0.850413
| 0
| 0.005582
| 0.232733
| 29,884
| 585
| 228
| 51.083761
| 0.839635
| 0.397939
| 0
| 0.742236
| 1
| 0
| 0.122065
| 0.0561
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161491
| false
| 0.003106
| 0.015528
| 0
| 0.273292
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
61a435bb6e6fe8eb359357872a5d950c745e3529
| 30,979
|
py
|
Python
|
src/gui/abcw_gui.py
|
acid-base-calculation-wizard/ph-calculator
|
eb8c6a13f443bfc776e3f9ac466695432fa250fd
|
[
"MIT"
] | 9
|
2019-07-04T20:05:42.000Z
|
2020-02-12T08:17:05.000Z
|
src/gui/abcw_gui.py
|
acid-base-calculation-wizard/ph-calculator
|
eb8c6a13f443bfc776e3f9ac466695432fa250fd
|
[
"MIT"
] | 2
|
2021-06-02T02:03:49.000Z
|
2021-10-13T00:35:43.000Z
|
src/gui/abcw_gui.py
|
acid-base-calculation-wizard/abcw
|
eb8c6a13f443bfc776e3f9ac466695432fa250fd
|
[
"MIT"
] | 1
|
2019-08-15T06:55:10.000Z
|
2019-08-15T06:55:10.000Z
|
#!/usr/bin/env python3
"""
version : v3.1.0-alpha
MIT License
Copyright (c) 2019-2020 Lee Kyung-ha <i_am@nulleekh.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import sys
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import uic
from ctypes import *
# PyQt5 supplies the GUI widgets; ctypes bridges to the native calculation DLL.
MAX_NUMBER_LENGTH = 255
NUMBER_OF_DB_SOLUTE = 255
NUMBER_OF_DB_DATA = 6
LENGTH_OF_DB_DATA = 32
MAX_SOLUTION_NUMBER = 255
PH_CALCULATOR_STARTPOINT = c_double(-3)
PH_CALCULATOR_ENDPOINT = c_double(18)
PH_CALCULATOR_RESOLUTION = c_double(0.000001)
PH_CALCULATOR_INITIALINTERVAL = c_double(1)
# Constants mirroring the native library's limits and search parameters.
# NOTE(review): the PH_CALCULATOR_* constants are not referenced in this
# module — presumably consumed by the DLL side; confirm before removing.
main_window_ui = uic.loadUiType("gui/general/main_window.ui")[0]
result_window_ui = uic.loadUiType("gui/general/result_window.ui")[0]
about_ui = uic.loadUiType("gui/general/about.ui")[0]
strong_solution_ui = uic.loadUiType("gui/general/strong_solution.ui")[0]
type_1_ui = uic.loadUiType("gui/general/type_1.ui")[0]
type_2_ui = uic.loadUiType("gui/general/type_2.ui")[0]
# Load ui files which are generally used (paths relative to the working dir).
ph_calculator_ui = uic.loadUiType("gui/ph-calculator/ph_calculator.ui")[0]
# Load ui file for ph-calculator.
select_titrant_ui = uic.loadUiType("gui/general/select_titrant.ui")[0]
# Load ui file shared by graph-generator and recipe-finder.
graph_generator_ui = uic.loadUiType("gui/graph-generator/graph_generator.ui")[0]
strong_titrant_graph_ui = uic.loadUiType("gui/graph-generator/strong_titrant_graph.ui")[0]
type_1_titrant_graph_ui = uic.loadUiType("gui/graph-generator/type_1_titrant_graph.ui")[0]
type_2_titrant_graph_ui = uic.loadUiType("gui/graph-generator/type_2_titrant_graph.ui")[0]
# Load ui files for graph-generator.
recipe_finder_ui = uic.loadUiType("gui/recipe-finder/recipe_finder.ui")[0]
target_ph_recipe_ui = uic.loadUiType("gui/recipe-finder/target_ph_recipe.ui")[0]
strong_titrant_recipe_ui = uic.loadUiType("gui/recipe-finder/strong_titrant_recipe.ui")[0]
type_1_titrant_recipe_ui = uic.loadUiType("gui/recipe-finder/type_1_titrant_recipe.ui")[0]
type_2_titrant_recipe_ui = uic.loadUiType("gui/recipe-finder/type_2_titrant_recipe.ui")[0]
# Load ui files for recipe-finder.
pHCalcFunc = cdll.LoadLibrary('pHCalcFunc.dll')
# Native library providing PhCalculator / GraphGenerator / RecipeFinder.
input_title = "Default"
# NOTE(review): input_title appears unused in this chunk — confirm callers.
class MainWindow(QMainWindow, main_window_ui):
    """Top-level menu window launching the three tools and the about box."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the window on the available desktop area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        # Wire each menu button to its launcher.
        for button, handler in ((self.calculatorButton, self.calculator_clicked),
                                (self.generatorButton, self.generator_clicked),
                                (self.finderButton, self.finder_clicked),
                                (self.aboutButton, self.about_clicked)):
            button.clicked.connect(handler)

    @staticmethod
    def calculator_clicked():
        Calculator().exec_()

    @staticmethod
    def generator_clicked():
        Generator().exec_()

    @staticmethod
    def finder_clicked():
        Finder().exec_()

    @staticmethod
    def about_clicked():
        About().exec_()
class About(QDialog, about_ui):
    """Modal about box; dismissed via its done button."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog on the available desktop area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        self.doneButton.clicked.connect(self.done_clicked)

    def done_clicked(self):
        """Close the about box."""
        self.close()
class Calculator(QDialog, ph_calculator_ui):
    """pH-calculator dialog.

    Collects the counts and amounts of strong acids, strong bases, and
    type-1/type-2 solutes, calls ``pHCalcFunc.PhCalculator`` in the native
    library, and shows the resulting pH in a Result window.
    """

    solution_type = "Default"

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog on the available desktop area.
        self.move(QDesktopWidget().availableGeometry().center() - self.frameGeometry().center())
        self.enterButton.clicked.connect(self.enter_clicked)

    @staticmethod
    def _read_count(widget, label):
        """Return the solute count typed into *widget*; abort if not in 0..255.

        Bug fix: the original guard ``0 > n | n > 255`` parsed as the chained
        comparison ``0 > n > 255`` (``|`` binds tighter than ``>``), which can
        never be true, so out-of-range counts were silently accepted.
        """
        count = int(widget.text())
        if count < 0 or count > 255:
            print(f"ERROR - Invalid {label} number.")
            print("INFO - Calculator ended.")
            exit()  # hard abort, preserved from the original code
        return count

    def enter_clicked(self):
        """Gather all inputs, run the native pH calculation, show the result."""
        # ctypes arrays handed to the native library.
        index_type_1 = (c_long * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        index_type_2 = (c_long * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        concentration_acid = (c_double * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        concentration_base = (c_double * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        concentration_type_1 = (c_double * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        mol_type_2 = (c_double * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        # Counts of each kind of solute, validated to 0..255.
        num_acid = self._read_count(self.strongAcid, "num_acid")
        num_base = self._read_count(self.strongBase, "num_base")
        num_type_1 = self._read_count(self.type1, "num_type_1")
        num_type_2 = self._read_count(self.type2, "num_type_2")
        # Running total volume, starting from the pure-water volume.
        volume_all = float(self.pureWater.text())
        # Concentration and volume for every strong acid.
        for i in range(num_acid):
            input_window = StrongSolution()
            input_window.strong_acid()
            input_window.exec_()
            volume_temp = float(input_window.volume.text())
            volume_all += volume_temp
            concentration_temp = float(input_window.concentration.text())
            concentration_acid[i] = c_double(concentration_temp * volume_temp)
        # Concentration and volume for every strong base.
        for i in range(num_base):
            input_window = StrongSolution()
            input_window.strong_base()
            input_window.exec_()
            volume_temp = float(input_window.volume.text())
            volume_all += volume_temp
            concentration_temp = float(input_window.concentration.text())
            concentration_base[i] = c_double(concentration_temp * volume_temp)
        # Type-1 solutes: name resolved to a library index, plus amount.
        for i in range(num_type_1):
            input_window = Type1()
            input_window.exec_()
            name_temp = str(input_window.name.text())
            name_length = c_long(len(name_temp))
            index_type_1[i] = c_long(pHCalcFunc.SpecifyType1Solute(name_temp.encode(), name_length))
            volume_temp = float(input_window.volume.text())
            volume_all += volume_temp
            concentration_temp = float(input_window.concentration.text())
            concentration_type_1[i] = c_double(concentration_temp * volume_temp)
        # Type-2 solutes: name resolved to a library index, amount in mol.
        for i in range(num_type_2):
            input_window = Type2()
            input_window.exec_()
            name_temp = str(input_window.name.text())
            name_length = c_long(len(name_temp))
            index_type_2[i] = c_long(pHCalcFunc.SpecifyType2Solute(name_temp.encode(), name_length))
            mol_type_2[i] = c_double(float(input_window.mol.text()))
        # Run the native pH calculation.
        pHCalcFunc.PhCalculator.restype = c_double
        result = float(pHCalcFunc.PhCalculator(c_long(num_acid),
                                               c_long(num_base), c_long(num_type_1),
                                               c_long(num_type_2), concentration_acid, concentration_base,
                                               concentration_type_1,
                                               mol_type_2, index_type_1, index_type_2, c_double(volume_all)))
        # Show "pH : <value>" rounded to 3 decimal places.
        result_window = Result()
        result_window.represent_result(str("pH : " + str(round(result, 3))))
        result_window.exec_()
        self.close()
class Generator(QDialog, graph_generator_ui):
    """Titration-graph generator dialog.

    Collects the initial solution composition and a titrant description,
    then calls ``pHCalcFunc.GraphGenerator`` in the native library and
    reports success or failure in a Result window.
    """

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog on the available desktop area.
        self.move(QDesktopWidget().availableGeometry().center() - self.frameGeometry().center())
        self.enterButton.clicked.connect(self.enter_clicked)

    @staticmethod
    def _read_count(widget, label):
        """Return the solute count typed into *widget*; abort if not in 0..255.

        Bug fix: the original guard ``0 > n | n > 255`` parsed as the chained
        comparison ``0 > n > 255`` (``|`` binds tighter than ``>``), which can
        never be true, so out-of-range counts were silently accepted.
        """
        count = int(widget.text())
        if count < 0 or count > 255:
            print(f"ERROR - Invalid {label} number.")
            print("INFO - Calculator ended.")
            exit()  # hard abort, preserved from the original code
        return count

    def enter_clicked(self):
        """Gather inputs, describe the titrant, and generate the graph."""
        # ctypes arrays handed to the native library.
        index_type_1 = (c_long * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        index_type_2 = (c_long * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        concentration_acid = (c_double * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        concentration_base = (c_double * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        concentration_type_1 = (c_double * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        mol_type_2 = (c_double * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        # Counts of each kind of solute, validated to 0..255.
        num_acid = self._read_count(self.strongAcid, "num_acid")
        num_base = self._read_count(self.strongBase, "num_base")
        num_type_1 = self._read_count(self.type1, "num_type_1")
        num_type_2 = self._read_count(self.type2, "num_type_2")
        # Running total volume, starting from the pure-water volume.
        volume_all = float(self.pureWater.text())
        # Concentration and volume for every strong acid.
        for i in range(num_acid):
            input_window = StrongSolution()
            input_window.strong_acid()
            input_window.exec_()
            volume_temp = float(input_window.volume.text())
            volume_all += volume_temp
            concentration_temp = float(input_window.concentration.text())
            concentration_acid[i] = c_double(concentration_temp * volume_temp)
        # Concentration and volume for every strong base.
        for i in range(num_base):
            input_window = StrongSolution()
            input_window.strong_base()
            input_window.exec_()
            volume_temp = float(input_window.volume.text())
            volume_all += volume_temp
            concentration_temp = float(input_window.concentration.text())
            concentration_base[i] = c_double(concentration_temp * volume_temp)
        # Type-1 solutes: name resolved to a library index, plus amount.
        for i in range(num_type_1):
            input_window = Type1()
            input_window.exec_()
            name_temp = str(input_window.name.text())
            name_length = c_long(len(name_temp))
            index_type_1[i] = c_long(pHCalcFunc.SpecifyType1Solute(name_temp.encode(), name_length))
            volume_temp = float(input_window.volume.text())
            volume_all += volume_temp
            concentration_temp = float(input_window.concentration.text())
            concentration_type_1[i] = c_double(concentration_temp * volume_temp)
        # Type-2 solutes: name resolved to a library index, amount in mol.
        for i in range(num_type_2):
            input_window = Type2()
            input_window.exec_()
            name_temp = str(input_window.name.text())
            name_length = c_long(len(name_temp))
            index_type_2[i] = c_long(pHCalcFunc.SpecifyType2Solute(name_temp.encode(), name_length))
            mol_type_2[i] = c_double(float(input_window.mol.text()))
        # Titrant type selection (sets the module-global titrant_flag).
        input_window = SelectTitrant()
        input_window.exec_()
        # Sentinel values for titrant parameters the chosen branch may not set.
        time_titrant = -1
        volume_titrant = -1
        concentration_titrant = -1
        index_titrant = -1
        if titrant_flag == 0:
            # Strong-acid titrant.
            input_window = StrongTitrantGraph()
            input_window.strong_acid()
            input_window.exec_()
            time_titrant = int(input_window.times.text())
            volume_titrant = float(input_window.volume.text())
            concentration_titrant = float(input_window.concentration.text())
        elif titrant_flag == 1:
            # Strong-base titrant.
            input_window = StrongTitrantGraph()
            input_window.strong_base()
            input_window.exec_()
            time_titrant = int(input_window.times.text())
            volume_titrant = float(input_window.volume.text())
            concentration_titrant = float(input_window.concentration.text())
        elif titrant_flag == 2:
            # Type-1 titrant, resolved by name.
            input_window = Type1TitrantGraph()
            input_window.exec_()
            name_temp = str(input_window.name.text())
            name_length = c_long(len(name_temp))
            index_titrant = int(pHCalcFunc.SpecifyType1Solute(name_temp.encode(), name_length))
            time_titrant = int(input_window.times.text())
            volume_titrant = float(input_window.volume.text())
            concentration_titrant = float(input_window.concentration.text())
        elif titrant_flag == 3:
            # Type-2 titrant: amount is in mol; volume stays at its sentinel.
            input_window = Type2TitrantGraph()
            input_window.exec_()
            name_temp = str(input_window.name.text())
            name_length = c_long(len(name_temp))
            index_titrant = int(pHCalcFunc.SpecifyType2Solute(name_temp.encode(), name_length))
            time_titrant = int(input_window.times.text())
            concentration_titrant = float(input_window.mol.text())
        # Run the native graph generation; 0 signals success.
        pHCalcFunc.GraphGenerator.restype = c_int
        result = int(pHCalcFunc.GraphGenerator(c_long(num_acid),
                                               c_long(num_base), c_long(num_type_1),
                                               c_long(num_type_2), concentration_acid, concentration_base,
                                               concentration_type_1,
                                               mol_type_2, index_type_1, index_type_2, c_double(volume_all),
                                               c_long(titrant_flag), c_long(time_titrant), c_double(volume_titrant),
                                               c_double(concentration_titrant), c_long(index_titrant)))
        result_window = Result()
        if result == 0:
            result_window.represent_result("Graph Generated")
        else:
            result_window.represent_result("Operation Failed")
        result_window.exec_()
        self.close()
class Finder(QDialog, recipe_finder_ui):
    """Recipe-finder dialog.

    Collects the solution composition, a target pH, and a titrant
    description, then calls ``pHCalcFunc.RecipeFinder`` in the native
    library and shows the computed amount in a Result window.
    """

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog on the available desktop area.
        self.move(QDesktopWidget().availableGeometry().center() - self.frameGeometry().center())
        self.enterButton.clicked.connect(self.enter_clicked)

    @staticmethod
    def _read_count(widget, label):
        """Return the solute count typed into *widget*; abort if not in 0..255.

        Bug fix: the original guard ``0 > n | n > 255`` parsed as the chained
        comparison ``0 > n > 255`` (``|`` binds tighter than ``>``), which can
        never be true, so out-of-range counts were silently accepted.
        """
        count = int(widget.text())
        if count < 0 or count > 255:
            print(f"ERROR - Invalid {label} number.")
            print("INFO - Calculator ended.")
            exit()  # hard abort, preserved from the original code
        return count

    def enter_clicked(self):
        """Gather inputs, target pH and titrant, then find the recipe."""
        # ctypes arrays handed to the native library.
        index_type_1 = (c_long * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        index_type_2 = (c_long * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        concentration_acid = (c_double * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        concentration_base = (c_double * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        concentration_type_1 = (c_double * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        mol_type_2 = (c_double * MAX_SOLUTION_NUMBER)(*range(MAX_SOLUTION_NUMBER))
        # Counts of each kind of solute, validated to 0..255.
        num_acid = self._read_count(self.strongAcid, "num_acid")
        num_base = self._read_count(self.strongBase, "num_base")
        num_type_1 = self._read_count(self.type1, "num_type_1")
        num_type_2 = self._read_count(self.type2, "num_type_2")
        # Running total volume, starting from the pure-water volume.
        volume_all = float(self.pureWater.text())
        # Concentration and volume for every strong acid.
        for i in range(num_acid):
            input_window = StrongSolution()
            input_window.strong_acid()
            input_window.exec_()
            volume_temp = float(input_window.volume.text())
            volume_all += volume_temp
            concentration_temp = float(input_window.concentration.text())
            concentration_acid[i] = c_double(concentration_temp * volume_temp)
        # Concentration and volume for every strong base.
        for i in range(num_base):
            input_window = StrongSolution()
            input_window.strong_base()
            input_window.exec_()
            volume_temp = float(input_window.volume.text())
            volume_all += volume_temp
            concentration_temp = float(input_window.concentration.text())
            concentration_base[i] = c_double(concentration_temp * volume_temp)
        # Type-1 solutes: name resolved to a library index, plus amount.
        for i in range(num_type_1):
            input_window = Type1()
            input_window.exec_()
            name_temp = str(input_window.name.text())
            name_length = c_long(len(name_temp))
            index_type_1[i] = c_long(pHCalcFunc.SpecifyType1Solute(name_temp.encode(), name_length))
            volume_temp = float(input_window.volume.text())
            volume_all += volume_temp
            concentration_temp = float(input_window.concentration.text())
            concentration_type_1[i] = c_double(concentration_temp * volume_temp)
        # Type-2 solutes: name resolved to a library index, amount in mol.
        for i in range(num_type_2):
            input_window = Type2()
            input_window.exec_()
            name_temp = str(input_window.name.text())
            name_length = c_long(len(name_temp))
            index_type_2[i] = c_long(pHCalcFunc.SpecifyType2Solute(name_temp.encode(), name_length))
            mol_type_2[i] = c_double(float(input_window.mol.text()))
        # Target pH (sets the module-global target_ph).
        input_window = TargetPH()
        input_window.exec_()
        # Titrant type selection (sets the module-global titrant_flag).
        input_window = SelectTitrant()
        input_window.exec_()
        # Sentinel values for titrant parameters the chosen branch may not set.
        concentration_titrant = -1
        index_titrant = -1
        if titrant_flag == 0:
            # Strong-acid titrant.
            input_window = StrongTitrantRecipe()
            input_window.strong_acid()
            input_window.exec_()
            concentration_titrant = float(input_window.concentration.text())
        elif titrant_flag == 1:
            # Strong-base titrant.
            input_window = StrongTitrantRecipe()
            input_window.strong_base()
            input_window.exec_()
            concentration_titrant = float(input_window.concentration.text())
        elif titrant_flag == 2:
            # Type-1 titrant, resolved by name.
            input_window = Type1TitrantRecipe()
            input_window.exec_()
            name_temp = str(input_window.name.text())
            name_length = c_long(len(name_temp))
            index_titrant = int(pHCalcFunc.SpecifyType1Solute(name_temp.encode(), name_length))
            concentration_titrant = float(input_window.concentration.text())
        elif titrant_flag == 3:
            # Type-2 titrant: only the library index is needed here.
            input_window = Type2TitrantRecipe()
            input_window.exec_()
            name_temp = str(input_window.name.text())
            name_length = c_long(len(name_temp))
            index_titrant = int(pHCalcFunc.SpecifyType2Solute(name_temp.encode(), name_length))
        # Run the native recipe search.
        pHCalcFunc.RecipeFinder.restype = c_double
        result = float(pHCalcFunc.RecipeFinder(c_long(num_acid),
                                               c_long(num_base), c_long(num_type_1),
                                               c_long(num_type_2), concentration_acid, concentration_base,
                                               concentration_type_1,
                                               mol_type_2, index_type_1, index_type_2, c_double(volume_all),
                                               c_long(titrant_flag), c_double(concentration_titrant),
                                               c_long(index_titrant), c_double(target_ph)))
        # Show "Result : <value>" rounded to 6 decimal places.
        result_window = Result()
        result_window.represent_result("Result : " + str(round(result, 6)))
        result_window.exec_()
        self.close()
class StrongSolution(QDialog, strong_solution_ui):
    """Volume/concentration input dialog for a strong acid or strong base."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog on the available desktop area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        self.enterButton.clicked.connect(self.enter_clicked)

    def strong_acid(self):
        """Label the dialog for strong-acid input."""
        self.solutionType.setText("Strong Acid")

    def strong_base(self):
        """Label the dialog for strong-base input."""
        self.solutionType.setText("Strong Base")

    def enter_clicked(self):
        """Confirm the entry and close the dialog."""
        self.close()
class Type1(QDialog, type_1_ui):
    """Input dialog for a type-1 solute (name, volume, concentration)."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog on the available desktop area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        self.enterButton.clicked.connect(self.enter_clicked)

    def enter_clicked(self):
        """Confirm the entry and close the dialog."""
        self.close()
class Type2(QDialog, type_2_ui):
    """Input dialog for a type-2 solute (name and amount in mol)."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog on the available desktop area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        self.enterButton.clicked.connect(self.enter_clicked)

    def enter_clicked(self):
        """Confirm the entry and close the dialog."""
        self.close()
class SelectTitrant(QDialog, select_titrant_ui):
    """Dialog that records the chosen titrant type in the module-global
    ``titrant_flag`` (0: strong acid, 1: strong base, 2: type 1, 3: type 2)."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog on the available desktop area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        self.strongAcidButton.clicked.connect(self.strong_acid_clicked)
        self.strongBaseButton.clicked.connect(self.strong_base_clicked)
        self.type1Button.clicked.connect(self.type_1_clicked)
        self.type2Button.clicked.connect(self.type_2_clicked)

    def _choose(self, flag):
        """Store *flag* in the module-global ``titrant_flag`` and close."""
        global titrant_flag
        titrant_flag = flag
        self.close()

    def strong_acid_clicked(self):
        self._choose(0)

    def strong_base_clicked(self):
        self._choose(1)

    def type_1_clicked(self):
        self._choose(2)

    def type_2_clicked(self):
        self._choose(3)
class StrongTitrantGraph(QDialog, strong_titrant_graph_ui):
    """Graph-generator input dialog for a strong acid/base titrant."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog on the available desktop area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        self.enterButton.clicked.connect(self.enter_clicked)

    def strong_acid(self):
        """Label the dialog for strong-acid input."""
        self.solutionType.setText("Strong Acid")

    def strong_base(self):
        """Label the dialog for strong-base input."""
        self.solutionType.setText("Strong Base")

    def enter_clicked(self):
        """Confirm the entry and close the dialog."""
        self.close()
class Type1TitrantGraph(QDialog, type_1_titrant_graph_ui):
    """Graph dialog shown when the titrant is of type 1."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog inside the available screen area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        # Pressing the enter button dismisses the dialog.
        self.enterButton.clicked.connect(self.enter_clicked)

    def enter_clicked(self):
        """Dismiss the dialog."""
        self.close()
class Type2TitrantGraph(QDialog, type_2_titrant_graph_ui):
    """Graph dialog shown when the titrant is of type 2."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog inside the available screen area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        # Pressing the enter button dismisses the dialog.
        self.enterButton.clicked.connect(self.enter_clicked)

    def enter_clicked(self):
        """Dismiss the dialog."""
        self.close()
class TargetPH(QDialog, target_ph_recipe_ui):
    """Dialog that asks the user for the target pH of the recipe.

    The entered value is published through the module-level ``target_ph``.
    """

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog inside the available screen area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        # Pressing the enter button stores the value and dismisses the dialog.
        self.enterButton.clicked.connect(self.enter_clicked)

    def enter_clicked(self):
        """Record the entered pH and dismiss the dialog."""
        global target_ph
        # NOTE(review): float() raises ValueError on non-numeric text — confirm
        # the input field is validated upstream.
        target_ph = float(self.pH.text())
        self.close()
class StrongTitrantRecipe(QDialog, strong_titrant_recipe_ui):
    """Recipe dialog shown when the titrant is a strong acid or base."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog inside the available screen area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        # Pressing the enter button dismisses the dialog.
        self.enterButton.clicked.connect(self.enter_clicked)

    def strong_acid(self):
        """Label the displayed solution as a strong acid."""
        self.solutionType.setText("Strong Acid")

    def strong_base(self):
        """Label the displayed solution as a strong base."""
        self.solutionType.setText("Strong Base")

    def enter_clicked(self):
        """Dismiss the dialog."""
        self.close()
class Type1TitrantRecipe(QDialog, type_1_titrant_recipe_ui):
    """Recipe dialog shown when the titrant is of type 1."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog inside the available screen area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        # Pressing the enter button dismisses the dialog.
        self.enterButton.clicked.connect(self.enter_clicked)

    def enter_clicked(self):
        """Dismiss the dialog."""
        self.close()
class Type2TitrantRecipe(QDialog, type_2_titrant_recipe_ui):
    """Recipe dialog shown when the titrant is of type 2."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog inside the available screen area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        # Pressing the enter button dismisses the dialog.
        self.enterButton.clicked.connect(self.enter_clicked)

    def enter_clicked(self):
        """Dismiss the dialog."""
        self.close()
class Result(QDialog, result_window_ui):
    """Dialog that displays the computed result text."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Center the dialog inside the available screen area.
        screen_center = QDesktopWidget().availableGeometry().center()
        self.move(screen_center - self.frameGeometry().center())
        # Pressing the done button dismisses the dialog.
        self.doneButton.clicked.connect(self.done_clicked)

    def done_clicked(self):
        """Dismiss the dialog."""
        self.close()

    def represent_result(self, result):
        """Show *result* in the dialog's result label."""
        self.result.setText(result)
if __name__ == "__main__":
    application = QApplication(sys.argv)
    # The icon is application-wide, so it may be set before the window shows.
    application.setWindowIcon(QIcon('gui/image/icon.png'))
    main_window = MainWindow()
    main_window.show()
    # Hand control to the Qt event loop; propagate its exit status.
    sys.exit(application.exec_())
| 37.145084
| 116
| 0.638271
| 3,665
| 30,979
| 5.112415
| 0.080491
| 0.059294
| 0.03357
| 0.020281
| 0.810108
| 0.789187
| 0.763089
| 0.733522
| 0.722154
| 0.709345
| 0
| 0.01158
| 0.264082
| 30,979
| 833
| 117
| 37.189676
| 0.81029
| 0.164047
| 0
| 0.783465
| 0
| 0
| 0.0552
| 0.021467
| 0
| 0
| 0
| 0
| 0
| 1
| 0.09252
| false
| 0
| 0.009843
| 0
| 0.137795
| 0.047244
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4ee0ea38377e7d2ec42458dedc677e297dc3787d
| 39,416
|
py
|
Python
|
sdk/python/pulumi_oci/core/ipsec.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/core/ipsec.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/core/ipsec.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['IpsecArgs', 'Ipsec']
@pulumi.input_type
class IpsecArgs:
    # Input-argument container for the Ipsec resource. This file is generated
    # by the Pulumi Terraform Bridge (see the file header) — the
    # @pulumi.input_type decorator presumably derives property machinery from
    # this class's structure, so field/getter names must stay in sync.
    def __init__(__self__, *,
                 compartment_id: pulumi.Input[str],
                 cpe_id: pulumi.Input[str],
                 drg_id: pulumi.Input[str],
                 static_routes: pulumi.Input[Sequence[pulumi.Input[str]]],
                 cpe_local_identifier: Optional[pulumi.Input[str]] = None,
                 cpe_local_identifier_type: Optional[pulumi.Input[str]] = None,
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None):
        """
        The set of arguments for constructing a Ipsec resource.
        :param pulumi.Input[str] compartment_id: (Updatable) The OCID of the compartment to contain the IPSec connection.
        :param pulumi.Input[str] cpe_id: The OCID of the [Cpe](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Cpe/) object.
        :param pulumi.Input[str] drg_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the DRG.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] static_routes: (Updatable) Static routes to the CPE. A static route's CIDR must not be a multicast address or class E address.
        :param pulumi.Input[str] cpe_local_identifier: (Updatable) Your identifier for your CPE device. Can be either an IP address or a hostname (specifically, the fully qualified domain name (FQDN)). The type of identifier you provide here must correspond to the value for `cpeLocalIdentifierType`.
        :param pulumi.Input[str] cpe_local_identifier_type: (Updatable) The type of identifier for your CPE device. The value you provide here must correspond to the value for `cpeLocalIdentifier`.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        :param pulumi.Input[str] display_name: (Updatable) A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        """
        # Required properties are always recorded.
        pulumi.set(__self__, "compartment_id", compartment_id)
        pulumi.set(__self__, "cpe_id", cpe_id)
        pulumi.set(__self__, "drg_id", drg_id)
        pulumi.set(__self__, "static_routes", static_routes)
        # Optional properties are recorded only when explicitly supplied, so
        # unset values stay absent rather than becoming explicit None entries.
        if cpe_local_identifier is not None:
            pulumi.set(__self__, "cpe_local_identifier", cpe_local_identifier)
        if cpe_local_identifier_type is not None:
            pulumi.set(__self__, "cpe_local_identifier_type", cpe_local_identifier_type)
        if defined_tags is not None:
            pulumi.set(__self__, "defined_tags", defined_tags)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if freeform_tags is not None:
            pulumi.set(__self__, "freeform_tags", freeform_tags)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> pulumi.Input[str]:
        """
        (Updatable) The OCID of the compartment to contain the IPSec connection.
        """
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "compartment_id", value)

    @property
    @pulumi.getter(name="cpeId")
    def cpe_id(self) -> pulumi.Input[str]:
        """
        The OCID of the [Cpe](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Cpe/) object.
        """
        return pulumi.get(self, "cpe_id")

    @cpe_id.setter
    def cpe_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "cpe_id", value)

    @property
    @pulumi.getter(name="drgId")
    def drg_id(self) -> pulumi.Input[str]:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the DRG.
        """
        return pulumi.get(self, "drg_id")

    @drg_id.setter
    def drg_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "drg_id", value)

    @property
    @pulumi.getter(name="staticRoutes")
    def static_routes(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        (Updatable) Static routes to the CPE. A static route's CIDR must not be a multicast address or class E address.
        """
        return pulumi.get(self, "static_routes")

    @static_routes.setter
    def static_routes(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "static_routes", value)

    @property
    @pulumi.getter(name="cpeLocalIdentifier")
    def cpe_local_identifier(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) Your identifier for your CPE device. Can be either an IP address or a hostname (specifically, the fully qualified domain name (FQDN)). The type of identifier you provide here must correspond to the value for `cpeLocalIdentifierType`.
        """
        return pulumi.get(self, "cpe_local_identifier")

    @cpe_local_identifier.setter
    def cpe_local_identifier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cpe_local_identifier", value)

    @property
    @pulumi.getter(name="cpeLocalIdentifierType")
    def cpe_local_identifier_type(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The type of identifier for your CPE device. The value you provide here must correspond to the value for `cpeLocalIdentifier`.
        """
        return pulumi.get(self, "cpe_local_identifier_type")

    @cpe_local_identifier_type.setter
    def cpe_local_identifier_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cpe_local_identifier_type", value)

    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        """
        return pulumi.get(self, "defined_tags")

    @defined_tags.setter
    def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "defined_tags", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        """
        return pulumi.get(self, "freeform_tags")

    @freeform_tags.setter
    def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "freeform_tags", value)
@pulumi.input_type
class _IpsecState:
    # State container used when looking up / importing existing Ipsec
    # resources. Unlike IpsecArgs, every field (including otherwise-required
    # ones) is optional here, and it additionally carries the provider-owned
    # outputs `state` and `time_created`. Generated code — do not hand-edit
    # (see the file header).
    def __init__(__self__, *,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 cpe_id: Optional[pulumi.Input[str]] = None,
                 cpe_local_identifier: Optional[pulumi.Input[str]] = None,
                 cpe_local_identifier_type: Optional[pulumi.Input[str]] = None,
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 drg_id: Optional[pulumi.Input[str]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 static_routes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 time_created: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Ipsec resources.
        :param pulumi.Input[str] compartment_id: (Updatable) The OCID of the compartment to contain the IPSec connection.
        :param pulumi.Input[str] cpe_id: The OCID of the [Cpe](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Cpe/) object.
        :param pulumi.Input[str] cpe_local_identifier: (Updatable) Your identifier for your CPE device. Can be either an IP address or a hostname (specifically, the fully qualified domain name (FQDN)). The type of identifier you provide here must correspond to the value for `cpeLocalIdentifierType`.
        :param pulumi.Input[str] cpe_local_identifier_type: (Updatable) The type of identifier for your CPE device. The value you provide here must correspond to the value for `cpeLocalIdentifier`.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        :param pulumi.Input[str] display_name: (Updatable) A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.
        :param pulumi.Input[str] drg_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the DRG.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        :param pulumi.Input[str] state: The IPSec connection's current state.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] static_routes: (Updatable) Static routes to the CPE. A static route's CIDR must not be a multicast address or class E address.
        :param pulumi.Input[str] time_created: The date and time the IPSec connection was created, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2016-08-25T21:10:29.600Z`
        """
        # Every property is recorded only when explicitly supplied, so unset
        # values stay absent rather than becoming explicit None entries.
        if compartment_id is not None:
            pulumi.set(__self__, "compartment_id", compartment_id)
        if cpe_id is not None:
            pulumi.set(__self__, "cpe_id", cpe_id)
        if cpe_local_identifier is not None:
            pulumi.set(__self__, "cpe_local_identifier", cpe_local_identifier)
        if cpe_local_identifier_type is not None:
            pulumi.set(__self__, "cpe_local_identifier_type", cpe_local_identifier_type)
        if defined_tags is not None:
            pulumi.set(__self__, "defined_tags", defined_tags)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if drg_id is not None:
            pulumi.set(__self__, "drg_id", drg_id)
        if freeform_tags is not None:
            pulumi.set(__self__, "freeform_tags", freeform_tags)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if static_routes is not None:
            pulumi.set(__self__, "static_routes", static_routes)
        if time_created is not None:
            pulumi.set(__self__, "time_created", time_created)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The OCID of the compartment to contain the IPSec connection.
        """
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "compartment_id", value)

    @property
    @pulumi.getter(name="cpeId")
    def cpe_id(self) -> Optional[pulumi.Input[str]]:
        """
        The OCID of the [Cpe](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Cpe/) object.
        """
        return pulumi.get(self, "cpe_id")

    @cpe_id.setter
    def cpe_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cpe_id", value)

    @property
    @pulumi.getter(name="cpeLocalIdentifier")
    def cpe_local_identifier(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) Your identifier for your CPE device. Can be either an IP address or a hostname (specifically, the fully qualified domain name (FQDN)). The type of identifier you provide here must correspond to the value for `cpeLocalIdentifierType`.
        """
        return pulumi.get(self, "cpe_local_identifier")

    @cpe_local_identifier.setter
    def cpe_local_identifier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cpe_local_identifier", value)

    @property
    @pulumi.getter(name="cpeLocalIdentifierType")
    def cpe_local_identifier_type(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The type of identifier for your CPE device. The value you provide here must correspond to the value for `cpeLocalIdentifier`.
        """
        return pulumi.get(self, "cpe_local_identifier_type")

    @cpe_local_identifier_type.setter
    def cpe_local_identifier_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cpe_local_identifier_type", value)

    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        """
        return pulumi.get(self, "defined_tags")

    @defined_tags.setter
    def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "defined_tags", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="drgId")
    def drg_id(self) -> Optional[pulumi.Input[str]]:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the DRG.
        """
        return pulumi.get(self, "drg_id")

    @drg_id.setter
    def drg_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "drg_id", value)

    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        """
        return pulumi.get(self, "freeform_tags")

    @freeform_tags.setter
    def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "freeform_tags", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        The IPSec connection's current state.
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter(name="staticRoutes")
    def static_routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        (Updatable) Static routes to the CPE. A static route's CIDR must not be a multicast address or class E address.
        """
        return pulumi.get(self, "static_routes")

    @static_routes.setter
    def static_routes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "static_routes", value)

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> Optional[pulumi.Input[str]]:
        """
        The date and time the IPSec connection was created, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2016-08-25T21:10:29.600Z`
        """
        return pulumi.get(self, "time_created")

    @time_created.setter
    def time_created(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_created", value)
class Ipsec(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
cpe_id: Optional[pulumi.Input[str]] = None,
cpe_local_identifier: Optional[pulumi.Input[str]] = None,
cpe_local_identifier_type: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
drg_id: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
static_routes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
"""
This resource provides the Ip Sec Connection resource in Oracle Cloud Infrastructure Core service.
Creates a new IPSec connection between the specified DRG and CPE. For more information, see
[IPSec VPNs](https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/managingIPsec.htm).
If you configure at least one tunnel to use static routing, then in the request you must provide
at least one valid static route (you're allowed a maximum of 10). For example: 10.0.0.0/16.
If you configure both tunnels to use BGP dynamic routing, you can provide an empty list for
the static routes. For more information, see the important note in
[IPSecConnection](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/IPSecConnection/).
For the purposes of access control, you must provide the [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment where you want the
IPSec connection to reside. Notice that the IPSec connection doesn't have to be in the same compartment
as the DRG, CPE, or other Networking Service components. If you're not sure which compartment to
use, put the IPSec connection in the same compartment as the DRG. For more information about
compartments and access control, see
[Overview of the IAM Service](https://docs.cloud.oracle.com/iaas/Content/Identity/Concepts/overview.htm).
You may optionally specify a *display name* for the IPSec connection, otherwise a default is provided.
It does not have to be unique, and you can change it. Avoid entering confidential information.
After creating the IPSec connection, you need to configure your on-premises router
with tunnel-specific information. For tunnel status and the required configuration information, see:
* [IPSecConnectionTunnel](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/IPSecConnectionTunnel/)
* [IPSecConnectionTunnelSharedSecret](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/IPSecConnectionTunnelSharedSecret/)
For each tunnel, you need the IP address of Oracle's VPN headend and the shared secret
(that is, the pre-shared key). For more information, see
[Configuring Your On-Premises Router for an IPSec VPN](https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/configuringCPE.htm).
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_ip_sec_connection = oci.core.Ipsec("testIpSecConnection",
compartment_id=var["compartment_id"],
cpe_id=oci_core_cpe["test_cpe"]["id"],
drg_id=oci_core_drg["test_drg"]["id"],
static_routes=var["ip_sec_connection_static_routes"],
cpe_local_identifier=var["ip_sec_connection_cpe_local_identifier"],
cpe_local_identifier_type=var["ip_sec_connection_cpe_local_identifier_type"],
defined_tags={
"Operations.CostCenter": "42",
},
display_name=var["ip_sec_connection_display_name"],
freeform_tags={
"Department": "Finance",
})
```
## Import
IpSecConnections can be imported using the `id`, e.g.
```sh
$ pulumi import oci:core/ipsec:Ipsec test_ip_sec_connection "id"
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] compartment_id: (Updatable) The OCID of the compartment to contain the IPSec connection.
:param pulumi.Input[str] cpe_id: The OCID of the [Cpe](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Cpe/) object.
:param pulumi.Input[str] cpe_local_identifier: (Updatable) Your identifier for your CPE device. Can be either an IP address or a hostname (specifically, the fully qualified domain name (FQDN)). The type of identifier you provide here must correspond to the value for `cpeLocalIdentifierType`.
:param pulumi.Input[str] cpe_local_identifier_type: (Updatable) The type of identifier for your CPE device. The value you provide here must correspond to the value for `cpeLocalIdentifier`.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param pulumi.Input[str] display_name: (Updatable) A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.
:param pulumi.Input[str] drg_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the DRG.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[Sequence[pulumi.Input[str]]] static_routes: (Updatable) Static routes to the CPE. A static route's CIDR must not be a multicast address or class E address.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: IpsecArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
This resource provides the Ip Sec Connection resource in Oracle Cloud Infrastructure Core service.
Creates a new IPSec connection between the specified DRG and CPE. For more information, see
[IPSec VPNs](https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/managingIPsec.htm).
If you configure at least one tunnel to use static routing, then in the request you must provide
at least one valid static route (you're allowed a maximum of 10). For example: 10.0.0.0/16.
If you configure both tunnels to use BGP dynamic routing, you can provide an empty list for
the static routes. For more information, see the important note in
[IPSecConnection](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/IPSecConnection/).
For the purposes of access control, you must provide the [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment where you want the
IPSec connection to reside. Notice that the IPSec connection doesn't have to be in the same compartment
as the DRG, CPE, or other Networking Service components. If you're not sure which compartment to
use, put the IPSec connection in the same compartment as the DRG. For more information about
compartments and access control, see
[Overview of the IAM Service](https://docs.cloud.oracle.com/iaas/Content/Identity/Concepts/overview.htm).
You may optionally specify a *display name* for the IPSec connection, otherwise a default is provided.
It does not have to be unique, and you can change it. Avoid entering confidential information.
After creating the IPSec connection, you need to configure your on-premises router
with tunnel-specific information. For tunnel status and the required configuration information, see:
* [IPSecConnectionTunnel](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/IPSecConnectionTunnel/)
* [IPSecConnectionTunnelSharedSecret](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/IPSecConnectionTunnelSharedSecret/)
For each tunnel, you need the IP address of Oracle's VPN headend and the shared secret
(that is, the pre-shared key). For more information, see
[Configuring Your On-Premises Router for an IPSec VPN](https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/configuringCPE.htm).
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_ip_sec_connection = oci.core.Ipsec("testIpSecConnection",
compartment_id=var["compartment_id"],
cpe_id=oci_core_cpe["test_cpe"]["id"],
drg_id=oci_core_drg["test_drg"]["id"],
static_routes=var["ip_sec_connection_static_routes"],
cpe_local_identifier=var["ip_sec_connection_cpe_local_identifier"],
cpe_local_identifier_type=var["ip_sec_connection_cpe_local_identifier_type"],
defined_tags={
"Operations.CostCenter": "42",
},
display_name=var["ip_sec_connection_display_name"],
freeform_tags={
"Department": "Finance",
})
```
## Import
IpSecConnections can be imported using the `id`, e.g.
```sh
$ pulumi import oci:core/ipsec:Ipsec test_ip_sec_connection "id"
```
:param str resource_name: The name of the resource.
:param IpsecArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(IpsecArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
cpe_id: Optional[pulumi.Input[str]] = None,
cpe_local_identifier: Optional[pulumi.Input[str]] = None,
cpe_local_identifier_type: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
drg_id: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
static_routes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = IpsecArgs.__new__(IpsecArgs)
if compartment_id is None and not opts.urn:
raise TypeError("Missing required property 'compartment_id'")
__props__.__dict__["compartment_id"] = compartment_id
if cpe_id is None and not opts.urn:
raise TypeError("Missing required property 'cpe_id'")
__props__.__dict__["cpe_id"] = cpe_id
__props__.__dict__["cpe_local_identifier"] = cpe_local_identifier
__props__.__dict__["cpe_local_identifier_type"] = cpe_local_identifier_type
__props__.__dict__["defined_tags"] = defined_tags
__props__.__dict__["display_name"] = display_name
if drg_id is None and not opts.urn:
raise TypeError("Missing required property 'drg_id'")
__props__.__dict__["drg_id"] = drg_id
__props__.__dict__["freeform_tags"] = freeform_tags
if static_routes is None and not opts.urn:
raise TypeError("Missing required property 'static_routes'")
__props__.__dict__["static_routes"] = static_routes
__props__.__dict__["state"] = None
__props__.__dict__["time_created"] = None
super(Ipsec, __self__).__init__(
'oci:core/ipsec:Ipsec',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        compartment_id: Optional[pulumi.Input[str]] = None,
        cpe_id: Optional[pulumi.Input[str]] = None,
        cpe_local_identifier: Optional[pulumi.Input[str]] = None,
        cpe_local_identifier_type: Optional[pulumi.Input[str]] = None,
        defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
        display_name: Optional[pulumi.Input[str]] = None,
        drg_id: Optional[pulumi.Input[str]] = None,
        freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
        state: Optional[pulumi.Input[str]] = None,
        static_routes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        time_created: Optional[pulumi.Input[str]] = None) -> 'Ipsec':
    """
    Look up an existing Ipsec resource by name and provider id, optionally
    qualified by extra state properties.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to look up.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] compartment_id: (Updatable) The OCID of the compartment containing the IPSec connection.
    :param pulumi.Input[str] cpe_id: The OCID of the [Cpe](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Cpe/) object.
    :param pulumi.Input[str] cpe_local_identifier: (Updatable) Identifier of the CPE device (IP address or FQDN); must correspond to `cpeLocalIdentifierType`.
    :param pulumi.Input[str] cpe_local_identifier_type: (Updatable) Type of identifier of the CPE device; must correspond to `cpeLocalIdentifier`.
    :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource, keyed by namespace. Example: `{"Operations.CostCenter": "42"}`
    :param pulumi.Input[str] display_name: (Updatable) A user-friendly, changeable display name; avoid confidential information.
    :param pulumi.Input[str] drg_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the DRG.
    :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form key/value tags for this resource. Example: `{"Department": "Finance"}`
    :param pulumi.Input[str] state: The IPSec connection's current state.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] static_routes: (Updatable) Static routes to the CPE; CIDRs must not be multicast or class E addresses.
    :param pulumi.Input[str] time_created: Creation timestamp in [RFC3339](https://tools.ietf.org/html/rfc3339) format, e.g. `2016-08-25T21:10:29.600Z`.
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    __props__ = _IpsecState.__new__(_IpsecState)
    # Copy every provided state property into the bag in one shot.
    __props__.__dict__.update(
        compartment_id=compartment_id,
        cpe_id=cpe_id,
        cpe_local_identifier=cpe_local_identifier,
        cpe_local_identifier_type=cpe_local_identifier_type,
        defined_tags=defined_tags,
        display_name=display_name,
        drg_id=drg_id,
        freeform_tags=freeform_tags,
        state=state,
        static_routes=static_routes,
        time_created=time_created,
    )
    return Ipsec(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Output[str]:
    """(Updatable) The OCID of the compartment containing the IPSec connection."""
    return pulumi.get(self, "compartment_id")


@property
@pulumi.getter(name="cpeId")
def cpe_id(self) -> pulumi.Output[str]:
    """The OCID of the [Cpe](https://docs.cloud.oracle.com/iaas/api/#/en/iaas/latest/Cpe/) object."""
    return pulumi.get(self, "cpe_id")


@property
@pulumi.getter(name="cpeLocalIdentifier")
def cpe_local_identifier(self) -> pulumi.Output[str]:
    """
    (Updatable) Your identifier for the CPE device — an IP address or a
    hostname (FQDN). Must correspond to `cpeLocalIdentifierType`.
    """
    return pulumi.get(self, "cpe_local_identifier")


@property
@pulumi.getter(name="cpeLocalIdentifierType")
def cpe_local_identifier_type(self) -> pulumi.Output[str]:
    """
    (Updatable) The type of identifier for the CPE device. Must correspond
    to the value of `cpeLocalIdentifier`.
    """
    return pulumi.get(self, "cpe_local_identifier_type")


@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> pulumi.Output[Mapping[str, Any]]:
    """
    (Updatable) Defined tags for this resource; each key is predefined and
    scoped to a namespace. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
    Example: `{"Operations.CostCenter": "42"}`
    """
    return pulumi.get(self, "defined_tags")


@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
    """
    (Updatable) A user-friendly, changeable name; not required to be unique.
    Avoid entering confidential information.
    """
    return pulumi.get(self, "display_name")


@property
@pulumi.getter(name="drgId")
def drg_id(self) -> pulumi.Output[str]:
    """The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the DRG."""
    return pulumi.get(self, "drg_id")


@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> pulumi.Output[Mapping[str, Any]]:
    """
    (Updatable) Free-form key/value tags with no predefined name, type, or
    namespace. See [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
    Example: `{"Department": "Finance"}`
    """
    return pulumi.get(self, "freeform_tags")


@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
    """The IPSec connection's current state."""
    return pulumi.get(self, "state")


@property
@pulumi.getter(name="staticRoutes")
def static_routes(self) -> pulumi.Output[Sequence[str]]:
    """
    (Updatable) Static routes to the CPE. A static route's CIDR must not be
    a multicast address or class E address.
    """
    return pulumi.get(self, "static_routes")


@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> pulumi.Output[str]:
    """
    Creation timestamp in the format defined by
    [RFC3339](https://tools.ietf.org/html/rfc3339), e.g. `2016-08-25T21:10:29.600Z`.
    """
    return pulumi.get(self, "time_created")
| 56.148148
| 347
| 0.678455
| 5,089
| 39,416
| 5.081155
| 0.063077
| 0.062108
| 0.056849
| 0.045092
| 0.935803
| 0.922771
| 0.91318
| 0.902197
| 0.889473
| 0.874468
| 0
| 0.004421
| 0.213796
| 39,416
| 701
| 348
| 56.228245
| 0.830004
| 0.498605
| 0
| 0.716292
| 1
| 0
| 0.105993
| 0.016346
| 0
| 0
| 0
| 0
| 0
| 1
| 0.162921
| false
| 0.002809
| 0.014045
| 0
| 0.275281
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f6387acbbd6a2501be5ed964250f7e9e77d62082
| 7,178
|
py
|
Python
|
tests/test_coax_eig_mat.py
|
mnishida/pymwm
|
820d0a9056982fd37972b0e10f5dad9d1697ed2f
|
[
"MIT"
] | null | null | null |
tests/test_coax_eig_mat.py
|
mnishida/pymwm
|
820d0a9056982fd37972b0e10f5dad9d1697ed2f
|
[
"MIT"
] | null | null | null |
tests/test_coax_eig_mat.py
|
mnishida/pymwm
|
820d0a9056982fd37972b0e10f5dad9d1697ed2f
|
[
"MIT"
] | null | null | null |
import numpy as np
import numpy.testing as npt
from pymwm.coax.samples import Samples
from pymwm.utils import coax_utils
def test_eig_mat():
    """The analytic derivative from ``Samples.eig_mat`` matches finite differences.

    For several (h2, polarization, order n) combinations, checks that the
    derivative matrix ``b`` returned by ``eig_mat`` agrees with a central
    finite difference of ``a`` taken along both the real and the imaginary
    axis of h2 (i.e. that b is a true complex derivative da/dh2).
    """
    size = 0.15
    fill = {"RI": 1.0}
    clad = {"book": "Au", "page": "Stewart-DLF", "bound_check": False}
    p = {
        "wl_max": 10.0,
        "wl_min": 1.0,
        "wl_imag": 50.0,
        "dw": 1.0 / 64,
        "num_n": 6,
        "num_m": 2,
    }
    size2 = 0.1
    wg = Samples(size, fill, clad, p, size2)
    w = 2 * np.pi
    e1 = wg.fill(w)
    e2 = wg.clad(w)
    eps = 1e-4
    # Same propagation-constant magnitude, both signs of the imaginary part.
    for h2 in (-3500 + 6.3379717e5j, -3500 - 6.3379717e5j):
        for pol, n in (("M", 0), ("E", 0), ("M", 1)):
            _assert_eig_mat_deriv(wg, h2, w, pol, n, e1, e2, eps)


def _assert_eig_mat_deriv(wg, h2, w, pol, n, e1, e2, eps):
    """Assert b == da/dh2 via central differences along both complex axes."""
    _, b = wg.eig_mat(h2, w, pol, n, e1, e2)
    # Real-axis central difference.
    a_plus, _ = wg.eig_mat(h2 + eps, w, pol, n, e1, e2)
    a_minus, _ = wg.eig_mat(h2 - eps, w, pol, n, e1, e2)
    npt.assert_almost_equal(b, (a_plus - a_minus) / (2 * eps))
    # Imaginary-axis central difference (must agree if b is holomorphic).
    a_up, _ = wg.eig_mat(h2 + eps * 1j, w, pol, n, e1, e2)
    a_down, _ = wg.eig_mat(h2 - eps * 1j, w, pol, n, e1, e2)
    npt.assert_almost_equal(b, -1j * (a_up - a_down) / (2 * eps))
def test_eig_eq():
    """``eig_eq`` returns det(a) and its h2-derivative as real 2-vectors/2x2 Jacobians."""
    size, size2 = 0.15, 0.1
    fill = {"RI": 1.0}
    clad = {"book": "Au", "page": "Stewart-DLF", "bound_check": False}
    p = {
        "wl_max": 10.0,
        "wl_min": 1.0,
        "wl_imag": 50.0,
        "dw": 1.0 / 64,
        "num_n": 6,
        "num_m": 2,
    }
    wg = Samples(size, fill, clad, p, size2)
    h2 = -350 + 60j
    w = 2 * np.pi
    e1 = wg.fill(w)
    e2 = wg.clad(w)
    eps = 1e-4
    for pol in ("M", "E"):
        for n in range(6):
            a, _ = wg.eig_mat(h2, w, pol, n, e1, e2)
            a_plus, _ = wg.eig_mat(h2 + eps, w, pol, n, e1, e2)
            a_minus, _ = wg.eig_mat(h2 - eps, w, pol, n, e1, e2)
            if n == 0:
                # n == 0 decouples: keep only the relevant 2x2 sub-block.
                sl = slice(None, 2) if pol == "E" else slice(2, None)
                a = a[sl, sl]
                a_plus = a_plus[sl, sl]
                a_minus = a_minus[sl, sl]
            f1, fp1 = wg.eig_eq(
                np.array([h2.real, h2.imag]), w, pol, n, e1, e2, np.array([])
            )
            det = np.linalg.det(a)
            det_deriv = (np.linalg.det(a_plus) - np.linalg.det(a_minus)) / (2 * eps)
            npt.assert_array_almost_equal(f1, [det.real, det.imag])
            npt.assert_array_almost_equal(
                fp1,
                np.array([[det_deriv.real, det_deriv.imag],
                          [-det_deriv.imag, det_deriv.real]]),
            )
def test_eig_eq_cython():
    """The cython ``eig_eq_with_jac`` matches det(a) and its finite-difference derivative."""
    size, size2 = 0.15, 0.1
    fill = {"RI": 1.0}
    clad = {"book": "Au", "page": "Stewart-DLF", "bound_check": False}
    p = {
        "wl_max": 10.0,
        "wl_min": 1.0,
        "wl_imag": 50.0,
        "dw": 1.0 / 64,
        "num_n": 6,
        "num_m": 2,
    }
    wg = Samples(size, fill, clad, p, size2)
    h2 = 300 + 6j
    w = 2 * np.pi / 1.0
    e1 = wg.fill(w)
    e2 = wg.clad(w)
    eps = 1e-4
    for pol in ("M", "E"):
        for n in range(6):
            a, _ = wg.eig_mat(h2, w, pol, n, e1, e2)
            a_plus, _ = wg.eig_mat(h2 + eps, w, pol, n, e1, e2)
            a_minus, _ = wg.eig_mat(h2 - eps, w, pol, n, e1, e2)
            if n == 0:
                # n == 0 decouples: keep only the relevant 2x2 sub-block.
                sl = slice(None, 2) if pol == "E" else slice(2, None)
                a = a[sl, sl]
                a_plus = a_plus[sl, sl]
                a_minus = a_minus[sl, sl]
            f1, fp1 = coax_utils.eig_eq_with_jac(
                np.array([h2.real, h2.imag]), w, pol, n, e1, e2, size, size2,
                np.array([], dtype=complex)
            )
            det = np.linalg.det(a)
            det_deriv = (np.linalg.det(a_plus) - np.linalg.det(a_minus)) / (2 * eps)
            npt.assert_array_almost_equal(f1, [det.real, det.imag])
            npt.assert_array_almost_equal(
                fp1,
                np.array([[det_deriv.real, det_deriv.imag],
                          [-det_deriv.imag, det_deriv.real]]),
            )
| 35.534653
| 105
| 0.472834
| 1,276
| 7,178
| 2.481975
| 0.076803
| 0.055573
| 0.090938
| 0.113672
| 0.916325
| 0.916325
| 0.88317
| 0.882539
| 0.882539
| 0.864856
| 0
| 0.113468
| 0.323488
| 7,178
| 201
| 106
| 35.711443
| 0.538715
| 0.132906
| 0
| 0.884146
| 0
| 0
| 0.044721
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 1
| 0.018293
| false
| 0
| 0.02439
| 0
| 0.042683
| 0.036585
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c9f43d518d80e14b22c55a34a743515ff2cd200
| 244
|
py
|
Python
|
wechatpy/work/__init__.py
|
fuh/wechatpy
|
83c8ca93acef4149c5e61e3726c89b82052f17c1
|
[
"MIT"
] | 2,428
|
2015-07-04T08:55:29.000Z
|
2020-03-16T03:11:22.000Z
|
wechatpy/work/__init__.py
|
fuh/wechatpy
|
83c8ca93acef4149c5e61e3726c89b82052f17c1
|
[
"MIT"
] | 453
|
2015-06-18T10:39:34.000Z
|
2020-03-16T05:12:37.000Z
|
wechatpy/work/__init__.py
|
fuh/wechatpy
|
83c8ca93acef4149c5e61e3726c89b82052f17c1
|
[
"MIT"
] | 669
|
2015-06-18T10:08:12.000Z
|
2020-03-14T15:35:34.000Z
|
# -*- coding: utf-8 -*-
from wechatpy.work.client import WeChatClient # NOQA
from wechatpy.work.crypto import WeChatCrypto # NOQA
from wechatpy.work.parser import parse_message # NOQA
from wechatpy.work.replies import create_reply # NOQA
| 30.5
| 54
| 0.770492
| 33
| 244
| 5.636364
| 0.545455
| 0.258065
| 0.344086
| 0.322581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004785
| 0.143443
| 244
| 7
| 55
| 34.857143
| 0.885167
| 0.168033
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9cd8069e741b5bc3114277a1eabc2fee64bdae1a
| 8,572
|
py
|
Python
|
tests/test_register_models.py
|
preply/dj_anonymizer
|
51800e93bb91201f4b0d914c765ea580b438c80a
|
[
"MIT"
] | 17
|
2019-05-30T20:11:25.000Z
|
2022-01-31T14:38:25.000Z
|
tests/test_register_models.py
|
preply/dj_anonymizer
|
51800e93bb91201f4b0d914c765ea580b438c80a
|
[
"MIT"
] | 39
|
2019-05-30T12:25:50.000Z
|
2022-02-07T12:58:06.000Z
|
tests/test_register_models.py
|
preply/dj_anonymizer
|
51800e93bb91201f4b0d914c765ea580b438c80a
|
[
"MIT"
] | 2
|
2019-09-27T12:11:03.000Z
|
2021-08-07T20:48:19.000Z
|
import datetime
import types
import pytest
from django.contrib.auth.models import Group, Permission, User
from django.db.models.query import QuerySet
from dj_anonymizer import fields, register_models
from dj_anonymizer.anonymizer import Anonymizer
@pytest.mark.django_db
def test_register_anonym():
    """Registering an anonymization spec populates ``Anonymizer.anonym_models``.

    Checks the registered queryset, the excluded fields, and that every
    anonymized field is exposed as a generator producing the expected values.
    """
    class UserAnonym(register_models.AnonymBase):
        email = fields.string('test_email_{seq}@preply.com',
                              seq_callback=datetime.datetime.now)
        username = fields.string('test_username_{seq}@preply.com',
                                 seq_callback=datetime.datetime.now)
        first_name = fields.string('first name {seq}')
        last_name = fields.string('last name {seq}')
        password = fields.password('password')
        is_staff = fields.function(lambda: False)

        class Meta:
            exclude_fields = ['is_active', 'is_superuser',
                              'last_login', 'date_joined']

    register_models.register_anonym([
        (User, UserAnonym)
    ])

    user_key = 'django.contrib.auth.models.User'
    assert len(Anonymizer.clean_models) == 0
    assert len(Anonymizer.skip_models) == 0
    assert len(Anonymizer.anonym_models) == 1
    assert user_key in Anonymizer.anonym_models.keys()

    anonym = Anonymizer.anonym_models[user_key]
    assert isinstance(anonym.Meta.queryset, QuerySet)
    # BUG FIX: this comparison was a bare expression statement with no
    # effect in the original test; it must be asserted to actually check.
    assert anonym.Meta.queryset.model is User
    assert anonym.Meta.exclude_fields == [
        'is_active', 'is_superuser', 'last_login', 'date_joined'
    ]

    # Every anonymized field must be exposed as a value generator.
    for field_name in ('email', 'username', 'first_name',
                       'last_name', 'password', 'is_staff'):
        assert isinstance(getattr(anonym, field_name), types.GeneratorType)

    assert next(anonym.first_name) == 'first name 0'
    assert next(anonym.is_staff) is False
@pytest.mark.django_db
def test_register_anonym_no_exclude():
    """Same as ``test_register_anonym`` but without a ``Meta.exclude_fields`` list."""
    class UserAnonym(register_models.AnonymBase):
        email = fields.string('test_email_{seq}@preply.com',
                              seq_callback=datetime.datetime.now)
        username = fields.string('test_username_{seq}@preply.com',
                                 seq_callback=datetime.datetime.now)
        first_name = fields.string('first name {seq}')
        last_name = fields.string('last name {seq}')
        password = fields.password('password')
        is_staff = fields.function(lambda: False)

    register_models.register_anonym([
        (User, UserAnonym)
    ])

    user_key = 'django.contrib.auth.models.User'
    assert len(Anonymizer.clean_models) == 0
    assert len(Anonymizer.skip_models) == 0
    assert len(Anonymizer.anonym_models) == 1
    assert user_key in Anonymizer.anonym_models.keys()

    anonym = Anonymizer.anonym_models[user_key]
    assert isinstance(anonym.Meta.queryset, QuerySet)
    # BUG FIX: this comparison was a bare expression statement with no
    # effect in the original test; it must be asserted to actually check.
    assert anonym.Meta.queryset.model is User

    # Every anonymized field must be exposed as a value generator.
    for field_name in ('email', 'username', 'first_name',
                       'last_name', 'password', 'is_staff'):
        assert isinstance(getattr(anonym, field_name), types.GeneratorType)

    assert next(anonym.first_name) == 'first name 0'
    assert next(anonym.is_staff) is False
def test_register_anonym_duplicate():
    """Registering the same model twice must raise ``ValueError``."""
    class UserAnonym(register_models.AnonymBase):
        email = fields.string('test_email_{seq}@preply.com',
                              seq_callback=datetime.datetime.now)
        username = fields.string('test_username_{seq}@preply.com',
                                 seq_callback=datetime.datetime.now)
        first_name = fields.string('first name {seq}')
        last_name = fields.string('last name {seq}')
        password = fields.password('password')
        is_staff = fields.function(lambda: False)

        class Meta:
            exclude_fields = ['is_active', 'is_superuser',
                              'last_login', 'date_joined']

    with pytest.raises(ValueError):
        register_models.register_anonym([
            (User, UserAnonym),
            (User, UserAnonym),
        ])
@pytest.mark.django_db
def test_register_clean():
    """``register_clean`` records one queryset per model under its dotted path."""
    register_models.register_clean([
        (User, register_models.AnonymBase),
        (Permission, register_models.AnonymBase(truncate=True)),
        (Group, register_models.AnonymBase())
    ])

    assert len(Anonymizer.clean_models) == 3
    assert len(Anonymizer.skip_models) == 0
    assert len(Anonymizer.anonym_models) == 0

    expected = {
        'django.contrib.auth.models.User': User,
        'django.contrib.auth.models.Permission': Permission,
        'django.contrib.auth.models.Group': Group,
    }
    for dotted_path, model in expected.items():
        assert dotted_path in Anonymizer.clean_models.keys()
        assert isinstance(Anonymizer.clean_models[dotted_path], QuerySet)
        assert Anonymizer.clean_models[dotted_path].model is model
@pytest.mark.django_db
def test_register_clean_duplicate():
    """A duplicated model entry in ``register_clean`` must raise ``ValueError``."""
    with pytest.raises(ValueError):
        register_models.register_clean([
            (User, register_models.AnonymBase),
            (User, register_models.AnonymBase),
            (Permission, register_models.AnonymBase(truncate=True)),
            (Group, register_models.AnonymBase()),
        ])
@pytest.mark.django_db
def test_register_clean_mixed_args():
    """A (spec, model) pair given in the wrong order must raise ``TypeError``."""
    with pytest.raises(TypeError):
        register_models.register_clean([
            (User, register_models.AnonymBase),
            (register_models.AnonymBase, Permission),
        ])
@pytest.mark.django_db
def test_register_clean_none():
    """``None`` in place of an anonymization spec must raise ``TypeError``."""
    with pytest.raises(TypeError):
        register_models.register_clean([
            (User, register_models.AnonymBase),
            (Permission, None),
        ])
@pytest.mark.django_db
def test_register_skip():
    """``register_skip`` stores models in ``skip_models`` and nowhere else."""
    register_models.register_skip([User, Permission, Group])

    assert len(Anonymizer.clean_models) == 0
    assert len(Anonymizer.skip_models) == 3
    assert len(Anonymizer.anonym_models) == 0
    for dotted_path in ('django.contrib.auth.models.User',
                        'django.contrib.auth.models.Permission',
                        'django.contrib.auth.models.Group'):
        assert dotted_path in Anonymizer.skip_models
@pytest.mark.django_db
def test_register_skip_duplicate():
    """A duplicated model in ``register_skip`` must raise ``ValueError``."""
    with pytest.raises(ValueError):
        register_models.register_skip([User, Permission, Group, Group])
| 32.225564
| 79
| 0.664722
| 935
| 8,572
| 5.915508
| 0.079144
| 0.084614
| 0.110649
| 0.149702
| 0.933828
| 0.926053
| 0.919725
| 0.898752
| 0.773459
| 0.73079
| 0
| 0.002117
| 0.228418
| 8,572
| 265
| 80
| 32.34717
| 0.83414
| 0
| 0
| 0.743243
| 0
| 0
| 0.180938
| 0.14979
| 0
| 0
| 0
| 0
| 0.202703
| 1
| 0.040541
| false
| 0.022523
| 0.031532
| 0
| 0.175676
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ce6fec449bc7396790896dd1496a8f5dd710e2c
| 4,727
|
py
|
Python
|
tests/test_correct_sampling.py
|
mibaumgartner/hackathon_health
|
e3ab4971ecb4efd0e43c583104b8485c548320d5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_correct_sampling.py
|
mibaumgartner/hackathon_health
|
e3ab4971ecb4efd0e43c583104b8485c548320d5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_correct_sampling.py
|
mibaumgartner/hackathon_health
|
e3ab4971ecb4efd0e43c583104b8485c548320d5
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from typing import List
import numpy as np
from medhack.distributed_sampler import WeightedDistributedRandomSampler
class CorrectSamplingTest(unittest.TestCase):
    """Tests for ``WeightedDistributedRandomSampler``.

    Verifies that inverse-class-frequency weighting rebalances a 10:1
    imbalanced 0/1 dataset to a ~50/50 draw, and that the generated index
    sequence depends on — and only on — the sampler's rank.

    Improvements over the original: the four tests shared ~15 copy-pasted
    setup lines (now factored into helpers), the builtin ``all`` was
    shadowed by a local variable, and leftover debug ``print`` calls were
    removed.
    """

    N_SAMPLING = 10000   # number of draws inspected per test
    N_NEGATIVES = 1000
    N_POSITIVES = 100

    def _make_dataset(self):
        """Return ``(samples, weights)`` for the imbalanced 0/1 dataset.

        Weights are inverse class frequencies, so sampling with
        replacement should yield roughly balanced classes.
        """
        samples = [0] * self.N_NEGATIVES + [1] * self.N_POSITIVES
        n_all = len(samples)
        n_positives = sum(samples)
        prob_positive = 1 - n_positives / n_all   # weight for positive samples
        prob_negative = n_positives / n_all       # weight for negative samples
        weights = [prob_negative if s == 0 else prob_positive
                   for s in samples]
        return samples, weights

    def _make_sampler(self, weights, num_samples, rank):
        """Build a with-replacement sampler for the given distributed rank."""
        return WeightedDistributedRandomSampler(
            weights,
            num_samples=num_samples,
            replacement=True,
            rank=rank,
        )

    def _draw_ids(self, sampler):
        """Collect the first ``N_SAMPLING + 1`` indices produced by *sampler*."""
        ids: List[int] = []
        for n, sample_id in enumerate(sampler):
            if n > self.N_SAMPLING:
                break
            ids.append(sample_id)
        return ids

    def _mean_of_draws(self, rank):
        """Mean label value over the drawn samples for the given rank."""
        samples, weights = self._make_dataset()
        sampler = self._make_sampler(weights, len(samples), rank)
        drawn = [samples[i] for i in self._draw_ids(sampler)]
        return float(np.mean(drawn))

    def test_is_balanced(self):
        self.assertAlmostEqual(0.5, self._mean_of_draws(rank=0), delta=0.05)

    def test_is_balanced_other_rank(self):
        self.assertAlmostEqual(0.5, self._mean_of_draws(rank=10), delta=0.05)

    def test_is_different(self):
        # Different ranks must yield different index sequences.
        samples, weights = self._make_dataset()
        ids_a = self._draw_ids(self._make_sampler(weights, len(samples), rank=0))
        ids_b = self._draw_ids(self._make_sampler(weights, len(samples), rank=1))
        self.assertNotEqual(ids_a, ids_b)

    def test_is_equal(self):
        # The same rank must yield a reproducible index sequence.
        samples, weights = self._make_dataset()
        ids_a = self._draw_ids(self._make_sampler(weights, len(samples), rank=0))
        ids_b = self._draw_ids(self._make_sampler(weights, len(samples), rank=0))
        self.assertEqual(ids_a, ids_b)
# Allow executing this test module directly, outside a test runner.
if __name__ == "__main__":
    unittest.main()
| 27.643275
| 86
| 0.581764
| 553
| 4,727
| 4.696203
| 0.126582
| 0.048518
| 0.055449
| 0.033885
| 0.898729
| 0.898729
| 0.898729
| 0.898729
| 0.878706
| 0.878706
| 0
| 0.02607
| 0.342712
| 4,727
| 170
| 87
| 27.805882
| 0.809784
| 0
| 0
| 0.837209
| 0
| 0
| 0.006347
| 0
| 0
| 0
| 0
| 0
| 0.031008
| 1
| 0.031008
| false
| 0
| 0.031008
| 0
| 0.069767
| 0.015504
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
140043c0f3c2527f414afc73800b1484462f34ff
| 34,239
|
py
|
Python
|
sos_trades_core/tests/l0_test_55_ms_archi_builder.py
|
os-climate/sostrades-core
|
bcaa9b5e393ffbd0963e75a9315b27caf8b0abd9
|
[
"Apache-2.0"
] | 8
|
2022-01-10T14:44:28.000Z
|
2022-03-31T08:57:14.000Z
|
sos_trades_core/tests/l0_test_55_ms_archi_builder.py
|
os-climate/sostrades-core
|
bcaa9b5e393ffbd0963e75a9315b27caf8b0abd9
|
[
"Apache-2.0"
] | null | null | null |
sos_trades_core/tests/l0_test_55_ms_archi_builder.py
|
os-climate/sostrades-core
|
bcaa9b5e393ffbd0963e75a9315b27caf8b0abd9
|
[
"Apache-2.0"
] | 1
|
2022-02-21T14:51:45.000Z
|
2022-02-21T14:51:45.000Z
|
'''
Copyright 2022 Airbus SAS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
'''
mode: python; py-indent-offset: 4; tab-width: 4; coding: utf-8
'''
import unittest
import pandas as pd
from sos_trades_core.execution_engine.execution_engine import ExecutionEngine
from tempfile import gettempdir
class TestMultiScenarioArchiBuilder(unittest.TestCase):
"""
Multi scenario of architecture builder test class
"""
def setUp(self):
    """Build a fresh ExecutionEngine and builder factory for each test case."""
    self.namespace = 'MyCase'
    self.study_name = f'{self.namespace}'
    self.dirs_to_del = []
    self.root_dir = gettempdir()
    self.exec_eng = ExecutionEngine(self.namespace)
    self.factory = self.exec_eng.factory
def test_01_very_simple_multi_scenario_of_simple_architecture(self):
mydict = {'input_name': 'AC_list',
'input_type': 'string_list',
'input_ns': 'ns_scenario',
'output_name': 'AC_name',
'scatter_ns': 'ns_ac'}
self.exec_eng.smaps_manager.add_build_map('AC_list', mydict)
vb_builder_name = 'Business'
architecture_df = pd.DataFrame(
{'Parent': ['Business', 'Airbus'],
'Current': ['Airbus', 'AC_Sales'],
'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline'],
'Action': [('standard'), ('scatter', 'AC_list', 'ValueBlockDiscipline')],
'Activation': [True, False]})
builder = self.factory.create_architecture_builder(
vb_builder_name, architecture_df)
scenario_map = {'input_name': 'scenario_list',
'input_type': 'string_list',
'input_ns': 'ns_scatter_scenario',
'output_name': 'scenario_name',
'scatter_ns': 'ns_scenario',
'gather_ns': 'ns_scatter_scenario',
'ns_to_update': ['ns_ac', 'ns_services', 'ns_coc', 'ns_rc', 'ns_nrc', 'ns_market']}
self.exec_eng.smaps_manager.add_build_map(
'scenario_list', scenario_map)
self.exec_eng.ns_manager.add_ns(
'ns_scatter_scenario', 'MyCase.multi_scenarios')
multi_scenarios = self.exec_eng.factory.create_very_simple_multi_scenario_builder(
'multi_scenarios', 'scenario_list', [builder])
self.exec_eng.factory.set_builders_to_coupling_builder(
multi_scenarios)
self.exec_eng.ns_manager.add_ns_def({'ns_public': f'{self.study_name}.multi_scenarios',
'ns_services': f'{self.study_name}.multi_scenarios',
'ns_ac': f'{self.study_name}.multi_scenarios',
'ns_coc': f'{self.study_name}.multi_scenarios',
'ns_data_ac': f'{self.study_name}.multi_scenarios',
'ns_business_ac': f'{self.study_name}.multi_scenarios',
'ns_rc': f'{self.study_name}.multi_scenarios',
'ns_nrc': f'{self.study_name}.multi_scenarios',
'ns_market': f'{self.study_name}.multi_scenarios',
'ns_market_in': f'{self.study_name}.multi_scenarios',
'ns_scenario': f'{self.study_name}.multi_scenarios'})
self.exec_eng.configure()
dict_values = {f'{self.study_name}.multi_scenarios.scenario_list': [
'scenario_1', 'scenario_2']}
self.exec_eng.load_study_from_input_dict(dict_values)
exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
f'|_ {self.namespace}',
f'\t|_ multi_scenarios',
'\t\t|_ scenario_1',
'\t\t\t|_ Business',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t|_ scenario_2',
'\t\t\t|_ Business',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales']
exp_tv_str = '\n'.join(exp_tv_list)
assert exp_tv_str == self.exec_eng.display_treeview_nodes()
activation_df_1 = pd.DataFrame({'Business': ['Airbus', 'Airbus'],
'AC_list': ['AC1', 'AC2'],
'AC_Sales': [True, True]})
activation_df_2 = pd.DataFrame({'Business': ['Airbus', 'Airbus'],
'AC_list': ['AC3', 'AC4'],
'AC_Sales': [True, True]})
dict_values = {f'{self.study_name}.multi_scenarios.scenario_1.Business.activation_df': activation_df_1,
f'{self.study_name}.multi_scenarios.scenario_2.Business.activation_df': activation_df_2}
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
f'|_ {self.namespace}',
f'\t|_ multi_scenarios',
'\t\t|_ scenario_1',
f'\t\t\t|_ {vb_builder_name}',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ AC1',
'\t\t\t\t\t\t|_ AC2',
'\t\t|_ scenario_2',
f'\t\t\t|_ {vb_builder_name}',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ AC3',
'\t\t\t\t\t\t|_ AC4', ]
exp_tv_str = '\n'.join(exp_tv_list)
assert exp_tv_str == self.exec_eng.display_treeview_nodes()
self.assertListEqual(list(self.exec_eng.dm.get_value(
'MyCase.multi_scenarios.scenario_1.Business.Airbus.AC_Sales.AC_list')), ['AC1', 'AC2'])
self.assertListEqual(list(self.exec_eng.dm.get_value(
'MyCase.multi_scenarios.scenario_2.Business.Airbus.AC_Sales.AC_list')), ['AC3', 'AC4'])
activation_df_1 = pd.DataFrame({'Business': ['Airbus', 'Airbus'],
'AC_list': ['AC1', 'AC2'],
'AC_Sales': [True, False]})
activation_df_2 = pd.DataFrame({'Business': ['Airbus', 'Airbus'],
'AC_list': ['AC3', 'AC4'],
'AC_Sales': [True, False]})
dict_values = {f'{self.study_name}.multi_scenarios.scenario_1.Business.activation_df': activation_df_1,
f'{self.study_name}.multi_scenarios.scenario_2.Business.activation_df': activation_df_2, }
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
f'|_ {self.namespace}',
f'\t|_ multi_scenarios',
'\t\t|_ scenario_1',
f'\t\t\t|_ {vb_builder_name}',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ AC1',
'\t\t|_ scenario_2',
f'\t\t\t|_ {vb_builder_name}',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ AC3', ]
exp_tv_str = '\n'.join(exp_tv_list)
assert exp_tv_str == self.exec_eng.display_treeview_nodes()
self.assertListEqual(list(self.exec_eng.dm.get_value(
'MyCase.multi_scenarios.scenario_1.Business.Airbus.AC_Sales.AC_list')), ['AC1'])
self.assertListEqual(list(self.exec_eng.dm.get_value(
'MyCase.multi_scenarios.scenario_2.Business.Airbus.AC_Sales.AC_list')), ['AC3'])
def test_02_very_simple_multi_scenario_of_architecture_scatter_of_scatter(self):
mydict = {'input_name': 'AC_list',
'input_type': 'string_list',
'input_ns': 'ns_scenario',
'output_name': 'AC_name',
'scatter_ns': 'ns_ac'}
self.exec_eng.smaps_manager.add_build_map('AC_list', mydict)
# subsystem scatter map dict
subsystem_services_map_dict = {'input_name': 'subsystems_list',
'input_type': 'string_list',
'input_ns': 'ns_scenario',
'output_name': 'subsystem',
'scatter_ns': 'ns_subsystem',
# 'gather_ns': 'ns_services_subsystem',
'gather_ns': 'ns_ac_subsystem',
'gather_ns_out': 'ns_ac',
# add scatter name to this namespace
# , 'ns_ac']
}
# add subsystem map
self.exec_eng.smaps_manager.add_build_map(
'subsystems_list', subsystem_services_map_dict)
vb_builder_name = 'Business'
architecture_df = pd.DataFrame(
{'Parent': ['Business', 'Business', 'Airbus', 'Airbus', 'Boeing', 'Services', 'Services'],
'Current': ['Airbus', 'Boeing', 'AC_Sales', 'Services', 'AC_Sales', 'FHS', 'OSS'],
'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'SumValueBlockDiscipline'],
'Action': [('standard'), ('standard'), ('scatter', 'AC_list', 'ValueBlockDiscipline'), ('standard'), ('scatter', 'AC_list', 'ValueBlockDiscipline'),
('scatter', 'AC_list', ('scatter', 'subsystems_list', 'ValueBlockDiscipline')), ('scatter', 'AC_list', ('scatter', 'subsystems_list', 'ValueBlockDiscipline'))],
'Activation': [True, True, False, False, False, False, False]})
builder = self.factory.create_architecture_builder(
vb_builder_name, architecture_df)
scenario_map = {'input_name': 'scenario_list',
'input_type': 'string_list',
'input_ns': 'ns_scatter_scenario',
'output_name': 'scenario_name',
'scatter_ns': 'ns_scenario',
'gather_ns': 'ns_scatter_scenario',
'ns_to_update': ['ns_ac', 'ns_services', 'ns_coc', 'ns_rc', 'ns_nrc', 'ns_market']}
self.exec_eng.smaps_manager.add_build_map(
'scenario_list', scenario_map)
self.exec_eng.ns_manager.add_ns(
'ns_scatter_scenario', 'MyCase.multi_scenarios')
multi_scenarios = self.exec_eng.factory.create_very_simple_multi_scenario_builder(
'multi_scenarios', 'scenario_list', [builder])
self.exec_eng.factory.set_builders_to_coupling_builder(
multi_scenarios)
self.exec_eng.ns_manager.add_ns_def({'ns_public': f'{self.study_name}.multi_scenarios',
'ns_services': f'{self.study_name}.multi_scenarios',
'ns_ac': f'{self.study_name}.multi_scenarios',
'ns_coc': f'{self.study_name}.multi_scenarios',
'ns_data_ac': f'{self.study_name}.multi_scenarios',
'ns_business_ac': f'{self.study_name}.multi_scenarios',
'ns_rc': f'{self.study_name}.multi_scenarios',
'ns_nrc': f'{self.study_name}.multi_scenarios',
'ns_market': f'{self.study_name}.multi_scenarios',
'ns_market_in': f'{self.study_name}.multi_scenarios',
'ns_scenario': f'{self.study_name}.multi_scenarios',
'ns_services_ac': f'{self.study_name}.multi_scenarios'})
self.exec_eng.configure()
dict_values = {f'{self.study_name}.multi_scenarios.scenario_list': [
'scenario_1', 'scenario_2']}
self.exec_eng.load_study_from_input_dict(dict_values)
exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
f'|_ {self.namespace}',
f'\t|_ multi_scenarios',
'\t\t|_ scenario_1',
'\t\t\t|_ Business',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t|_ Services',
'\t\t\t\t\t\t|_ FHS',
'\t\t\t\t\t\t|_ OSS',
'\t\t\t\t|_ Boeing',
'\t\t\t\t\t|_ AC_Sales',
'\t\t|_ scenario_2',
'\t\t\t|_ Business',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t|_ Services',
'\t\t\t\t\t\t|_ FHS',
'\t\t\t\t\t\t|_ OSS',
'\t\t\t\t|_ Boeing',
'\t\t\t\t\t|_ AC_Sales', ]
exp_tv_str = '\n'.join(exp_tv_list)
assert exp_tv_str == self.exec_eng.display_treeview_nodes()
activation_df_1 = pd.DataFrame({'Business': ['Airbus', 'Airbus', 'Boeing', 'Boeing'],
'AC_list': ['AC1', 'AC2', 'AC3', 'AC4'],
'subsystems_list': ['Airframe', 'Propulsion', 'Propulsion', 'Propulsion', ],
'AC_Sales': [True, True, True, True],
'Services': [True, True, False, False],
'FHS': [True, True, False, False],
'OSS': [True, True, False, False]})
activation_df_2 = pd.DataFrame({'Business': ['Airbus', 'Airbus', 'Airbus', 'Boeing', 'Boeing'],
'AC_list': ['AC1', 'AC2', 'AC3', 'AC4', 'AC5'],
'subsystems_list': ['Airframe', 'Propulsion', 'Propulsion', 'Propulsion', 'Propulsion'],
'AC_Sales': [True, True, True, True, True],
'Services': [True, False, True, False, False],
'FHS': [True, False, False, False, False],
'OSS': [True, False, True, False, False]})
dict_values = {f'{self.study_name}.multi_scenarios.scenario_1.Business.activation_df': activation_df_1,
f'{self.study_name}.multi_scenarios.scenario_2.Business.activation_df': activation_df_2, }
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
f'|_ {self.namespace}',
f'\t|_ multi_scenarios',
'\t\t|_ scenario_1',
f'\t\t\t|_ {vb_builder_name}',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ AC1',
'\t\t\t\t\t\t|_ AC2',
'\t\t\t\t\t|_ Services',
'\t\t\t\t\t\t|_ FHS',
'\t\t\t\t\t\t\t|_ AC1',
'\t\t\t\t\t\t\t\t|_ Airframe',
'\t\t\t\t\t\t\t|_ AC2',
'\t\t\t\t\t\t\t\t|_ Propulsion',
'\t\t\t\t\t\t|_ OSS',
'\t\t\t\t\t\t\t|_ AC1',
'\t\t\t\t\t\t\t\t|_ Airframe',
'\t\t\t\t\t\t\t|_ AC2',
'\t\t\t\t\t\t\t\t|_ Propulsion',
'\t\t\t\t|_ Boeing',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ AC3',
'\t\t\t\t\t\t|_ AC4',
'\t\t|_ scenario_2',
f'\t\t\t|_ {vb_builder_name}',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ AC1',
'\t\t\t\t\t\t|_ AC2',
'\t\t\t\t\t\t|_ AC3',
'\t\t\t\t\t|_ Services',
'\t\t\t\t\t\t|_ FHS',
'\t\t\t\t\t\t\t|_ AC1',
'\t\t\t\t\t\t\t\t|_ Airframe',
'\t\t\t\t\t\t|_ OSS',
'\t\t\t\t\t\t\t|_ AC1',
'\t\t\t\t\t\t\t\t|_ Airframe',
'\t\t\t\t\t\t\t|_ AC3',
'\t\t\t\t\t\t\t\t|_ Propulsion',
'\t\t\t\t|_ Boeing',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ AC4',
'\t\t\t\t\t\t|_ AC5']
exp_tv_str = '\n'.join(exp_tv_list)
assert exp_tv_str == self.exec_eng.display_treeview_nodes()
def test_03_multi_scenario_of_architecture(self):
mydict = {'input_name': 'AC_list',
'input_type': 'string_list',
'input_ns': 'ns_scenario',
'output_name': 'AC_name',
'scatter_ns': 'ns_ac'}
self.exec_eng.smaps_manager.add_build_map('AC_list', mydict)
vb_builder_name = 'Business'
architecture_df = pd.DataFrame(
{'Parent': ['Business', 'Airbus'],
'Current': ['Airbus', 'AC_Sales'],
'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline'],
'Action': [('standard'), ('scatter', 'AC_list', 'ValueBlockDiscipline')],
'Activation': [True, False]})
builder = self.factory.create_architecture_builder(
vb_builder_name, architecture_df)
scenario_map = {'input_name': 'scenario_list',
'input_type': 'string_list',
'input_ns': 'ns_scatter_scenario',
'output_name': 'scenario_name',
'scatter_ns': 'ns_scenario',
'gather_ns': 'ns_scatter_scenario',
'ns_to_update': ['ns_ac', 'ns_services', 'ns_coc', 'ns_rc', 'ns_nrc', 'ns_market']}
self.exec_eng.smaps_manager.add_build_map(
'scenario_list', scenario_map)
self.exec_eng.ns_manager.add_ns(
'ns_scatter_scenario', 'MyCase.multi_scenarios')
multi_scenarios = self.exec_eng.factory.create_multi_scenario_builder(
'multi_scenarios', 'scenario_list', [builder])
self.exec_eng.factory.set_builders_to_coupling_builder(
multi_scenarios)
self.exec_eng.ns_manager.add_ns_def({'ns_public': f'{self.study_name}.multi_scenarios',
'ns_services': f'{self.study_name}.multi_scenarios',
'ns_ac': f'{self.study_name}.multi_scenarios',
'ns_coc': f'{self.study_name}.multi_scenarios',
'ns_data_ac': f'{self.study_name}.multi_scenarios',
'ns_business_ac': f'{self.study_name}.multi_scenarios',
'ns_rc': f'{self.study_name}.multi_scenarios',
'ns_nrc': f'{self.study_name}.multi_scenarios',
'ns_market': f'{self.study_name}.multi_scenarios',
'ns_scenario': f'{self.study_name}.multi_scenarios'})
self.exec_eng.configure()
activation_df_1 = pd.DataFrame({'Business': ['Airbus', 'Airbus'],
'AC_list': ['AC1', 'AC2'],
'AC_Sales': [True, True]})
activation_df_2 = pd.DataFrame({'Business': ['Airbus', 'Airbus'],
'AC_list': ['AC3', 'AC4'],
'AC_Sales': [True, True]})
dict_values = {f'{self.study_name}.multi_scenarios.activation_df_trade': [activation_df_1, activation_df_2],
f'{self.study_name}.multi_scenarios.trade_variables': {'activation_df': 'dataframe'}}
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
f'|_ {self.namespace}',
f'\t|_ multi_scenarios',
'\t\t|_ scenario_1',
f'\t\t\t|_ {vb_builder_name}',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ AC1',
'\t\t\t\t\t\t|_ AC2',
'\t\t|_ scenario_2',
f'\t\t\t|_ {vb_builder_name}',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ AC3',
'\t\t\t\t\t\t|_ AC4']
exp_tv_str = '\n'.join(exp_tv_list)
assert exp_tv_str == self.exec_eng.display_treeview_nodes()
def test_04_very_simple_multi_scenario_with_sub_architecture(self):
mydict = {'input_name': 'AC_list',
'input_type': 'string_list',
'input_ns': 'ns_scenario',
'output_name': 'AC_name',
'scatter_ns': 'ns_ac'}
self.exec_eng.smaps_manager.add_build_map('AC_list', mydict)
vb_builder_name = 'Business'
subarchitecture_df = pd.DataFrame(
{'Parent': ['AC_Sales', 'AC_Sales'],
'Current': ['Propu', 'Airframe'],
'Type': ['ValueBlockDiscipline', 'ValueBlockDiscipline'],
'Action': [('standard'), ('standard')],
'Activation': [False, False]})
architecture_df = pd.DataFrame(
{'Parent': ['Business', 'Airbus'],
'Current': ['Airbus', 'AC_Sales'],
'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline'],
'Action': [('standard'), ('architecture', subarchitecture_df)],
'Activation': [True, False]})
builder = self.factory.create_architecture_builder(
vb_builder_name, architecture_df)
scenario_map = {'input_name': 'scenario_list',
'input_type': 'string_list',
'input_ns': 'ns_scatter_scenario',
'output_name': 'scenario_name',
'scatter_ns': 'ns_scenario',
'gather_ns': 'ns_scatter_scenario',
'ns_to_update': ['ns_ac', 'ns_services', 'ns_coc', 'ns_rc', 'ns_nrc', 'ns_market']}
self.exec_eng.smaps_manager.add_build_map(
'scenario_list', scenario_map)
self.exec_eng.ns_manager.add_ns(
'ns_scatter_scenario', 'MyCase.multi_scenarios')
multi_scenarios = self.exec_eng.factory.create_very_simple_multi_scenario_builder(
'multi_scenarios', 'scenario_list', [builder])
self.exec_eng.factory.set_builders_to_coupling_builder(
multi_scenarios)
self.exec_eng.ns_manager.add_ns_def({'ns_public': f'{self.study_name}.multi_scenarios',
'ns_services': f'{self.study_name}.multi_scenarios',
'ns_ac': f'{self.study_name}.multi_scenarios',
'ns_coc': f'{self.study_name}.multi_scenarios',
'ns_data_ac': f'{self.study_name}.multi_scenarios',
'ns_business_ac': f'{self.study_name}.multi_scenarios',
'ns_rc': f'{self.study_name}.multi_scenarios',
'ns_nrc': f'{self.study_name}.multi_scenarios',
'ns_market': f'{self.study_name}.multi_scenarios',
'ns_scenario': f'{self.study_name}.multi_scenarios'})
self.exec_eng.configure()
dict_values = {f'{self.study_name}.multi_scenarios.scenario_list': [
'scenario_1', 'scenario_2']}
self.exec_eng.load_study_from_input_dict(dict_values)
exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
f'|_ {self.namespace}',
f'\t|_ multi_scenarios',
'\t\t|_ scenario_1',
'\t\t\t|_ Business',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ Propu',
'\t\t\t\t\t\t|_ Airframe',
'\t\t|_ scenario_2',
'\t\t\t|_ Business',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ Propu',
'\t\t\t\t\t\t|_ Airframe']
exp_tv_str = '\n'.join(exp_tv_list)
assert exp_tv_str == self.exec_eng.display_treeview_nodes()
activation_df_1 = pd.DataFrame({'Business': ['Airbus'],
'AC_Sales': [True],
'Propu': [True],
'Airframe': [True]})
activation_df_2 = pd.DataFrame({'Business': ['Airbus'],
'AC_Sales': [True],
'Propu': [True],
'Airframe': [False]})
dict_values = {f'{self.study_name}.multi_scenarios.scenario_1.Business.activation_df': activation_df_1,
f'{self.study_name}.multi_scenarios.scenario_2.Business.activation_df': activation_df_2, }
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
f'|_ {self.namespace}',
f'\t|_ multi_scenarios',
'\t\t|_ scenario_1',
'\t\t\t|_ Business',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ Propu',
'\t\t\t\t\t\t|_ Airframe',
'\t\t|_ scenario_2',
'\t\t\t|_ Business',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ Propu']
exp_tv_str = '\n'.join(exp_tv_list)
assert exp_tv_str == self.exec_eng.display_treeview_nodes()
def test_05_very_simple_multi_scenario_with_scatter_architecture(self):
mydict = {'input_name': 'AC_list',
'input_type': 'string_list',
'input_ns': 'ns_scenario',
'output_name': 'AC_name',
'scatter_ns': 'ns_ac'}
self.exec_eng.smaps_manager.add_build_map('AC_list', mydict)
vb_builder_name = 'Business'
subarchitecture_df = pd.DataFrame(
{'Parent': ['AC_Sales', 'AC_Sales'],
'Current': ['Propu', 'Airframe'],
'Type': ['ValueBlockDiscipline', 'ValueBlockDiscipline'],
'Action': [('standard'), ('standard')],
'Activation': [False, False]})
architecture_df = pd.DataFrame(
{'Parent': ['Business', 'Airbus'],
'Current': ['Airbus', 'AC_Sales'],
'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline'],
'Action': [('standard'), ('scatter_architecture', 'AC_list', 'SumValueBlockDiscipline', subarchitecture_df)],
'Activation': [True, False]})
builder = self.factory.create_architecture_builder(
vb_builder_name, architecture_df)
scenario_map = {'input_name': 'scenario_list',
'input_type': 'string_list',
'input_ns': 'ns_scatter_scenario',
'output_name': 'scenario_name',
'scatter_ns': 'ns_scenario',
'gather_ns': 'ns_scatter_scenario',
'ns_to_update': ['ns_ac', 'ns_services', 'ns_coc', 'ns_rc', 'ns_nrc', 'ns_market']}
self.exec_eng.smaps_manager.add_build_map(
'scenario_list', scenario_map)
self.exec_eng.ns_manager.add_ns(
'ns_scatter_scenario', 'MyCase.multi_scenarios')
multi_scenarios = self.exec_eng.factory.create_very_simple_multi_scenario_builder(
'multi_scenarios', 'scenario_list', [builder])
self.exec_eng.factory.set_builders_to_coupling_builder(
multi_scenarios)
self.exec_eng.ns_manager.add_ns_def({'ns_public': f'{self.study_name}.multi_scenarios',
'ns_services': f'{self.study_name}.multi_scenarios',
'ns_ac': f'{self.study_name}.multi_scenarios',
'ns_coc': f'{self.study_name}.multi_scenarios',
'ns_data_ac': f'{self.study_name}.multi_scenarios',
'ns_business_ac': f'{self.study_name}.multi_scenarios',
'ns_rc': f'{self.study_name}.multi_scenarios',
'ns_nrc': f'{self.study_name}.multi_scenarios',
'ns_market': f'{self.study_name}.multi_scenarios',
'ns_scenario': f'{self.study_name}.multi_scenarios'})
self.exec_eng.configure()
dict_values = {f'{self.study_name}.multi_scenarios.scenario_list': [
'scenario_1', 'scenario_2']}
self.exec_eng.load_study_from_input_dict(dict_values)
exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
f'|_ {self.namespace}',
f'\t|_ multi_scenarios',
'\t\t|_ scenario_1',
'\t\t\t|_ Business',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t|_ scenario_2',
'\t\t\t|_ Business',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales', ]
exp_tv_str = '\n'.join(exp_tv_list)
assert exp_tv_str == self.exec_eng.display_treeview_nodes()
activation_df_1 = pd.DataFrame({'Business': ['Airbus', 'Airbus'],
'AC_list': ['A1', 'A2'],
'AC_Sales': [True, True],
'Propu': [True, True],
'Airframe': [True, True]})
activation_df_2 = pd.DataFrame({'Business': ['Airbus', 'Airbus'],
'AC_list': ['A3', 'A4'],
'AC_Sales': [True, False],
'Propu': [False, True],
'Airframe': [True, True]})
dict_values = {f'{self.study_name}.multi_scenarios.scenario_1.Business.activation_df': activation_df_1,
f'{self.study_name}.multi_scenarios.scenario_2.Business.activation_df': activation_df_2, }
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
f'|_ {self.namespace}',
f'\t|_ multi_scenarios',
'\t\t|_ scenario_1',
'\t\t\t|_ Business',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ A1',
'\t\t\t\t\t\t\t|_ Propu',
'\t\t\t\t\t\t\t|_ Airframe',
'\t\t\t\t\t\t|_ A2',
'\t\t\t\t\t\t\t|_ Propu',
'\t\t\t\t\t\t\t|_ Airframe',
'\t\t|_ scenario_2',
'\t\t\t|_ Business',
'\t\t\t\t|_ Airbus',
'\t\t\t\t\t|_ AC_Sales',
'\t\t\t\t\t\t|_ A3',
'\t\t\t\t\t\t\t|_ Airframe', ]
exp_tv_str = '\n'.join(exp_tv_list)
assert exp_tv_str == self.exec_eng.display_treeview_nodes()
if __name__ == '__main__':
    # Manual entry point: run a single scenario test for quick debugging.
    test_case = TestMultiScenarioArchiBuilder()
    test_case.setUp()
    test_case.test_03_multi_scenario_of_architecture()
| 50.277533
| 211
| 0.486171
| 3,697
| 34,239
| 4.157425
| 0.056262
| 0.071568
| 0.078465
| 0.071308
| 0.893038
| 0.875407
| 0.854262
| 0.854131
| 0.850098
| 0.831165
| 0
| 0.006994
| 0.377815
| 34,239
| 680
| 212
| 50.351471
| 0.7145
| 0.022577
| 0
| 0.849265
| 0
| 0
| 0.344419
| 0.103491
| 0
| 0
| 0
| 0
| 0.025735
| 1
| 0.011029
| false
| 0
| 0.007353
| 0
| 0.020221
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
14a63b8d93c4b932003d89c852195a1f473ecdfb
| 20,164
|
py
|
Python
|
tests/test_affine_transform.py
|
huynhngoc/deoxys-image
|
69faff2e28e062356ddfdc067e482aaae5db014d
|
[
"MIT"
] | null | null | null |
tests/test_affine_transform.py
|
huynhngoc/deoxys-image
|
69faff2e28e062356ddfdc067e482aaae5db014d
|
[
"MIT"
] | null | null | null |
tests/test_affine_transform.py
|
huynhngoc/deoxys-image
|
69faff2e28e062356ddfdc067e482aaae5db014d
|
[
"MIT"
] | null | null | null |
import pytest
import numpy as np
from deoxys_image import affine_transform_matrix, apply_affine_transform, \
get_rotation_matrix, get_zoom_matrix, get_shift_matrix, apply_flip
def test_get_rotation_matrix_error():
    """A rank-2 rotation request must be rejected with ValueError."""
    invalid_kwargs = dict(rotation_axis=0, theta=30, rank=2)
    with pytest.raises(ValueError):
        get_rotation_matrix(**invalid_kwargs)
def test_get_rotation_matrix():
    """Rotation matrices must preserve the homogeneous row and column."""
    # rank-3 construction must succeed for every rotation axis
    for axis in (0, 1, 2):
        get_rotation_matrix(rotation_axis=axis, theta=30, rank=3)
    # rank-4 matrices: last row and last column stay [0, 0, 0, 1]
    homogeneous = [0, 0, 0, 1]
    for axis in (0, 1, 2):
        matrix = get_rotation_matrix(rotation_axis=axis, theta=30, rank=4)
        assert np.allclose(matrix[:, -1], homogeneous)
        assert np.allclose(matrix[-1, :], homogeneous)
def test_get_shift_matrix_error():
    """Shift length must match rank - 1; any mismatch raises ValueError."""
    mismatched_cases = (((14, 13), 4),       # 2 components vs rank 4
                        ((14, 13, 13), 3))   # 3 components vs rank 3
    for shift, rank in mismatched_cases:
        with pytest.raises(ValueError):
            get_shift_matrix(shift, rank=rank)
def test_get_shift_matrix():
    """A shift matrix is an identity with the offsets in its last column."""
    expected_2d = np.array([[1, 0, 30],
                            [0, 1, 10],
                            [0, 0, 1]])
    assert np.array_equal(get_shift_matrix((30, 10), rank=3), expected_2d)

    expected_3d = np.array([[1, 0, 0, 30],
                            [0, 1, 0, 20],
                            [0, 0, 1, 10],
                            [0, 0, 0, 1]])
    assert np.array_equal(get_shift_matrix((30, 20, 10), rank=4), expected_3d)
def test_get_zoom_matrix():
    """Zoom matrices scale the spatial diagonal; last diagonal entry stays 1."""
    expected = np.array([[2, 0, 0],
                         [0, 2, 0],
                         [0, 0, 1]])
    assert np.array_equal(get_zoom_matrix(2, rank=3), expected)

    expected = np.array([[0.5, 0, 0, 0],
                         [0, 0.5, 0, 0],
                         [0, 0, 0.5, 0],
                         [0, 0, 0, 1]])
    assert np.array_equal(get_zoom_matrix(0.5, rank=4), expected)
def test_affine_transform_matrix():
    """Check the combined homogeneous matrix built from theta/zoom/shift."""
    # pure 90-degree rotation (rank-3 homogeneous matrix for 2d data)
    transform_matrix = affine_transform_matrix(
        rank=3, theta=90)
    assert np.allclose(transform_matrix, np.array([[0, -1, 0],
                                                   [1, 0, 0],
                                                   [0, 0, 1]]))
    # zoom_factor=2 yields a 0.5 scaling -- the matrix maps output to input
    # coordinates (inverse mapping), hence the reciprocal factor
    transform_matrix = affine_transform_matrix(
        rank=3, zoom_factor=2)
    assert np.allclose(transform_matrix, np.array([[0.5, 0, 0],
                                                   [0, 0.5, 0],
                                                   [0, 0, 1]]))
    # rotation combined with zoom; NOTE(review): shift=(10, 20) does not show
    # up in the expected last column -- presumably the offset is handled
    # elsewhere (e.g. folded into a centering step) -- TODO confirm
    transform_matrix = affine_transform_matrix(
        rank=3, theta=90, zoom_factor=2, shift=(10, 20))
    assert np.allclose(transform_matrix, np.array([[0, -0.5, 0],
                                                   [0.5, 0, 0],
                                                   [0, 0, 1]]))
def test_apply_affine_transform():
    """Apply 2d rotation and shift to a 2-channel 5x5 image and compare with
    hand-written expected pixel grids."""
    # channel 0 is a diamond (rotation-invariant), channel 1 is not symmetric
    image = np.zeros((5, 5, 2))
    image[..., 0] = [[0, 0, 1, 0, 0],
                    [0, 1, 0, 1, 0],
                    [1, 0, 0, 0, 1],
                    [0, 1, 0, 1, 0],
                    [0, 0, 1, 0, 0]]
    image[..., 1] = [[1, 1, 1, 1, 1],
                    [0, 0, 1, 0, 0],
                    [1, 1, 1, 1, 1],
                    [0, 0, 1, 0, 0],
                    [0, 0, 1, 0, 0]]
    # 90-degree rotation: channel 0 unchanged, channel 1 rotated
    res = apply_affine_transform(image, theta=90, mode='constant', cval=0)
    expected = np.zeros((5, 5, 2))
    expected[..., 0] = [[0, 0, 1, 0, 0],
                       [0, 1, 0, 1, 0],
                       [1, 0, 0, 0, 1],
                       [0, 1, 0, 1, 0],
                       [0, 0, 1, 0, 0]]
    expected[..., 1] = [[0, 0, 1, 0, 1],
                       [0, 0, 1, 0, 1],
                       [1, 1, 1, 1, 1],
                       [0, 0, 1, 0, 1],
                       [0, 0, 1, 0, 1]]
    assert np.allclose(res, expected)
    # same source image, this time shifted by one row
    image = np.zeros((5, 5, 2))
    image[..., 0] = [[0, 0, 1, 0, 0],
                    [0, 1, 0, 1, 0],
                    [1, 0, 0, 0, 1],
                    [0, 1, 0, 1, 0],
                    [0, 0, 1, 0, 0]]
    image[..., 1] = [[1, 1, 1, 1, 1],
                    [0, 0, 1, 0, 0],
                    [1, 1, 1, 1, 1],
                    [0, 0, 1, 0, 0],
                    [0, 0, 1, 0, 0]]
    # shift=(1, 0): rows move up by one, bottom row filled with cval=0
    res = apply_affine_transform(image, shift=(1, 0), mode='constant', cval=0)
    expected = np.zeros((5, 5, 2))
    expected[..., 0] = [[0, 1, 0, 1, 0],
                       [1, 0, 0, 0, 1],
                       [0, 1, 0, 1, 0],
                       [0, 0, 1, 0, 0],
                       [0, 0, 0, 0, 0]]
    expected[..., 1] = [[0, 0, 1, 0, 0],
                       [1, 1, 1, 1, 1],
                       [0, 0, 1, 0, 0],
                       [0, 0, 1, 0, 0],
                       [0, 0, 0, 0, 0]]
    assert np.allclose(res, expected)
    # also check interpolation produced clean integers after rounding
    assert np.all(np.rint(res) == expected)
def test_apply_affine_transform_3d():
    """Rotate and shift a 2-channel 3x3x3 volume (default slice-wise path)
    and compare against hand-written expected volumes."""
    image = np.zeros((3, 3, 3, 2))
    # 3d T in the first channel
    image[0][..., 0] = [[0, 0, 0],
                       [0, 1, 0],
                       [0, 0, 0]]
    image[1][..., 0] = [[0, 0, 0],
                       [0, 1, 0],
                       [0, 0, 0]]
    image[2][..., 0] = [[0, 0, 0],
                       [1, 1, 1],
                       [0, 0, 0]]
    # 3d H in the second channel
    image[0][..., 1] = [[1, 0, 1],
                       [0, 0, 0],
                       [0, 0, 0]]
    image[1][..., 1] = [[1, 1, 1],
                       [0, 0, 0],
                       [0, 0, 0]]
    image[2][..., 1] = [[1, 0, 1],
                       [0, 0, 0],
                       [0, 0, 0]]
    # 90-degree rotation around axis 0
    res = apply_affine_transform(
        image, theta=90, rotation_axis=0, mode='constant', cval=0)
    expected = np.zeros((3, 3, 3, 2))
    # 3d T in the first channel
    expected[0][..., 0] = [[0, 0, 0],
                          [0, 1, 0],
                          [0, 0, 0]]
    expected[1][..., 0] = [[0, 0, 0],
                          [0, 1, 0],
                          [0, 0, 0]]
    expected[2][..., 0] = [[0, 1, 0],
                          [0, 1, 0],
                          [0, 1, 0]]
    # 3d H in the second channel
    expected[0][..., 1] = [[0, 0, 1],
                          [0, 0, 0],
                          [0, 0, 1]]
    expected[1][..., 1] = [[0, 0, 1],
                          [0, 0, 1],
                          [0, 0, 1]]
    expected[2][..., 1] = [[0, 0, 1],
                          [0, 0, 0],
                          [0, 0, 1]]
    assert np.allclose(res, expected)
    assert np.all(np.rint(res) == expected)
    # rebuild the same input volume for the shift checks
    image = np.zeros((3, 3, 3, 2))
    # 3d T in the first channel
    image[0][..., 0] = [[0, 0, 0],
                       [0, 1, 0],
                       [0, 0, 0]]
    image[1][..., 0] = [[0, 0, 0],
                       [0, 1, 0],
                       [0, 0, 0]]
    image[2][..., 0] = [[0, 0, 0],
                       [1, 1, 1],
                       [0, 0, 0]]
    # 3d H in the second channel
    image[0][..., 1] = [[1, 0, 1],
                       [0, 0, 0],
                       [0, 0, 0]]
    image[1][..., 1] = [[1, 1, 1],
                       [0, 0, 0],
                       [0, 0, 0]]
    image[2][..., 1] = [[1, 0, 1],
                       [0, 0, 0],
                       [0, 0, 0]]
    # shift along the first (slice) axis: slices move by one, last slice zero
    res = apply_affine_transform(
        image, shift=(1, 0, 0), mode='constant', cval=0)
    expected = np.zeros((3, 3, 3, 2))
    # 3d T in the first channel
    # expected[0][..., 0] = [[0, 0, 0],
    #                        [0, 1, 0],
    #                        [0, 0, 0]]
    expected[0][..., 0] = [[0, 0, 0],
                          [0, 1, 0],
                          [0, 0, 0]]
    expected[1][..., 0] = [[0, 0, 0],
                          [1, 1, 1],
                          [0, 0, 0]]
    # 3d H in the second channel
    expected[0][..., 1] = [[1, 1, 1],
                          [0, 0, 0],
                          [0, 0, 0]]
    expected[1][..., 1] = [[1, 0, 1],
                          [0, 0, 0],
                          [0, 0, 0]]
    assert np.allclose(res, expected)
    assert np.all(np.rint(res) == expected)
    # shift along the second (row) axis
    res = apply_affine_transform(
        image, shift=(0, 1, 0), mode='constant', cval=0)
    expected = np.zeros((3, 3, 3, 2))
    expected[0][..., 0] = [[0, 1, 0],
                          [0, 0, 0],
                          [0, 0, 0], ]
    expected[1][..., 0] = [[0, 1, 0],
                          [0, 0, 0],
                          [0, 0, 0]]
    expected[2][..., 0] = [[1, 1, 1],
                          [0, 0, 0],
                          [0, 0, 0]]
    assert np.allclose(res, expected)
    assert np.all(np.rint(res) == expected)
    # shift along the third (column) axis
    res = apply_affine_transform(
        image, shift=(0, 0, 1), mode='constant', cval=0)
    expected = np.zeros((3, 3, 3, 2))
    # 3d T in the first channel
    expected[0][..., 0] = [[0, 0, 0],
                          [1, 0, 0],
                          [0, 0, 0]]
    expected[1][..., 0] = [[0, 0, 0],
                          [1, 0, 0],
                          [0, 0, 0]]
    expected[2][..., 0] = [[0, 0, 0],
                          [1, 1, 0],
                          [0, 0, 0]]
    # 3d H in the second channel
    expected[0][..., 1] = [[0, 1, 0],
                          [0, 0, 0],
                          [0, 0, 0]]
    expected[1][..., 1] = [[1, 1, 0],
                          [0, 0, 0],
                          [0, 0, 0]]
    expected[2][..., 1] = [[0, 1, 0],
                          [0, 0, 0],
                          [0, 0, 0]]
    assert np.allclose(res, expected)
    assert np.all(np.rint(res) == expected)
def test_apply_affine_transform_3d_nosplit():
    """Same fixtures as test_apply_affine_transform_3d, but exercising the
    direct volumetric path (use_3d_transform=True); results must match the
    same hand-written expectations."""
    image = np.zeros((3, 3, 3, 2))
    # 3d T in the first channel
    image[0][..., 0] = [[0, 0, 0],
                       [0, 1, 0],
                       [0, 0, 0]]
    image[1][..., 0] = [[0, 0, 0],
                       [0, 1, 0],
                       [0, 0, 0]]
    image[2][..., 0] = [[0, 0, 0],
                       [1, 1, 1],
                       [0, 0, 0]]
    # 3d H in the second channel
    image[0][..., 1] = [[1, 0, 1],
                       [0, 0, 0],
                       [0, 0, 0]]
    image[1][..., 1] = [[1, 1, 1],
                       [0, 0, 0],
                       [0, 0, 0]]
    image[2][..., 1] = [[1, 0, 1],
                       [0, 0, 0],
                       [0, 0, 0]]
    # 90-degree rotation around axis 0, direct 3d transform
    res = apply_affine_transform(
        image, theta=90, rotation_axis=0, mode='constant',
        cval=0, use_3d_transform=True)
    expected = np.zeros((3, 3, 3, 2))
    # 3d T in the first channel
    expected[0][..., 0] = [[0, 0, 0],
                          [0, 1, 0],
                          [0, 0, 0]]
    expected[1][..., 0] = [[0, 0, 0],
                          [0, 1, 0],
                          [0, 0, 0]]
    expected[2][..., 0] = [[0, 1, 0],
                          [0, 1, 0],
                          [0, 1, 0]]
    # 3d H in the second channel
    expected[0][..., 1] = [[0, 0, 1],
                          [0, 0, 0],
                          [0, 0, 1]]
    expected[1][..., 1] = [[0, 0, 1],
                          [0, 0, 1],
                          [0, 0, 1]]
    expected[2][..., 1] = [[0, 0, 1],
                          [0, 0, 0],
                          [0, 0, 1]]
    assert np.allclose(res, expected)
    assert np.all(np.rint(res) == expected)
    # rebuild the same input volume for the shift checks
    image = np.zeros((3, 3, 3, 2))
    # 3d T in the first channel
    image[0][..., 0] = [[0, 0, 0],
                       [0, 1, 0],
                       [0, 0, 0]]
    image[1][..., 0] = [[0, 0, 0],
                       [0, 1, 0],
                       [0, 0, 0]]
    image[2][..., 0] = [[0, 0, 0],
                       [1, 1, 1],
                       [0, 0, 0]]
    # 3d H in the second channel
    image[0][..., 1] = [[1, 0, 1],
                       [0, 0, 0],
                       [0, 0, 0]]
    image[1][..., 1] = [[1, 1, 1],
                       [0, 0, 0],
                       [0, 0, 0]]
    image[2][..., 1] = [[1, 0, 1],
                       [0, 0, 0],
                       [0, 0, 0]]
    # shift along the first (slice) axis
    res = apply_affine_transform(
        image, shift=(1, 0, 0), mode='constant', cval=0, use_3d_transform=True)
    expected = np.zeros((3, 3, 3, 2))
    # 3d T in the first channel
    # expected[0][..., 0] = [[0, 0, 0],
    #                        [0, 1, 0],
    #                        [0, 0, 0]]
    expected[0][..., 0] = [[0, 0, 0],
                          [0, 1, 0],
                          [0, 0, 0]]
    expected[1][..., 0] = [[0, 0, 0],
                          [1, 1, 1],
                          [0, 0, 0]]
    # 3d H in the second channel
    expected[0][..., 1] = [[1, 1, 1],
                          [0, 0, 0],
                          [0, 0, 0]]
    expected[1][..., 1] = [[1, 0, 1],
                          [0, 0, 0],
                          [0, 0, 0]]
    assert np.allclose(res, expected)
    assert np.all(np.rint(res) == expected)
    # shift along the second (row) axis
    res = apply_affine_transform(
        image, shift=(0, 1, 0), mode='constant', cval=0, use_3d_transform=True)
    expected = np.zeros((3, 3, 3, 2))
    expected[0][..., 0] = [[0, 1, 0],
                          [0, 0, 0],
                          [0, 0, 0], ]
    expected[1][..., 0] = [[0, 1, 0],
                          [0, 0, 0],
                          [0, 0, 0]]
    expected[2][..., 0] = [[1, 1, 1],
                          [0, 0, 0],
                          [0, 0, 0]]
    assert np.allclose(res, expected)
    assert np.all(np.rint(res) == expected)
    # shift along the third (column) axis
    res = apply_affine_transform(
        image, shift=(0, 0, 1), mode='constant', cval=0, use_3d_transform=True)
    expected = np.zeros((3, 3, 3, 2))
    # 3d T in the first channel
    expected[0][..., 0] = [[0, 0, 0],
                          [1, 0, 0],
                          [0, 0, 0]]
    expected[1][..., 0] = [[0, 0, 0],
                          [1, 0, 0],
                          [0, 0, 0]]
    expected[2][..., 0] = [[0, 0, 0],
                          [1, 1, 0],
                          [0, 0, 0]]
    # 3d H in the second channel
    expected[0][..., 1] = [[0, 1, 0],
                          [0, 0, 0],
                          [0, 0, 0]]
    expected[1][..., 1] = [[1, 1, 0],
                          [0, 0, 0],
                          [0, 0, 0]]
    expected[2][..., 1] = [[0, 1, 0],
                          [0, 0, 0],
                          [0, 0, 0]]
    assert np.allclose(res, expected)
    assert np.all(np.rint(res) == expected)
def test_apply_affine_transform_3d_correct():
    """Check the generic and dedicated 3D affine paths produce equal output.

    For several (theta, rotation_axis, shift) combinations, the result with
    ``use_3d_transform=True`` must match the default code path.
    """
    cases = [
        (30, 0, [-1, 1, 2]),
        (-30, 1, [2, -1, 1]),
        (90, 2, [1, 2, -1]),
    ]
    for theta, axis, shift in cases:
        # Fresh random volume per case, as in the original test.
        volume = np.random.random((4, 4, 4, 2))
        generic = apply_affine_transform(
            volume, theta=theta, rotation_axis=axis,
            shift=shift, mode='constant',
            cval=0)
        dedicated = apply_affine_transform(
            volume, theta=theta, rotation_axis=axis,
            shift=shift, mode='constant',
            cval=0, use_3d_transform=True)
        assert np.allclose(generic, dedicated)
def test_flip_2d():
    """apply_flip on a two-channel 2D image, for single and combined axes."""
    image = np.zeros((3, 3, 2))
    image[..., 0] = [[1, 1, 0],
                     [1, 1, 0],
                     [0, 0, 0]]
    image[..., 1] = [[1, 1, 1],
                     [1, 1, 0],
                     [1, 0, 0]]
    # axis -> (expected channel 0, expected channel 1)
    cases = [
        (0, ([[0, 0, 0], [1, 1, 0], [1, 1, 0]],
             [[1, 0, 0], [1, 1, 0], [1, 1, 1]])),
        (1, ([[0, 1, 1], [0, 1, 1], [0, 0, 0]],
             [[1, 1, 1], [0, 1, 1], [0, 0, 1]])),
        ((0, 1), ([[0, 0, 0], [0, 1, 1], [0, 1, 1]],
                  [[0, 0, 1], [0, 1, 1], [1, 1, 1]])),
    ]
    for axis, (chan0, chan1) in cases:
        expected = np.zeros((3, 3, 2))
        expected[..., 0] = chan0
        expected[..., 1] = chan1
        assert np.all(apply_flip(image, axis) == expected)
def test_flip_3d():
    """apply_flip on a single-channel 3D volume, for each axis combination."""
    image = np.zeros((3, 3, 3, 1))
    image[0][..., 0] = [[1, 0, 0], [1, 0, 0], [1, 0, 0]]
    image[1][..., 0] = [[1, 0, 0], [1, 0, 0], [0, 0, 0]]
    image[2][..., 0] = [[1, 0, 0], [0, 0, 0], [0, 0, 0]]
    # axis -> expected depth planes (depth 0, 1, 2), same order of cases
    # as the original test.
    cases = [
        (0, ([[1, 0, 0], [0, 0, 0], [0, 0, 0]],
             [[1, 0, 0], [1, 0, 0], [0, 0, 0]],
             [[1, 0, 0], [1, 0, 0], [1, 0, 0]])),
        (1, ([[1, 0, 0], [1, 0, 0], [1, 0, 0]],
             [[0, 0, 0], [1, 0, 0], [1, 0, 0]],
             [[0, 0, 0], [0, 0, 0], [1, 0, 0]])),
        ((0, 1), ([[0, 0, 0], [0, 0, 0], [1, 0, 0]],
                  [[0, 0, 0], [1, 0, 0], [1, 0, 0]],
                  [[1, 0, 0], [1, 0, 0], [1, 0, 0]])),
        ((0, 1, 2), ([[0, 0, 0], [0, 0, 0], [0, 0, 1]],
                     [[0, 0, 0], [0, 0, 1], [0, 0, 1]],
                     [[0, 0, 1], [0, 0, 1], [0, 0, 1]])),
        (2, ([[0, 0, 1], [0, 0, 1], [0, 0, 1]],
             [[0, 0, 1], [0, 0, 1], [0, 0, 0]],
             [[0, 0, 1], [0, 0, 0], [0, 0, 0]])),
    ]
    for axis, planes in cases:
        expected = np.zeros((3, 3, 3, 1))
        for depth, plane in enumerate(planes):
            expected[depth][..., 0] = plane
        assert np.all(apply_flip(image, axis=axis) == expected)
| 33.273927
| 79
| 0.337582
| 2,547
| 20,164
| 2.602277
| 0.025128
| 0.185878
| 0.176976
| 0.148461
| 0.960169
| 0.930899
| 0.915661
| 0.887145
| 0.864062
| 0.795112
| 0
| 0.151867
| 0.473914
| 20,164
| 605
| 80
| 33.328926
| 0.472945
| 0.036352
| 0
| 0.825364
| 0
| 0
| 0.006597
| 0
| 0
| 0
| 0
| 0
| 0.089397
| 1
| 0.024948
| false
| 0
| 0.006237
| 0
| 0.031185
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
14b3b0fdbe92eec7c70d1a20742ce6236719a2d5
| 134
|
py
|
Python
|
__init__.py
|
zahrag/BRLVBVC
|
47c61eb69fbe96789b6a84c1510df0426bbcbfcc
|
[
"MIT"
] | 1
|
2022-03-17T01:41:04.000Z
|
2022-03-17T01:41:04.000Z
|
__init__.py
|
zahrag/BRLVBVC
|
47c61eb69fbe96789b6a84c1510df0426bbcbfcc
|
[
"MIT"
] | null | null | null |
__init__.py
|
zahrag/BRLVBVC
|
47c61eb69fbe96789b6a84c1510df0426bbcbfcc
|
[
"MIT"
] | null | null | null |
from . import ZGH_BRL_ACL as ZGH_BRL
from .ZGH_BRL_Agent import ZGH_BRL_Agent as ZGH_BRL_Agent
from .training_ZGH import training_ZGH
| 33.5
| 57
| 0.858209
| 26
| 134
| 4
| 0.307692
| 0.288462
| 0.317308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119403
| 134
| 3
| 58
| 44.666667
| 0.881356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
14bcfec6c5c6ce5d70bd350c6d77f014754a92ba
| 16,736
|
py
|
Python
|
Latest/venv/Lib/site-packages/pyface/tests/test_ui_traits.py
|
adamcvj/SatelliteTracker
|
49a8f26804422fdad6f330a5548e9f283d84a55d
|
[
"Apache-2.0"
] | 1
|
2022-01-09T20:04:31.000Z
|
2022-01-09T20:04:31.000Z
|
Latest/venv/Lib/site-packages/pyface/tests/test_ui_traits.py
|
adamcvj/SatelliteTracker
|
49a8f26804422fdad6f330a5548e9f283d84a55d
|
[
"Apache-2.0"
] | 1
|
2022-02-15T12:01:57.000Z
|
2022-03-24T19:48:47.000Z
|
Latest/venv/Lib/site-packages/pyface/tests/test_ui_traits.py
|
adamcvj/SatelliteTracker
|
49a8f26804422fdad6f330a5548e9f283d84a55d
|
[
"Apache-2.0"
] | null | null | null |
#------------------------------------------------------------------------------
#
# Copyright (c) 2016, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Enthought Developers
#
#------------------------------------------------------------------------------
import os
import unittest
from traits.api import HasTraits, TraitError
from traits.testing.unittest_tools import UnittestTools
try:
from traits.trait_handlers import (
CALLABLE_AND_ARGS_DEFAULT_VALUE,
UNSPECIFIED_DEFAULT_VALUE
)
except ImportError:
UNSPECIFIED_DEFAULT_VALUE = -1
CALLABLE_AND_ARGS_DEFAULT_VALUE = 7
from ..i_image_resource import IImageResource
from ..image_resource import ImageResource
from ..ui_traits import (Border, HasBorder, HasMargin, Image, Margin,
image_resource_cache, image_bitmap_cache)
IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'images', 'core.png')
class ImageClass(HasTraits):
    # Fixture holding the Image trait under test; the tests below pass it
    # ImageResource instances and string specifiers.
    image = Image
class HasMarginClass(HasTraits):
    # Fixture holding the HasMargin trait under test.
    margin = HasMargin
class HasBorderClass(HasTraits):
    # Fixture holding the HasBorder trait under test.
    border = HasBorder
class TestImageTrait(unittest.TestCase, UnittestTools):
    """Tests for the Image trait's conversion of image specifiers."""

    def setUp(self):
        # Start every test with empty image caches so lookups cannot be
        # satisfied by a previous test's results.
        image_resource_cache.clear()
        image_bitmap_cache.clear()
        # Also forget any cached "image not found" placeholder.
        ImageResource._image_not_found = None

    def test_defaults(self):
        # An unset Image trait is None.
        self.assertIsNone(ImageClass().image)

    def test_init_local_image(self):
        from pyface.image_resource import ImageResource

        obj = ImageClass(image=ImageResource('core.png'))

        self.assertIsInstance(obj.image, ImageResource)
        self.assertEqual(obj.image.name, 'core.png')
        self.assertEqual(
            obj.image.absolute_path, os.path.abspath(IMAGE_PATH))

    def test_init_pyface_image(self):
        from pyface.image_resource import ImageResource

        obj = ImageClass(image='about')
        # Force the resource to load; presumably this is what populates
        # _ref.data checked below -- call retained from the original test.
        obj.image.create_image()

        self.assertIsInstance(obj.image, ImageResource)
        self.assertEqual(obj.image.name, 'about')
        self.assertIsNone(obj.image._image_not_found)
        self.assertIsNotNone(obj.image._ref.data)

    def test_init_pyface_image_library(self):
        from pyface.image_resource import ImageResource

        obj = ImageClass(image='@icons:dialog-warning')

        self.assertIsInstance(obj.image, ImageResource)
        self.assertEqual(obj.image.name, 'dialog-warning.png')
        self.assertIsNone(obj.image._image_not_found)
        self.assertEqual(obj.image._ref.file_name, 'dialog-warning.png')
        self.assertEqual(obj.image._ref.volume_name, 'icons')
class TestMargin(unittest.TestCase):
    """Tests for Margin's positional-argument initialisation rules."""

    def _check_sides(self, margin, top, bottom, left, right):
        # Assert all four sides of a Margin in one call.
        self.assertEqual(margin.top, top)
        self.assertEqual(margin.bottom, bottom)
        self.assertEqual(margin.left, left)
        self.assertEqual(margin.right, right)

    def test_defaults(self):
        self._check_sides(Margin(), 0, 0, 0, 0)

    def test_init_one_arg(self):
        # One value applies to all four sides.
        self._check_sides(Margin(4), 4, 4, 4, 4)

    def test_init_two_args(self):
        # Two values: first is left/right, second is top/bottom.
        self._check_sides(Margin(4, 2), 2, 2, 4, 4)

    def test_init_four_args(self):
        # Four values: left, right, top, bottom.
        self._check_sides(Margin(4, 2, 3, 1), 3, 1, 4, 2)
class TestHasMargin(unittest.TestCase, UnittestTools):
    """Tests for the HasMargin trait: defaults, init, and assignment."""

    def _check_sides(self, margin, top, bottom, left, right):
        # Assert all four sides of a Margin in one call.
        self.assertEqual(margin.top, top)
        self.assertEqual(margin.bottom, bottom)
        self.assertEqual(margin.left, left)
        self.assertEqual(margin.right, right)

    def test_defaults(self):
        self._check_sides(HasMarginClass().margin, 0, 0, 0, 0)

    def test_unspecified_default(self):
        trait = HasMargin()
        trait.default_value_type = UNSPECIFIED_DEFAULT_VALUE
        dvt, dv = trait.get_default_value()
        self.assertEqual(dvt, CALLABLE_AND_ARGS_DEFAULT_VALUE)
        # The default is expressed as (callable, args, kwargs).
        expected = (
            Margin,
            (),
            {'top': 0, 'bottom': 0, 'left': 0, 'right': 0},
        )
        self.assertEqual(dv, expected)

    def test_default_int(self):
        class HasMarginClass(HasTraits):
            margin = HasMargin(4)

        self._check_sides(HasMarginClass().margin, 4, 4, 4, 4)

    def test_default_one_tuple(self):
        class HasMarginClass(HasTraits):
            margin = HasMargin((4,))

        self._check_sides(HasMarginClass().margin, 4, 4, 4, 4)

    def test_default_two_tuple(self):
        class HasMarginClass(HasTraits):
            margin = HasMargin((4, 2))

        self._check_sides(HasMarginClass().margin, 2, 2, 4, 4)

    def test_default_four_tuple(self):
        class HasMarginClass(HasTraits):
            margin = HasMargin((4, 2, 3, 1))

        self._check_sides(HasMarginClass().margin, 3, 1, 4, 2)

    def test_default_margin(self):
        default = Margin(left=4, right=2, top=3, bottom=1)

        class HasMarginClass(HasTraits):
            margin = HasMargin(default)

        self._check_sides(HasMarginClass().margin, 3, 1, 4, 2)

    def test_init_int(self):
        self._check_sides(HasMarginClass(margin=4).margin, 4, 4, 4, 4)

    def test_init_one_tuple(self):
        self._check_sides(HasMarginClass(margin=(4,)).margin, 4, 4, 4, 4)

    def test_init_two_tuple(self):
        self._check_sides(HasMarginClass(margin=(4, 2)).margin, 2, 2, 4, 4)

    def test_init_four_tuple(self):
        self._check_sides(
            HasMarginClass(margin=(4, 2, 3, 1)).margin, 3, 1, 4, 2)

    def test_init_margin(self):
        margin = Margin()
        self.assertEqual(HasMarginClass(margin=margin).margin, margin)

    def test_set_int(self):
        has_margin = HasMarginClass()
        with self.assertTraitChanges(has_margin, 'margin', 1):
            has_margin.margin = 4
        self._check_sides(has_margin.margin, 4, 4, 4, 4)

    def test_set_one_tuple(self):
        has_margin = HasMarginClass()
        with self.assertTraitChanges(has_margin, 'margin', 1):
            has_margin.margin = (4,)
        self._check_sides(has_margin.margin, 4, 4, 4, 4)

    def test_set_two_tuple(self):
        has_margin = HasMarginClass()
        with self.assertTraitChanges(has_margin, 'margin', 1):
            has_margin.margin = (4, 2)
        self._check_sides(has_margin.margin, 2, 2, 4, 4)

    def test_set_four_tuple(self):
        has_margin = HasMarginClass()
        with self.assertTraitChanges(has_margin, 'margin', 1):
            has_margin.margin = (4, 2, 3, 1)
        self._check_sides(has_margin.margin, 3, 1, 4, 2)

    def test_set_margin(self):
        margin = Margin()
        has_margin = HasMarginClass()
        with self.assertTraitChanges(has_margin, 'margin', 1):
            has_margin.margin = margin
        self.assertEqual(has_margin.margin, margin)

    def test_set_invalid(self):
        has_margin = HasMarginClass()
        with self.assertRaises(TraitError):
            has_margin.margin = (1, 2, 3)
class TestBorder(unittest.TestCase):
    """Tests for Border's positional-argument initialisation rules."""

    def _check_sides(self, border, top, bottom, left, right):
        # Assert all four sides of a Border in one call.
        self.assertEqual(border.top, top)
        self.assertEqual(border.bottom, bottom)
        self.assertEqual(border.left, left)
        self.assertEqual(border.right, right)

    def test_defaults(self):
        self._check_sides(Border(), 0, 0, 0, 0)

    def test_init_one_arg(self):
        # One value applies to all four sides.
        self._check_sides(Border(4), 4, 4, 4, 4)

    def test_init_two_args(self):
        # Two values: first is left/right, second is top/bottom.
        self._check_sides(Border(4, 2), 2, 2, 4, 4)

    def test_init_four_args(self):
        # Four values: left, right, top, bottom.
        self._check_sides(Border(4, 2, 3, 1), 3, 1, 4, 2)
class TestHasBorder(unittest.TestCase, UnittestTools):
    """Tests for the HasBorder trait: defaults, init, and assignment."""

    def _check_sides(self, border, top, bottom, left, right):
        # Assert all four sides of a Border in one call.
        self.assertEqual(border.top, top)
        self.assertEqual(border.bottom, bottom)
        self.assertEqual(border.left, left)
        self.assertEqual(border.right, right)

    def test_defaults(self):
        self._check_sides(HasBorderClass().border, 0, 0, 0, 0)

    def test_unspecified_default(self):
        trait = HasBorder()
        trait.default_value_type = UNSPECIFIED_DEFAULT_VALUE
        dvt, dv = trait.get_default_value()
        self.assertEqual(dvt, CALLABLE_AND_ARGS_DEFAULT_VALUE)
        # The default is expressed as (callable, args, kwargs).
        expected = (
            Border,
            (),
            {'top': 0, 'bottom': 0, 'left': 0, 'right': 0},
        )
        self.assertEqual(dv, expected)

    def test_default_int(self):
        class HasBorderClass(HasTraits):
            border = HasBorder(4)

        self._check_sides(HasBorderClass().border, 4, 4, 4, 4)

    def test_default_one_tuple(self):
        class HasBorderClass(HasTraits):
            border = HasBorder((4,))

        self._check_sides(HasBorderClass().border, 4, 4, 4, 4)

    def test_default_two_tuple(self):
        class HasBorderClass(HasTraits):
            border = HasBorder((4, 2))

        self._check_sides(HasBorderClass().border, 2, 2, 4, 4)

    def test_default_four_tuple(self):
        class HasBorderClass(HasTraits):
            border = HasBorder((4, 2, 3, 1))

        self._check_sides(HasBorderClass().border, 3, 1, 4, 2)

    def test_default_border(self):
        # As in the original test, the default is supplied as a Margin.
        default = Margin(left=4, right=2, top=3, bottom=1)

        class HasBorderClass(HasTraits):
            border = HasBorder(default)

        self._check_sides(HasBorderClass().border, 3, 1, 4, 2)

    def test_init_int(self):
        self._check_sides(HasBorderClass(border=4).border, 4, 4, 4, 4)

    def test_init_one_tuple(self):
        self._check_sides(HasBorderClass(border=(4,)).border, 4, 4, 4, 4)

    def test_init_two_tuple(self):
        self._check_sides(HasBorderClass(border=(4, 2)).border, 2, 2, 4, 4)

    def test_init_four_tuple(self):
        self._check_sides(
            HasBorderClass(border=(4, 2, 3, 1)).border, 3, 1, 4, 2)

    def test_init_border(self):
        border = Border()
        self.assertEqual(HasBorderClass(border=border).border, border)

    def test_set_int(self):
        has_border = HasBorderClass()
        with self.assertTraitChanges(has_border, 'border', 1):
            has_border.border = 4
        self._check_sides(has_border.border, 4, 4, 4, 4)

    def test_set_one_tuple(self):
        has_border = HasBorderClass()
        with self.assertTraitChanges(has_border, 'border', 1):
            has_border.border = (4,)
        self._check_sides(has_border.border, 4, 4, 4, 4)

    def test_set_two_tuple(self):
        has_border = HasBorderClass()
        with self.assertTraitChanges(has_border, 'border', 1):
            has_border.border = (4, 2)
        self._check_sides(has_border.border, 2, 2, 4, 4)

    def test_set_four_tuple(self):
        has_border = HasBorderClass()
        with self.assertTraitChanges(has_border, 'border', 1):
            has_border.border = (4, 2, 3, 1)
        self._check_sides(has_border.border, 3, 1, 4, 2)

    def test_set_border(self):
        border = Border()
        has_border = HasBorderClass()
        with self.assertTraitChanges(has_border, 'border', 1):
            has_border.border = border
        self.assertEqual(has_border.border, border)

    def test_set_invalid(self):
        has_border = HasBorderClass()
        with self.assertRaises(TraitError):
            has_border.border = (1, 2, 3)
| 29.779359
| 80
| 0.63701
| 1,937
| 16,736
| 5.351058
| 0.078988
| 0.228654
| 0.145876
| 0.061746
| 0.868307
| 0.830391
| 0.799711
| 0.786782
| 0.758225
| 0.717125
| 0
| 0.019487
| 0.251852
| 16,736
| 561
| 81
| 29.832442
| 0.808322
| 0.037763
| 0
| 0.743655
| 0
| 0
| 0.012308
| 0.001305
| 0
| 0
| 0
| 0
| 0.449239
| 1
| 0.124365
| false
| 0
| 0.030457
| 0
| 0.208122
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1af894cc7fe63a0602d9583982f2e606eeb81a24
| 735
|
py
|
Python
|
test_wandb.py
|
zhihanyang2022/drqn
|
ac2482e3b42094e6242c042583dbbd9c98e4750b
|
[
"MIT"
] | 5
|
2021-03-28T14:12:40.000Z
|
2021-11-19T20:46:10.000Z
|
test_wandb.py
|
zhihanyang2022/drqn
|
ac2482e3b42094e6242c042583dbbd9c98e4750b
|
[
"MIT"
] | null | null | null |
test_wandb.py
|
zhihanyang2022/drqn
|
ac2482e3b42094e6242c042583dbbd9c98e4750b
|
[
"MIT"
] | null | null | null |
import wandb
import numpy as np

# The original script repeated the init/log/finish sequence three times and
# was inconsistent on the third run (no `reinit=True`, never assigned or
# `finish()`ed).  Drive all three test runs from one loop so each run is
# configured and closed identically.
for run_name in ('test-01', 'test-02', 'test-03'):
    run = wandb.init(
        project="drqn",
        entity='pomdpr',
        group='test',
        # Disable system-stats collection for these throwaway test runs.
        settings=wandb.Settings(_disable_stats=True),
        name=run_name,
        # Allow multiple runs within the same process.
        reinit=True,
    )
    # Log a noisy ramp so each run shows a simple, recognisable curve.
    for i in range(100):
        wandb.log({'y': i + np.random.normal()})
    run.finish()
| 17.926829
| 49
| 0.617687
| 103
| 735
| 4.349515
| 0.31068
| 0.060268
| 0.107143
| 0.133929
| 0.928571
| 0.928571
| 0.928571
| 0.928571
| 0.928571
| 0.928571
| 0
| 0.025467
| 0.198639
| 735
| 41
| 50
| 17.926829
| 0.735144
| 0
| 0
| 0.727273
| 0
| 0
| 0.089674
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.060606
| 0
| 0.060606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
21131d2c67a9cef5f2022d821c9a8950c36267c8
| 11,143
|
py
|
Python
|
source/deepsecurity/api/agent_version_control_profiles_api.py
|
felipecosta09/cloudone-workload-controltower-lifecycle
|
7927c84d164058b034fc872701b5ee117641f4d1
|
[
"Apache-2.0"
] | 1
|
2021-10-30T16:40:09.000Z
|
2021-10-30T16:40:09.000Z
|
source/deepsecurity/api/agent_version_control_profiles_api.py
|
felipecosta09/cloudone-workload-controltower-lifecycle
|
7927c84d164058b034fc872701b5ee117641f4d1
|
[
"Apache-2.0"
] | 1
|
2021-07-28T20:19:03.000Z
|
2021-07-28T20:19:03.000Z
|
source/deepsecurity/api/agent_version_control_profiles_api.py
|
felipecosta09/cloudone-workload-controltower-lifecycle
|
7927c84d164058b034fc872701b5ee117641f4d1
|
[
"Apache-2.0"
] | 1
|
2021-10-30T16:40:02.000Z
|
2021-10-30T16:40:02.000Z
|
# coding: utf-8
"""
Trend Micro Deep Security API
Copyright 2018 - 2020 Trend Micro Incorporated.<br/>Get protected, stay secured, and keep informed with Trend Micro Deep Security's new RESTful API. Access system data and manage security configurations to automate your security workflows and integrate Deep Security into your CI/CD pipeline. # noqa: E501
OpenAPI spec version: 12.5.841
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from deepsecurity.api_client import ApiClient
class AgentVersionControlProfilesApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when none is supplied.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def describe_agent_version_control_profile(self, agent_version_control_profile_id, api_version, **kwargs):  # noqa: E501
        """Describe an agent version control profile  # noqa: E501

        Describe an Agent Version Control Profile by ID.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.describe_agent_version_control_profile(agent_version_control_profile_id, api_version, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int agent_version_control_profile_id: The ID number of the agent version control profile to describe. (required)
        :param str api_version: The version of the api being called. (required)
        :return: AgentVersionControlProfile
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # This convenience wrapper always returns just the response body,
        # delegating to the *_with_http_info variant for the actual call.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.describe_agent_version_control_profile_with_http_info(agent_version_control_profile_id, api_version, **kwargs)  # noqa: E501
        else:
            (data) = self.describe_agent_version_control_profile_with_http_info(agent_version_control_profile_id, api_version, **kwargs)  # noqa: E501
            return data

    def describe_agent_version_control_profile_with_http_info(self, agent_version_control_profile_id, api_version, **kwargs):  # noqa: E501
        """Describe an agent version control profile  # noqa: E501

        Describe an Agent Version Control Profile by ID.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.describe_agent_version_control_profile_with_http_info(agent_version_control_profile_id, api_version, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int agent_version_control_profile_id: The ID number of the agent version control profile to describe. (required)
        :param str api_version: The version of the api being called. (required)
        :return: AgentVersionControlProfile
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['agent_version_control_profile_id', 'api_version']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument not in the generated whitelist.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method describe_agent_version_control_profile" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'agent_version_control_profile_id' is set
        if ('agent_version_control_profile_id' not in params or
                params['agent_version_control_profile_id'] is None):
            raise ValueError("Missing the required parameter `agent_version_control_profile_id` when calling `describe_agent_version_control_profile`")  # noqa: E501
        # verify the required parameter 'api_version' is set
        if ('api_version' not in params or
                params['api_version'] is None):
            raise ValueError("Missing the required parameter `api_version` when calling `describe_agent_version_control_profile`")  # noqa: E501

        # The profile ID must be numeric (server path pattern `/\d+/`).
        if 'agent_version_control_profile_id' in params and not re.search('\\d+', str(params['agent_version_control_profile_id'])):  # noqa: E501
            raise ValueError("Invalid value for parameter `agent_version_control_profile_id` when calling `describe_agent_version_control_profile`, must conform to the pattern `/\\d+/`")  # noqa: E501
        collection_formats = {}

        path_params = {}
        if 'agent_version_control_profile_id' in params:
            path_params['agentVersionControlProfileID'] = params['agent_version_control_profile_id']  # noqa: E501

        query_params = []

        header_params = {}
        # The API version is conveyed as an `api-version` request header.
        if 'api_version' in params:
            header_params['api-version'] = params['api_version']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['DefaultAuthentication']  # noqa: E501

        return self.api_client.call_api(
            '/agentversioncontrolprofiles/{agentVersionControlProfileID}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AgentVersionControlProfile',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def list_agent_version_control_profiles(self, api_version, **kwargs):  # noqa: E501
        """List agent version control profiles  # noqa: E501

        Lists all agent version control profiles.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_agent_version_control_profiles(api_version, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str api_version: The version of the api being called. (required)
        :return: AgentVersionControlProfiles
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # This convenience wrapper always returns just the response body,
        # delegating to the *_with_http_info variant for the actual call.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.list_agent_version_control_profiles_with_http_info(api_version, **kwargs)  # noqa: E501
        else:
            (data) = self.list_agent_version_control_profiles_with_http_info(api_version, **kwargs)  # noqa: E501
            return data

    def list_agent_version_control_profiles_with_http_info(self, api_version, **kwargs):  # noqa: E501
        """List agent version control profiles  # noqa: E501

        Lists all agent version control profiles.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_agent_version_control_profiles_with_http_info(api_version, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str api_version: The version of the api being called. (required)
        :return: AgentVersionControlProfiles
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['api_version']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument not in the generated whitelist.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_agent_version_control_profiles" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'api_version' is set
        if ('api_version' not in params or
                params['api_version'] is None):
            raise ValueError("Missing the required parameter `api_version` when calling `list_agent_version_control_profiles`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}
        # The API version is conveyed as an `api-version` request header.
        if 'api_version' in params:
            header_params['api-version'] = params['api_version']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['DefaultAuthentication']  # noqa: E501

        return self.api_client.call_api(
            '/agentversioncontrolprofiles', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AgentVersionControlProfiles',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 45.855967
| 311
| 0.649017
| 1,286
| 11,143
| 5.335925
| 0.148523
| 0.080443
| 0.127368
| 0.128825
| 0.874964
| 0.863597
| 0.834596
| 0.813757
| 0.761586
| 0.754299
| 0
| 0.016175
| 0.273176
| 11,143
| 242
| 312
| 46.045455
| 0.83109
| 0.347842
| 0
| 0.709677
| 0
| 0
| 0.252443
| 0.132101
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040323
| false
| 0
| 0.032258
| 0
| 0.129032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
21547feae364678759e20c48223c9750682d6458
| 5,637
|
py
|
Python
|
nca47/api/controllers/v1/firewall/vrf.py
|
WosunOO/nca_xianshu
|
bbb548cb67b755a57528796d4c5a66ee68df2678
|
[
"Apache-2.0"
] | null | null | null |
nca47/api/controllers/v1/firewall/vrf.py
|
WosunOO/nca_xianshu
|
bbb548cb67b755a57528796d4c5a66ee68df2678
|
[
"Apache-2.0"
] | null | null | null |
nca47/api/controllers/v1/firewall/vrf.py
|
WosunOO/nca_xianshu
|
bbb548cb67b755a57528796d4c5a66ee68df2678
|
[
"Apache-2.0"
] | null | null | null |
from oslo_serialization import jsonutils as json
from nca47.api.controllers.v1 import base
from nca47.common.i18n import _
from nca47.common.i18n import _LI, _LE
from nca47.common.exception import Nca47Exception
from oslo_log import log
from nca47.api.controllers.v1 import tools
from nca47.manager.central import CentralManager
from nca47.common.exception import BadRequest
from oslo_messaging import RemoteError
LOG = log.getLogger(__name__)
class VRFController(base.BaseRestController):
    """REST controller for firewall VRF resources.

    Exposes create/remove/list/show operations.  Every operation follows
    the same pattern -- validate the JSON request body, delegate to the
    central manager, and translate exceptions into HTTP error payloads --
    so the shared flow lives in the private helper :meth:`_handle_request`.
    """

    def __init__(self):
        # Central manager proxies the actual VRF operations to the backend.
        self.manager = CentralManager.get_instance()
        super(VRFController, self).__init__()

    def create(self, req, *args, **kwargs):
        """Create a VRF described by the JSON request body."""
        valid_attributes = ['tenant_id', 'dc_name', 'network_zone', 'name',
                            'vrfInterface']
        return self._handle_request(req, args, "VRF create", valid_attributes,
                                    self.manager.create_vrf, "input")

    def remove(self, req, *args, **kwargs):
        """Delete the VRF identified by the JSON request body."""
        valid_attributes = ['tenant_id', 'dc_name', 'network_zone', 'id']
        return self._handle_request(req, args, "VRF del", valid_attributes,
                                    self.manager.del_vrf, "delete")

    def list(self, req, *args, **kwargs):
        """List all VRFs matching the JSON request body."""
        valid_attributes = ['tenant_id', 'dc_name', 'network_zone']
        return self._handle_request(req, args, "VRF getAll", valid_attributes,
                                    self.manager.get_vrfs, "get_all")

    def show(self, req, *args, **kwargs):
        """Show a single VRF identified by the JSON request body."""
        valid_attributes = ['tenant_id', 'dc_name', 'network_zone', 'id']
        return self._handle_request(req, args, "VRF get", valid_attributes,
                                    self.manager.get_vrf, "get")

    def _handle_request(self, req, args, resource, valid_attributes,
                        operation, verb):
        """Validate the request and dispatch *operation*.

        :param req: incoming request (provides url, context and body)
        :param args: extra positional path arguments; more than one is a
                     client error for these endpoints
        :param resource: resource label used in the BadRequest message
        :param valid_attributes: keys the JSON body must contain
        :param operation: manager callable ``operation(context, values)``
        :param verb: short action name used in the info log line
        :return: the manager response on success, or an error-info dict
                 (with ``self.response.status`` set) on failure
        """
        try:
            url = req.url
            if len(args) > 1:
                raise BadRequest(resource=resource, msg=url)
            context = req.context
            body_values = json.loads(req.body)
            values = tools.validat_values(body_values, valid_attributes)
            # Lazy %-style args: the dict is only formatted when INFO
            # logging is actually enabled.
            LOG.info(_LI("%(verb)s the vrf values with dic format "
                         "is %(json)s"),
                     {"verb": verb, "json": body_values})
            return operation(context, values)
        except Nca47Exception as e:
            # Application-level error: report its own HTTP code/message.
            LOG.error(_LE('Nca47Exception! error info: %s'), e.message)
            self.response.status = e.code
            return tools.ret_info(e.code, e.message)
        except RemoteError as e:
            # RPC failure: surface the remote error value as a 500.
            self.response.status = 500
            return tools.ret_info(self.response.status, e.value)
        except Exception as e:
            # Catch-all boundary handler: log and return a generic 500.
            LOG.error(_LE('Exception! error info: %s'), e.message)
            self.response.status = 500
            return tools.ret_info(self.response.status, e.message)
| 44.039063
| 79
| 0.588256
| 666
| 5,637
| 4.852853
| 0.136637
| 0.074257
| 0.111386
| 0.070545
| 0.88026
| 0.846225
| 0.827042
| 0.815903
| 0.815903
| 0.815903
| 0
| 0.017067
| 0.313997
| 5,637
| 127
| 80
| 44.385827
| 0.818723
| 0.029271
| 0
| 0.706897
| 0
| 0
| 0.070605
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043103
| false
| 0
| 0.086207
| 0
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d0906ea05fd4461c6d773aff893933f39de41a4
| 22,155
|
py
|
Python
|
tests/e2e/basic/performance_tests/wifi_capacity_test/open/test_nat_mode.py
|
DYeag/wlan-testing
|
81e879d04ea3c6a55d14a330d461d8914507e3b2
|
[
"BSD-3-Clause"
] | 7
|
2020-08-19T16:45:46.000Z
|
2022-02-10T09:55:22.000Z
|
tests/e2e/basic/performance_tests/wifi_capacity_test/open/test_nat_mode.py
|
DYeag/wlan-testing
|
81e879d04ea3c6a55d14a330d461d8914507e3b2
|
[
"BSD-3-Clause"
] | 47
|
2020-12-20T16:06:03.000Z
|
2022-03-23T03:01:22.000Z
|
tests/e2e/basic/performance_tests/wifi_capacity_test/open/test_nat_mode.py
|
DYeag/wlan-testing
|
81e879d04ea3c6a55d14a330d461d8914507e3b2
|
[
"BSD-3-Clause"
] | 9
|
2021-02-04T22:32:06.000Z
|
2021-12-14T17:45:51.000Z
|
"""
Performance Test: Wifi Capacity Test : NAT Mode
pytest -m "wifi_capacity_test and NAT"
"""
import os
import pytest
import allure
# Collect-time marks applied to every test in this module.
pytestmark = [pytest.mark.wifi_capacity_test, pytest.mark.nat]
# """pytest.mark.usefixtures("setup_test_run")"""]
# Controller profile consumed by the parametrized ``setup_profiles``
# fixture below: a single open (no-auth) SSID applied to both the 2.4 GHz
# and 5 GHz radios, with the AP in NAT mode and RADIUS disabled.
setup_params_general_dual_band = {
    "mode": "NAT",
    "ssid_modes": {
        "open": [
            {"ssid_name": "ssid_open_dual_band", "appliedRadios": ["2G", "5G"]}
        ]
    },
    "rf": {},
    "radius": False
}
# NOTE(review): lf_tools / lf_test appear to be LANforge helper fixtures
# (station creation, chamber view, wifi-capacity runs) -- confirm their
# exact contract in the suite's conftest.
@allure.feature("NAT MODE CLIENT CONNECTIVITY")
@pytest.mark.parametrize(
    'setup_profiles',
    [setup_params_general_dual_band],
    indirect=True,
    scope="class"
)
@pytest.mark.usefixtures("setup_profiles")
@pytest.mark.open
@pytest.mark.twog
@pytest.mark.fiveg
@pytest.mark.dual_band
class TestWifiCapacityNATModeDualBand(object):
    """ Wifi Capacity Test NAT mode
        pytest -m "wifi_capacity_test and NAT"

    Runs the wifi-capacity test against the dual-band open SSID in NAT
    mode, once per traffic shape: TCP download, UDP download, TCP
    bidirectional and UDP bidirectional.  Each test xfails (instead of
    hard-failing) when the SSID is absent from the AP's VIF state.
    """
    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-3695", name="WIFI-3695")
    @pytest.mark.open
    @pytest.mark.tcp_download
    def test_client_open_nat_tcp_dl(self, get_vif_state, lf_tools, setup_profiles,
                                    lf_test, station_names_twog, create_lanforge_chamberview_dut,
                                    get_configuration):
        """ Wifi Capacity Test NAT mode
            pytest -m "wifi_capacity_test and nat and open and dual_band"

        TCP download-only run: 1Gbps down / 0 up, duration 60000
        (presumably milliseconds -- confirm against lf_test).
        """
        profile_data = setup_params_general_dual_band["ssid_modes"]["open"][0]
        ssid_name = profile_data["ssid_name"]
        mode = "NAT"
        vlan = 1
        # If the SSID never appeared in the AP's VIF state the profile
        # push failed; record the state and xfail rather than hard-fail.
        if ssid_name not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        # Associate the maximum number of stations on each radio type.
        lf_tools.add_stations(band="2G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="5G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="ax", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.Chamber_View()
        wct_obj = lf_test.wifi_capacity(instance_name="test_client_open_NAT_tcp_dl", mode=mode, vlan_id=vlan,
                                        download_rate="1Gbps",
                                        upload_rate="0", protocol="TCP-IPv4", duration="60000")
        # The response string embeds a path after ":::"; keep only the
        # final path component as the report name.
        report_name = wct_obj.report_name[0]['LAST']["response"].split(":::")[1].split("/")[-1]
        lf_tools.attach_report_graphs(report_name=report_name)
        print("Test Completed... Cleaning up Stations")
        # The run itself is the check: the test passes once it completes.
        assert True
    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-3696", name="WIFI-3696")
    @pytest.mark.open
    @pytest.mark.udp_download
    def test_client_open_nat_udp_dl(self, get_vif_state, lf_tools,
                                    lf_test, station_names_twog, create_lanforge_chamberview_dut,
                                    get_configuration):
        """ Wifi Capacity Test NAT mode
            pytest -m "wifi_capacity_test and nat and open and dual_band"

        UDP download-only run: 1Gbps down / 0 up.
        """
        profile_data = setup_params_general_dual_band["ssid_modes"]["open"][0]
        ssid_name = profile_data["ssid_name"]
        mode = "NAT"
        vlan = 1
        if ssid_name not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        lf_tools.add_stations(band="2G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="5G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="ax", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.Chamber_View()
        wct_obj = lf_test.wifi_capacity(instance_name="test_client_open_NAT_udp_dl", mode=mode, vlan_id=vlan,
                                        download_rate="1Gbps",
                                        upload_rate="0", protocol="UDP-IPv4", duration="60000")
        report_name = wct_obj.report_name[0]['LAST']["response"].split(":::")[1].split("/")[-1]
        lf_tools.attach_report_graphs(report_name=report_name)
        print("Test Completed... Cleaning up Stations")
        assert True
    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-3698", name="WIFI-3698")
    @pytest.mark.open
    @pytest.mark.tcp_bidirectional
    def test_client_open_nat_tcp_bidirectional(self, get_vif_state, lf_tools,
                                               lf_test, station_names_twog, create_lanforge_chamberview_dut,
                                               get_configuration):
        """ Wifi Capacity Test NAT mode
            pytest -m "wifi_capacity_test and nat and open and dual_band"

        TCP bidirectional run: 1Gbps down / 1Gbps up.
        """
        profile_data = setup_params_general_dual_band["ssid_modes"]["open"][0]
        ssid_name = profile_data["ssid_name"]
        mode = "NAT"
        vlan = 1
        if ssid_name not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        lf_tools.add_stations(band="2G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="5G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="ax", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.Chamber_View()
        wct_obj = lf_test.wifi_capacity(instance_name="test_client_open_NAT_tcp_bi", mode=mode, vlan_id=vlan,
                                        download_rate="1Gbps",
                                        upload_rate="1Gbps", protocol="TCP-IPv4", duration="60000")
        report_name = wct_obj.report_name[0]['LAST']["response"].split(":::")[1].split("/")[-1]
        lf_tools.attach_report_graphs(report_name=report_name)
        print("Test Completed... Cleaning up Stations")
        assert True
    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-3697", name="WIFI-3697")
    @pytest.mark.open
    @pytest.mark.udp_bidirectional
    def test_client_open_nat_udp_bidirectional(self, get_vif_state, lf_tools,
                                               lf_test, station_names_twog, create_lanforge_chamberview_dut,
                                               get_configuration):
        """ Wifi Capacity Test NAT mode
            pytest -m "wifi_capacity_test and nat and open and dual_band"

        UDP bidirectional run: 1Gbps down / 1Gbps up.
        """
        profile_data = setup_params_general_dual_band["ssid_modes"]["open"][0]
        ssid_name = profile_data["ssid_name"]
        mode = "NAT"
        vlan = 1
        if ssid_name not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        lf_tools.add_stations(band="2G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="5G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="ax", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.Chamber_View()
        wct_obj = lf_test.wifi_capacity(instance_name="test_client_open_NAT_udp_bi", mode=mode, vlan_id=vlan,
                                        download_rate="1Gbps",
                                        upload_rate="1Gbps", protocol="UDP-IPv4", duration="60000")
        report_name = wct_obj.report_name[0]['LAST']["response"].split(":::")[1].split("/")[-1]
        lf_tools.attach_report_graphs(report_name=report_name)
        print("Test Completed... Cleaning up Stations")
        assert True
# Controller profile for the 2.4 GHz-only run: a single open SSID applied
# to the 2G radio, AP in NAT mode, RADIUS disabled.
setup_params_general_2G = {
    "mode": "NAT",
    "ssid_modes": {
        "open": [
            {"ssid_name": "ssid_open_2g", "appliedRadios": ["2G"]}
        ]
    },
    "rf": {},
    "radius": False
}
@allure.feature("NAT MODE CLIENT CONNECTIVITY")
@pytest.mark.parametrize(
    'setup_profiles',
    [setup_params_general_2G],
    indirect=True,
    scope="class"
)
@pytest.mark.usefixtures("setup_profiles")
@pytest.mark.open
@pytest.mark.twog
@pytest.mark.twog_band
class TestWifiCapacityNATMode2G(object):
    """ Wifi Capacity Test NAT mode
        pytest -m "wifi_capacity_test and NAT"

    Same four traffic shapes as the dual-band class, but the profile is
    2.4 GHz-only and stations are created on the 2G and ax radios only.
    """
    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-3648", name="WIFI-3648")
    @pytest.mark.open
    @pytest.mark.tcp_download
    def test_client_open_nat_tcp_dl(self, get_vif_state, lf_tools, setup_profiles,
                                    lf_test, station_names_twog, create_lanforge_chamberview_dut,
                                    get_configuration):
        """ Wifi Capacity Test NAT mode
            pytest -m "wifi_capacity_test and nat and open and twog"

        TCP download-only run: 1Gbps down / 0 up.
        """
        profile_data = setup_params_general_2G["ssid_modes"]["open"][0]
        ssid_name = profile_data["ssid_name"]
        mode = "NAT"
        vlan = 1
        # SSID missing from VIF state means the profile push failed:
        # record the state and xfail instead of hard-failing.
        if ssid_name not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        lf_tools.add_stations(band="2G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="ax", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.Chamber_View()
        wct_obj = lf_test.wifi_capacity(instance_name="test_client_open_NAT_tcp_dl", mode=mode, vlan_id=vlan,
                                        download_rate="1Gbps",
                                        upload_rate="0", protocol="TCP-IPv4", duration="60000")
        # Keep only the last path component of the ":::"-separated response.
        report_name = wct_obj.report_name[0]['LAST']["response"].split(":::")[1].split("/")[-1]
        lf_tools.attach_report_graphs(report_name=report_name)
        print("Test Completed... Cleaning up Stations")
        assert True
    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-3654", name="WIFI-3654")
    @pytest.mark.open
    @pytest.mark.udp_download
    def test_client_open_nat_udp_dl(self, get_vif_state, lf_tools,
                                    lf_test, station_names_twog, create_lanforge_chamberview_dut,
                                    get_configuration):
        """ Wifi Capacity Test NAT mode
            pytest -m "wifi_capacity_test and nat and open and twog"

        UDP download-only run: 1Gbps down / 0 up.
        """
        profile_data = setup_params_general_2G["ssid_modes"]["open"][0]
        ssid_name = profile_data["ssid_name"]
        mode = "NAT"
        vlan = 1
        if ssid_name not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        lf_tools.add_stations(band="2G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="ax", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.Chamber_View()
        wct_obj = lf_test.wifi_capacity(instance_name="test_client_open_NAT_udp_dl", mode=mode, vlan_id=vlan,
                                        download_rate="1Gbps",
                                        upload_rate="0", protocol="UDP-IPv4", duration="60000")
        report_name = wct_obj.report_name[0]['LAST']["response"].split(":::")[1].split("/")[-1]
        lf_tools.attach_report_graphs(report_name=report_name)
        print("Test Completed... Cleaning up Stations")
        assert True
    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-3670", name="WIFI-3670")
    @pytest.mark.open
    @pytest.mark.tcp_bidirectional
    def test_client_open_nat_tcp_bidirectional(self, get_vif_state, lf_tools,
                                               lf_test, station_names_twog, create_lanforge_chamberview_dut,
                                               get_configuration):
        """ Wifi Capacity Test NAT mode
            pytest -m "wifi_capacity_test and nat and open and twog"

        TCP bidirectional run: 1Gbps down / 1Gbps up.
        """
        profile_data = setup_params_general_2G["ssid_modes"]["open"][0]
        ssid_name = profile_data["ssid_name"]
        mode = "NAT"
        vlan = 1
        if ssid_name not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        lf_tools.add_stations(band="2G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="ax", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.Chamber_View()
        wct_obj = lf_test.wifi_capacity(instance_name="test_client_open_NAT_tcp_bi", mode=mode, vlan_id=vlan,
                                        download_rate="1Gbps",
                                        upload_rate="1Gbps", protocol="TCP-IPv4", duration="60000")
        report_name = wct_obj.report_name[0]['LAST']["response"].split(":::")[1].split("/")[-1]
        lf_tools.attach_report_graphs(report_name=report_name)
        print("Test Completed... Cleaning up Stations")
        assert True
    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-3664", name="WIFI-3664")
    @pytest.mark.open
    @pytest.mark.udp_bidirectional
    def test_client_open_nat_udp_bidirectional(self, get_vif_state, lf_tools,
                                               lf_test, station_names_twog, create_lanforge_chamberview_dut,
                                               get_configuration):
        """ Wifi Capacity Test NAT mode
            pytest -m "wifi_capacity_test and nat and open and twog"

        UDP bidirectional run: 1Gbps down / 1Gbps up.
        """
        profile_data = setup_params_general_2G["ssid_modes"]["open"][0]
        ssid_name = profile_data["ssid_name"]
        mode = "NAT"
        vlan = 1
        if ssid_name not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        lf_tools.add_stations(band="2G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="ax", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.Chamber_View()
        wct_obj = lf_test.wifi_capacity(instance_name="test_client_open_NAT_udp_bi", mode=mode, vlan_id=vlan,
                                        download_rate="1Gbps",
                                        upload_rate="1Gbps", protocol="UDP-IPv4", duration="60000")
        report_name = wct_obj.report_name[0]['LAST']["response"].split(":::")[1].split("/")[-1]
        lf_tools.attach_report_graphs(report_name=report_name)
        print("Test Completed... Cleaning up Stations")
        assert True
# Controller profile for the 5 GHz-only run: a single open SSID applied
# to the 5G radio, AP in NAT mode, RADIUS disabled.
# NOTE(review): ssid_name "ssid_open_2g" looks copy-pasted from the 2G
# profile even though it is applied to the 5G radio.  The tests read the
# name back from this dict, so they are self-consistent either way, but
# confirm the intended SSID name.
setup_params_general_5G = {
    "mode": "NAT",
    "ssid_modes": {
        "open": [
            {"ssid_name": "ssid_open_2g", "appliedRadios": ["5G"]}
        ]
    },
    "rf": {},
    "radius": False
}
@allure.feature("NAT MODE CLIENT CONNECTIVITY")
@pytest.mark.parametrize(
    'setup_profiles',
    [setup_params_general_5G],
    indirect=True,
    scope="class"
)
@pytest.mark.usefixtures("setup_profiles")
@pytest.mark.open
@pytest.mark.fiveg
@pytest.mark.fiveg_band
class TestWifiCapacityNATMode5G(object):
    """ Wifi Capacity Test NAT mode
        pytest -m "wifi_capacity_test and NAT"

    Same four traffic shapes as the dual-band class, but the profile is
    5 GHz-only and stations are created on the 5G and ax radios only.
    """
    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-3649", name="WIFI-3649")
    @pytest.mark.open
    @pytest.mark.tcp_download
    def test_client_open_nat_tcp_dl(self, get_vif_state, lf_tools, setup_profiles,
                                    lf_test, station_names_fiveg, create_lanforge_chamberview_dut,
                                    get_configuration):
        """ Wifi Capacity Test NAT mode
            pytest -m "wifi_capacity_test and nat and open and fiveg"

        TCP download-only run: 1Gbps down / 0 up.
        """
        profile_data = setup_params_general_5G["ssid_modes"]["open"][0]
        ssid_name = profile_data["ssid_name"]
        mode = "NAT"
        vlan = 1
        # SSID missing from VIF state means the profile push failed:
        # record the state and xfail instead of hard-failing.
        if ssid_name not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        lf_tools.add_stations(band="5G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="ax", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.Chamber_View()
        wct_obj = lf_test.wifi_capacity(instance_name="test_client_open_NAT_tcp_dl", mode=mode, vlan_id=vlan,
                                        download_rate="1Gbps",
                                        upload_rate="0", protocol="TCP-IPv4", duration="60000")
        # Keep only the last path component of the ":::"-separated response.
        report_name = wct_obj.report_name[0]['LAST']["response"].split(":::")[1].split("/")[-1]
        lf_tools.attach_report_graphs(report_name=report_name)
        print("Test Completed... Cleaning up Stations")
        assert True
    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-3655", name="WIFI-3655")
    @pytest.mark.open
    @pytest.mark.udp_download
    def test_client_open_nat_udp_dl(self, get_vif_state, lf_tools,
                                    lf_test, station_names_fiveg, create_lanforge_chamberview_dut,
                                    get_configuration):
        """ Wifi Capacity Test NAT mode
            pytest -m "wifi_capacity_test and nat and open and fiveg"

        UDP download-only run: 1Gbps down / 0 up.
        """
        profile_data = setup_params_general_5G["ssid_modes"]["open"][0]
        ssid_name = profile_data["ssid_name"]
        mode = "NAT"
        vlan = 1
        if ssid_name not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        lf_tools.add_stations(band="5G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="ax", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.Chamber_View()
        wct_obj = lf_test.wifi_capacity(instance_name="test_client_open_NAT_udp_dl", mode=mode, vlan_id=vlan,
                                        download_rate="1Gbps",
                                        upload_rate="0", protocol="UDP-IPv4", duration="60000")
        report_name = wct_obj.report_name[0]['LAST']["response"].split(":::")[1].split("/")[-1]
        lf_tools.attach_report_graphs(report_name=report_name)
        print("Test Completed... Cleaning up Stations")
        assert True
    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-3671", name="WIFI-3671")
    @pytest.mark.open
    @pytest.mark.tcp_bidirectional
    def test_client_open_nat_tcp_bidirectional(self, get_vif_state, lf_tools,
                                               lf_test, station_names_fiveg, create_lanforge_chamberview_dut,
                                               get_configuration):
        """ Wifi Capacity Test NAT mode
            pytest -m "wifi_capacity_test and nat and open and fiveg"

        TCP bidirectional run: 1Gbps down / 1Gbps up.
        """
        profile_data = setup_params_general_5G["ssid_modes"]["open"][0]
        ssid_name = profile_data["ssid_name"]
        mode = "NAT"
        vlan = 1
        if ssid_name not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        lf_tools.add_stations(band="5G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="ax", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.Chamber_View()
        wct_obj = lf_test.wifi_capacity(instance_name="test_client_open_NAT_tcp_bi", mode=mode, vlan_id=vlan,
                                        download_rate="1Gbps",
                                        upload_rate="1Gbps", protocol="TCP-IPv4", duration="60000")
        report_name = wct_obj.report_name[0]['LAST']["response"].split(":::")[1].split("/")[-1]
        lf_tools.attach_report_graphs(report_name=report_name)
        print("Test Completed... Cleaning up Stations")
        assert True
    @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-3665", name="WIFI-3665")
    @pytest.mark.open
    @pytest.mark.udp_bidirectional
    def test_client_open_nat_udp_bidirectional(self, get_vif_state, lf_tools,
                                               lf_test, station_names_fiveg, create_lanforge_chamberview_dut,
                                               get_configuration):
        """ Wifi Capacity Test NAT mode
            pytest -m "wifi_capacity_test and nat and open and fiveg"

        UDP bidirectional run: 1Gbps down / 1Gbps up.
        """
        profile_data = setup_params_general_5G["ssid_modes"]["open"][0]
        ssid_name = profile_data["ssid_name"]
        mode = "NAT"
        vlan = 1
        if ssid_name not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
        lf_tools.add_stations(band="5G", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.add_stations(band="ax", num_stations="max", dut=lf_tools.dut_name, ssid_name=ssid_name)
        lf_tools.Chamber_View()
        wct_obj = lf_test.wifi_capacity(instance_name="test_client_open_NAT_udp_bi", mode=mode, vlan_id=vlan,
                                        download_rate="1Gbps",
                                        upload_rate="1Gbps", protocol="UDP-IPv4", duration="60000")
        report_name = wct_obj.report_name[0]['LAST']["response"].split(":::")[1].split("/")[-1]
        lf_tools.attach_report_graphs(report_name=report_name)
        print("Test Completed... Cleaning up Stations")
        assert True
| 49.453125
| 109
| 0.630648
| 2,852
| 22,155
| 4.584502
| 0.04453
| 0.058126
| 0.051396
| 0.038547
| 0.963671
| 0.958776
| 0.958776
| 0.958776
| 0.958776
| 0.956023
| 0
| 0.017375
| 0.251817
| 22,155
| 448
| 110
| 49.453125
| 0.771417
| 0.062334
| 0
| 0.877493
| 0
| 0
| 0.180243
| 0.015908
| 0
| 0
| 0
| 0
| 0.034188
| 1
| 0.034188
| false
| 0
| 0.008547
| 0
| 0.051282
| 0.034188
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d43716ff1da068fb190ee7e8bce52d6e19a0c9e
| 3,456
|
py
|
Python
|
matches/function.py
|
rgrannell1/aleph
|
775e5b0297087e2ada454696f537accbf23e7331
|
[
"Unlicense"
] | 1
|
2022-01-30T15:13:06.000Z
|
2022-01-30T15:13:06.000Z
|
matches/function.py
|
rgrannell1/aleph
|
775e5b0297087e2ada454696f537accbf23e7331
|
[
"Unlicense"
] | null | null | null |
matches/function.py
|
rgrannell1/aleph
|
775e5b0297087e2ada454696f537accbf23e7331
|
[
"Unlicense"
] | null | null | null |
from rule import Rule
from rule_match import RuleMatch
class FunctionRuleMatch(RuleMatch):
    """Persist matched function definitions into the ``function`` table."""

    # Attributes every function match must carry.
    props = {
        'name',
        'file',
        'location'
    }

    def create_table(self, conn):
        """Create the ``function`` table if it does not already exist."""
        ddl = '''
        create table if not exists function (
            name string not null,
            file string not null,
            startLine integer,
            startCol integer,
            startOffset integer,
            stopLine integer,
            stopCol integer,
            stopOffset integer,
            primary key(name, file, startLine, startCol, startOffset, stopLine, stopCol, stopOffset)
        );
        '''
        cursor = conn.cursor()
        cursor.execute(ddl)
        conn.commit()

    def write(self, conn):
        """Upsert this match's row into the ``function`` table."""
        stmt = 'insert or replace into function (name, file, startLine, startCol, startOffset, stopLine, stopCol, stopOffset) values (?, ?, ?, ?, ?, ?, ?, ?)'
        span = self.data['location']
        row = (
            self.data['name'],
            self.data['file'],
            span.start.line,
            span.start.col,
            span.start.offset,
            span.stop.line,
            span.stop.col,
            span.stop.offset,
        )
        conn.cursor().execute(stmt, row)
        conn.commit()
class MethodRuleMatch(RuleMatch):
    """Persist matched method definitions into the ``method`` table."""

    # Attributes every method match must carry.
    props = {
        'receiver',
        'name',
        'type',
        'file',
        'location'
    }

    def create_table(self, conn):
        """Create the ``method`` table if it does not already exist."""
        sql = '''
        create table if not exists method (
            file string not null,
            receiver string not null,
            type string,
            name string not null,
            startLine integer,
            startCol integer,
            startOffset integer,
            stopLine integer,
            stopCol integer,
            stopOffset integer,
            primary key(file, receiver, type, name, startLine, startCol, startOffset, stopLine, stopCol, stopOffset)
        );
        '''
        curr = conn.cursor()
        curr.execute(sql)
        conn.commit()

    def write(self, conn):
        """Upsert this match's row into the ``method`` table.

        Fixes two defects in the original implementation:
        * ``type`` -- declared in :attr:`props` and part of the table's
          primary key -- was never written, so the column was always NULL;
        * a plain ``insert`` could raise on duplicate primary keys, while
          the sibling rule-match classes use ``insert or replace``; this
          now matches them.
        """
        sql = ('insert or replace into method '
               '(receiver, type, name, file, startLine, startCol, startOffset, '
               'stopLine, stopCol, stopOffset) '
               'values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)')
        loc = self.data['location']
        curr = conn.cursor()
        curr.execute(sql, (
            self.data['receiver'],
            self.data['type'],
            self.data['name'],
            self.data['file'],
            loc.start.line,
            loc.start.col,
            loc.start.offset,
            loc.stop.line,
            loc.stop.col,
            loc.stop.offset
        ))
        conn.commit()
class CallRuleMatch(RuleMatch):
    """Persist matched call sites into the ``call`` table."""

    # Attributes every call match must carry.
    props = {
        'name',
        'file',
        'arguments',
        'location'
    }

    def create_table(self, conn):
        """Create the ``call`` table if it does not already exist."""
        ddl = '''
        create table if not exists call (
            name string not null,
            file string not null,
            arguments string not null,
            startLine integer,
            startCol integer,
            startOffset integer,
            stopLine integer,
            stopCol integer,
            stopOffset integer,
            primary key(name, file, arguments, startLine, startCol, startOffset, stopLine, stopCol, stopOffset)
        );
        '''
        cursor = conn.cursor()
        cursor.execute(ddl)
        conn.commit()

    def write(self, conn):
        """Upsert this match's row into the ``call`` table."""
        stmt = 'insert or replace into call (name, file, arguments, startLine, startCol, startOffset, stopLine, stopCol, stopOffset) values (?, ?, ?, ?, ?, ?, ?, ?, ?)'
        span = self.data['location']
        row = (
            self.data['name'],
            self.data['file'],
            self.data['arguments'],
            span.start.line,
            span.start.col,
            span.start.offset,
            span.stop.line,
            span.stop.col,
            span.stop.offset,
        )
        conn.cursor().execute(stmt, row)
        conn.commit()
| 23.351351
| 163
| 0.578125
| 367
| 3,456
| 5.433243
| 0.152589
| 0.044132
| 0.052156
| 0.108325
| 0.835005
| 0.835005
| 0.835005
| 0.835005
| 0.807422
| 0.794383
| 0
| 0
| 0.294271
| 3,456
| 147
| 164
| 23.510204
| 0.817548
| 0
| 0
| 0.798387
| 0
| 0.040323
| 0.529821
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048387
| false
| 0
| 0.016129
| 0
| 0.112903
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2e94c4bd8740c9700de7e6123f47f9318a9fe816
| 41,714
|
py
|
Python
|
atom/nucleus/python/nucleus_api/api/customer_api.py
|
sumit4-ttn/SDK
|
b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff
|
[
"Apache-2.0"
] | null | null | null |
atom/nucleus/python/nucleus_api/api/customer_api.py
|
sumit4-ttn/SDK
|
b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff
|
[
"Apache-2.0"
] | null | null | null |
atom/nucleus/python/nucleus_api/api/customer_api.py
|
sumit4-ttn/SDK
|
b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Hydrogen Atom API
The Hydrogen Atom API # noqa: E501
OpenAPI spec version: 1.7.0
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from nucleus_api.api_client import ApiClient
class CustomerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_customer_revenue_using_post(self, customer_revenue, **kwargs): # noqa: E501
"""Create a customer revenue # noqa: E501
Create a new customer revenue, with your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_customer_revenue_using_post(customer_revenue, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CustomerRevenue customer_revenue: customerRevenue (required)
:return: CustomerRevenue
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_customer_revenue_using_post_with_http_info(customer_revenue, **kwargs) # noqa: E501
else:
(data) = self.create_customer_revenue_using_post_with_http_info(customer_revenue, **kwargs) # noqa: E501
return data
def create_customer_revenue_using_post_with_http_info(self, customer_revenue, **kwargs): # noqa: E501
"""Create a customer revenue # noqa: E501
Create a new customer revenue, with your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_customer_revenue_using_post_with_http_info(customer_revenue, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CustomerRevenue customer_revenue: customerRevenue (required)
:return: CustomerRevenue
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['customer_revenue'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_customer_revenue_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'customer_revenue' is set
if ('customer_revenue' not in params or
params['customer_revenue'] is None):
raise ValueError("Missing the required parameter `customer_revenue` when calling `create_customer_revenue_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'customer_revenue' in params:
body_params = params['customer_revenue']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/customer_revenue', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CustomerRevenue', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_customer_using_post(self, customer, **kwargs):  # noqa: E501
    """Create a customer  # noqa: E501

    Create a new customer, with your firm. Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead of the data.

    >>> thread = api.create_customer_using_post(customer, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Customer customer: customer (required)
    :return: Customer
    """
    # This convenience wrapper always wants just the deserialized body,
    # never the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info
    # variant; when async_req is set that call already yields the thread.
    return self.create_customer_using_post_with_http_info(customer, **kwargs)  # noqa: E501
def create_customer_using_post_with_http_info(self, customer, **kwargs):  # noqa: E501
    """Create a customer  # noqa: E501

    Create a new customer, with your firm. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.create_customer_using_post_with_http_info(customer, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Customer customer: customerRevenue (required)
    :return: Customer
    """
    all_params = [
        'customer',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Fold the keyword arguments into params, rejecting anything unknown.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_customer_using_post" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'customer' is set
    if params.get('customer') is None:
        raise ValueError("Missing the required parameter `customer` when calling `create_customer_using_post`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    # The customer object is serialized as the JSON request body.
    body_params = params.get('customer')

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/customer', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Customer',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_customer_revenue_using_delete(self, customer_revenue_id, **kwargs):  # noqa: E501
    """Delete a customer revenue  # noqa: E501

    Delete a customer revenue. Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead.

    >>> thread = api.delete_customer_revenue_using_delete(customer_revenue_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_revenue_id: UUID customer_revenue_id (required)
    :return: None
    """
    # Only the deserialized body is wanted from this wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async both reduce to the same *_with_http_info call.
    return self.delete_customer_revenue_using_delete_with_http_info(customer_revenue_id, **kwargs)  # noqa: E501
def delete_customer_revenue_using_delete_with_http_info(self, customer_revenue_id, **kwargs):  # noqa: E501
    """Delete a customer revenue  # noqa: E501

    Delete a customer revenue. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.delete_customer_revenue_using_delete_with_http_info(customer_revenue_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_revenue_id: UUID customer_revenue_id (required)
    :return: None
    """
    all_params = [
        'customer_revenue_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Fold the keyword arguments into params, rejecting anything unknown.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_customer_revenue_using_delete" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'customer_revenue_id' is set
    if params.get('customer_revenue_id') is None:
        raise ValueError("Missing the required parameter `customer_revenue_id` when calling `delete_customer_revenue_using_delete`")  # noqa: E501

    collection_formats = {}
    # The id is interpolated into the request path template.
    path_params = {'customer_revenue_id': params['customer_revenue_id']}  # noqa: E501
    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/customer_revenue/{customer_revenue_id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_customer_using_delete(self, customer_id, **kwargs):  # noqa: E501
    """Delete a customer  # noqa: E501

    Delete a customer. Synchronous by default; pass ``async_req=True``
    to get the request thread back instead.

    >>> thread = api.delete_customer_using_delete(customer_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_id: UUID customer_id (required)
    :return: None
    """
    # Only the deserialized body is wanted from this wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async both reduce to the same *_with_http_info call.
    return self.delete_customer_using_delete_with_http_info(customer_id, **kwargs)  # noqa: E501
def delete_customer_using_delete_with_http_info(self, customer_id, **kwargs):  # noqa: E501
    """Delete a customer  # noqa: E501

    Delete a customer. Synchronous by default; pass ``async_req=True``
    to receive the request thread instead.

    >>> thread = api.delete_customer_using_delete_with_http_info(customer_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_id: UUID customer_id (required)
    :return: None
    """
    all_params = [
        'customer_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Fold the keyword arguments into params, rejecting anything unknown.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_customer_using_delete" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'customer_id' is set
    if params.get('customer_id') is None:
        raise ValueError("Missing the required parameter `customer_id` when calling `delete_customer_using_delete`")  # noqa: E501

    collection_formats = {}
    # The id is interpolated into the request path template.
    path_params = {'customer_id': params['customer_id']}  # noqa: E501
    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/customer/{customer_id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_customer_all_using_get(self, **kwargs):  # noqa: E501
    """List all customer  # noqa: E501

    List all customer. Synchronous by default; pass ``async_req=True``
    to get the request thread back instead.

    >>> thread = api.get_customer_all_using_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageCustomer
    """
    # Only the deserialized body is wanted from this wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async both reduce to the same *_with_http_info call.
    return self.get_customer_all_using_get_with_http_info(**kwargs)  # noqa: E501
def get_customer_all_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """List all customer  # noqa: E501

    List all customer. Synchronous by default; pass ``async_req=True``
    to receive the request thread instead.

    >>> thread = api.get_customer_all_using_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageCustomer
    """
    all_params = [
        'ascending',
        'filter',
        'order_by',
        'page',
        'size',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Fold the keyword arguments into params, rejecting anything unknown.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customer_all_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    # Forward only the paging/filtering options the caller supplied.
    query_params = [
        (name, params[name])
        for name in ('ascending', 'filter', 'order_by', 'page', 'size')
        if name in params
    ]  # noqa: E501
    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/customer', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageCustomer',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_customer_revenue_all_using_get(self, **kwargs):  # noqa: E501
    """List all customer revenue  # noqa: E501

    List all customer revenue. Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead.

    >>> thread = api.get_customer_revenue_all_using_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageCustomerRevenue
    """
    # Only the deserialized body is wanted from this wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async both reduce to the same *_with_http_info call.
    return self.get_customer_revenue_all_using_get_with_http_info(**kwargs)  # noqa: E501
def get_customer_revenue_all_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """List all customer revenue  # noqa: E501

    List all customer revenue. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.get_customer_revenue_all_using_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageCustomerRevenue
    """
    all_params = [
        'ascending',
        'filter',
        'order_by',
        'page',
        'size',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Fold the keyword arguments into params, rejecting anything unknown.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customer_revenue_all_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    # Forward only the paging/filtering options the caller supplied.
    query_params = [
        (name, params[name])
        for name in ('ascending', 'filter', 'order_by', 'page', 'size')
        if name in params
    ]  # noqa: E501
    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/customer_revenue', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageCustomerRevenue',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_customer_revenue_using_get(self, customer_revenue_id, **kwargs):  # noqa: E501
    """Retrieve a customer revenue  # noqa: E501

    Retrieve a customer revenue. Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead.

    >>> thread = api.get_customer_revenue_using_get(customer_revenue_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_revenue_id: UUID customer_revenue_id (required)
    :return: CustomerRevenue
    """
    # Only the deserialized body is wanted from this wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async both reduce to the same *_with_http_info call.
    return self.get_customer_revenue_using_get_with_http_info(customer_revenue_id, **kwargs)  # noqa: E501
def get_customer_revenue_using_get_with_http_info(self, customer_revenue_id, **kwargs):  # noqa: E501
    """Retrieve a customer revenue  # noqa: E501

    Retrieve a customer revenue. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.get_customer_revenue_using_get_with_http_info(customer_revenue_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_revenue_id: UUID customer_revenue_id (required)
    :return: CustomerRevenue
    """
    all_params = [
        'customer_revenue_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Fold the keyword arguments into params, rejecting anything unknown.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customer_revenue_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'customer_revenue_id' is set
    if params.get('customer_revenue_id') is None:
        raise ValueError("Missing the required parameter `customer_revenue_id` when calling `get_customer_revenue_using_get`")  # noqa: E501

    collection_formats = {}
    # The id is interpolated into the request path template.
    path_params = {'customer_revenue_id': params['customer_revenue_id']}  # noqa: E501
    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept` — this endpoint advertises JSON only,
        # unlike its sibling endpoints which use */*.
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/customer_revenue/{customer_revenue_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='CustomerRevenue',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_customer_using_get(self, customer_id, **kwargs):  # noqa: E501
    """Retrieve a customer  # noqa: E501

    Retrieve a customer. Synchronous by default; pass ``async_req=True``
    to get the request thread back instead.

    >>> thread = api.get_customer_using_get(customer_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_id: UUID customer_id (required)
    :return: Customer
    """
    # Only the deserialized body is wanted from this wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async both reduce to the same *_with_http_info call.
    return self.get_customer_using_get_with_http_info(customer_id, **kwargs)  # noqa: E501
def get_customer_using_get_with_http_info(self, customer_id, **kwargs):  # noqa: E501
    """Retrieve a customer  # noqa: E501

    Retrieve a customer. Synchronous by default; pass ``async_req=True``
    to receive the request thread instead.

    >>> thread = api.get_customer_using_get_with_http_info(customer_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_id: UUID customer_id (required)
    :return: Customer
    """
    all_params = [
        'customer_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Fold the keyword arguments into params, rejecting anything unknown.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customer_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'customer_id' is set
    if params.get('customer_id') is None:
        raise ValueError("Missing the required parameter `customer_id` when calling `get_customer_using_get`")  # noqa: E501

    collection_formats = {}
    # The id is interpolated into the request path template.
    path_params = {'customer_id': params['customer_id']}  # noqa: E501
    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/customer/{customer_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Customer',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_customer_revenue_using_put(self, customer_revenue, customer_revenue_id, **kwargs):  # noqa: E501
    """Update a customer revenue  # noqa: E501

    Update a customer revenue. Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead.

    >>> thread = api.update_customer_revenue_using_put(customer_revenue, customer_revenue_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CustomerRevenue customer_revenue: customer_revenue (required)
    :param str customer_revenue_id: UUID customer_revenue_id (required)
    :return: CustomerRevenue
    """
    # Only the deserialized body is wanted from this wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async both reduce to the same *_with_http_info call.
    return self.update_customer_revenue_using_put_with_http_info(customer_revenue, customer_revenue_id, **kwargs)  # noqa: E501
def update_customer_revenue_using_put_with_http_info(self, customer_revenue, customer_revenue_id, **kwargs):  # noqa: E501
    """Update a customer revenue  # noqa: E501

    Update a customer revenue. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.update_customer_revenue_using_put_with_http_info(customer_revenue, customer_revenue_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CustomerRevenue customer_revenue: customer_revenue (required)
    :param str customer_revenue_id: UUID customer_revenue_id (required)
    :return: CustomerRevenue
    """
    all_params = [
        'customer_revenue',
        'customer_revenue_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Fold the keyword arguments into params, rejecting anything unknown.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_customer_revenue_using_put" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'customer_revenue' is set
    if params.get('customer_revenue') is None:
        raise ValueError("Missing the required parameter `customer_revenue` when calling `update_customer_revenue_using_put`")  # noqa: E501
    # verify the required parameter 'customer_revenue_id' is set
    if params.get('customer_revenue_id') is None:
        raise ValueError("Missing the required parameter `customer_revenue_id` when calling `update_customer_revenue_using_put`")  # noqa: E501

    collection_formats = {}
    # The id is interpolated into the request path template.
    path_params = {'customer_revenue_id': params['customer_revenue_id']}  # noqa: E501
    query_params = []
    form_params = []
    local_var_files = {}

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    # The updated revenue object is serialized as the JSON request body.
    body_params = params.get('customer_revenue')

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/customer_revenue/{customer_revenue_id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='CustomerRevenue',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_customer_using_put(self, customer, customer_id, **kwargs):  # noqa: E501
    """Update a customer  # noqa: E501

    Update a customer. Synchronous by default; pass ``async_req=True``
    to get the request thread back instead.

    >>> thread = api.update_customer_using_put(customer, customer_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Customer customer: customer (required)
    :param str customer_id: UUID customer_id (required)
    :return: Customer
    """
    # Only the deserialized body is wanted from this wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async both reduce to the same *_with_http_info call.
    return self.update_customer_using_put_with_http_info(customer, customer_id, **kwargs)  # noqa: E501
def update_customer_using_put_with_http_info(self, customer, customer_id, **kwargs):  # noqa: E501
    """Update a customer  # noqa: E501

    Update a customer. Synchronous by default; pass ``async_req=True``
    to receive the request thread instead.

    >>> thread = api.update_customer_using_put_with_http_info(customer, customer_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Customer customer: customer (required)
    :param str customer_id: UUID customer_id (required)
    :return: Customer
    """
    all_params = [
        'customer',
        'customer_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    params = locals()
    # Fold the keyword arguments into params, rejecting anything unknown.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_customer_using_put" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'customer' is set
    if params.get('customer') is None:
        raise ValueError("Missing the required parameter `customer` when calling `update_customer_using_put`")  # noqa: E501
    # verify the required parameter 'customer_id' is set
    if params.get('customer_id') is None:
        raise ValueError("Missing the required parameter `customer_id` when calling `update_customer_using_put`")  # noqa: E501

    collection_formats = {}
    # The id is interpolated into the request path template.
    path_params = {'customer_id': params['customer_id']}  # noqa: E501
    query_params = []
    form_params = []
    local_var_files = {}

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    # The updated customer object is serialized as the JSON request body.
    body_params = params.get('customer')

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/customer/{customer_id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Customer',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 40.071085
| 150
| 0.620271
| 4,771
| 41,714
| 5.126808
| 0.035213
| 0.050695
| 0.039616
| 0.029436
| 0.980294
| 0.973917
| 0.970891
| 0.965331
| 0.957522
| 0.948038
| 0
| 0.016395
| 0.2923
| 41,714
| 1,040
| 151
| 40.109615
| 0.812168
| 0.319893
| 0
| 0.829443
| 1
| 0
| 0.194498
| 0.05797
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037702
| false
| 0
| 0.007181
| 0
| 0.100539
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2ef0bb09773722270d7c37ee64b305aa6c6a2373
| 442
|
py
|
Python
|
sample_problems/problems_with_solution1.py
|
adi01trip01/adi_workspace
|
f493b3ba84645eec3a57607243760a826880d1a3
|
[
"MIT"
] | null | null | null |
sample_problems/problems_with_solution1.py
|
adi01trip01/adi_workspace
|
f493b3ba84645eec3a57607243760a826880d1a3
|
[
"MIT"
] | null | null | null |
sample_problems/problems_with_solution1.py
|
adi01trip01/adi_workspace
|
f493b3ba84645eec3a57607243760a826880d1a3
|
[
"MIT"
] | null | null | null |
# print it like this
"""Twinkle, twinkle, little star,
How I wonder what you are!
Up above the world so high,
Like a diamond in the sky.
Twinkle, twinkle, little star,
How I wonder what you are"
"""
# Assemble the indented poem line by line so the tab layout is visible,
# then emit it in a single print — output is identical to the one-string form.
poem_lines = [
    "Twinkle, twinkle, little star,",
    "\tHow I wonder what you are!",
    "\t\tUp above the world so high,",
    "\t\tLike a diamond in the sky.",
    "Twinkle, twinkle, little star,",
    "\tHow I wonder what you are",
]
print("\n".join(poem_lines))
| 49.111111
| 195
| 0.653846
| 75
| 442
| 3.853333
| 0.386667
| 0.179931
| 0.235294
| 0.193772
| 0.816609
| 0.574394
| 0.574394
| 0.574394
| 0.574394
| 0.574394
| 0
| 0
| 0.253394
| 442
| 9
| 195
| 49.111111
| 0.875758
| 0.538462
| 0
| 0
| 0
| 1
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
258213dfe4274e42134e0dce30d28bab5b78e5fe
| 8,632
|
py
|
Python
|
test/test_cases__yb_chunk_dml_by_date_part.py
|
eloemosynator/YbEasyCli
|
b35ebe03da07898cfa06ff687cba29cd83268c31
|
[
"MIT"
] | null | null | null |
test/test_cases__yb_chunk_dml_by_date_part.py
|
eloemosynator/YbEasyCli
|
b35ebe03da07898cfa06ff687cba29cd83268c31
|
[
"MIT"
] | 4
|
2020-06-03T18:11:29.000Z
|
2022-03-07T20:41:16.000Z
|
test/test_cases__yb_chunk_dml_by_date_part.py
|
eloemosynator/YbEasyCli
|
b35ebe03da07898cfa06ff687cba29cd83268c31
|
[
"MIT"
] | 2
|
2020-05-27T23:43:03.000Z
|
2022-03-03T23:16:15.000Z
|
# Regex -> placeholder map used to scrub volatile timestamp/date values
# from captured output before comparison. NOTE(review): the most specific
# pattern (full timestamp with timezone) is listed first — presumably the
# consumer applies patterns in insertion order; keep this ordering.
map_out = {
    r'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{1,6}(-|\+)\d{2}': 'YYYY-MM-DD HH:MM:SS.FFFFFF-TZ',
    r'\d{2}:\d{2}:\d{2}.\d{1,6}': 'HH:MM:SS.FFFFFF',
    r'\d{4}-\d{2}-\d{2}': 'YYYY-MM-DD',
}
test_cases = [
test_case(
cmd=('yb_chunk_dml_by_date_part.py @{argsdir}/yb_chunk_dml_by_date_part__args1 '
'--execute_chunk_dml')
, exit_code=0
, stdout="""-- Running DML chunking.
--2020-08-22 21:11:36.14636-06: Starting Date Part Chunking, first calculating HOUR group counts
--2020-08-22 21:11:36.745945-06: Build Chunk DMLs
--2020-08-22 21:11:36.746275-06: Chunk: 1, Rows: 327156, Range 2020-01-01 00:00:00 <= col12 < 2020-01-01 01:00:00
--2020-08-22 21:11:37.120013-06: Chunk: 2, Rows: 100284, Range 2020-01-01 01:00:00 <= col12 < 2020-01-03 00:00:00
--2020-08-22 21:11:37.412505-06: Chunk: 3, Rows: 100576, Range 2020-01-03 00:00:00 <= col12 < 2020-01-06 15:00:00
--2020-08-22 21:11:37.717729-06: Chunk: 4, Rows: 100288, Range 2020-01-06 15:00:00 <= col12 < 2020-01-12 10:00:00
--2020-08-22 21:11:38.019614-06: Chunk: 5, Rows: 100152, Range 2020-01-12 10:00:00 <= col12 < 2020-01-21 16:00:00
--2020-08-22 21:11:38.345962-06: Chunk: 6, Rows: 100040, Range 2020-01-21 16:00:00 <= col12 < 2020-02-06 10:00:00
--2020-08-22 21:11:38.590085-06: Chunk: 7, Rows: 100040, Range 2020-02-06 10:00:00 <= col12 < 2020-03-12 10:00:00
--2020-08-22 21:11:38.937236-06: Chunk: 8, Rows: 71464, Range 2020-03-12 10:00:00 <= col12 < 2021-12-02 15:00:00
--2020-08-22 21:11:39.338657-06: Chunk: 9, Rows: 0, col12 IS NULL
--2020-08-22 21:11:39.594577-06: Completed Date Part Chunked DML
--Total Rows : 1000000
--IS NULL Rows : 0
--Running total check: PASSED
--Duration : 00:00:03.449368
--Overhead duration : 00:00:00.616575
--Total Chunks : 9
--Min chunk size : 100000
--Largest chunk size : 327156
--Average chunk size : 111111
-- Completed DML chunking."""
, stderr=''
, map_out=map_out)
, test_case(
cmd=('yb_chunk_dml_by_date_part.py @{argsdir}/yb_chunk_dml_by_date_part__args1 '
'--print_chunk_dml --null_chunk_off --verbose_chunk_off')
, exit_code=0
, stdout="""-- Running DML chunking.
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 1, size: 327156) >>>*/ TO_TIMESTAMP('2020-01-01 00:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-01-01 01:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 2, size: 100284) >>>*/ TO_TIMESTAMP('2020-01-01 01:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-01-03 00:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 3, size: 100576) >>>*/ TO_TIMESTAMP('2020-01-03 00:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-01-06 15:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 4, size: 100288) >>>*/ TO_TIMESTAMP('2020-01-06 15:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-01-12 10:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 5, size: 100152) >>>*/ TO_TIMESTAMP('2020-01-12 10:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-01-21 16:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 6, size: 100040) >>>*/ TO_TIMESTAMP('2020-01-21 16:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-02-06 10:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 7, size: 100040) >>>*/ TO_TIMESTAMP('2020-02-06 10:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-03-12 10:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 8, size: 71464) >>>*/ TO_TIMESTAMP('2020-03-12 10:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2021-12-02 15:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
-- Completed DML chunking."""
, stderr='')
, test_case(
cmd=('yb_chunk_dml_by_date_part.py @{argsdir}/yb_chunk_dml_by_date_part__args1 '
'--print_chunk_dml')
, exit_code=0
, stdout="""-- Running DML chunking.
--2020-08-22 21:10:26.137851-06: Starting Date Part Chunking, first calculating HOUR group counts
--2020-08-22 21:10:26.59311-06: Build Chunk DMLs
--2020-08-22 21:10:26.5935-06: Chunk: 1, Rows: 327156, Range 2020-01-01 00:00:00 <= col12 < 2020-01-01 01:00:00
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 1, size: 327156) >>>*/ TO_TIMESTAMP('2020-01-01 00:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-01-01 01:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
--2020-08-22 21:10:26.594261-06: Chunk: 2, Rows: 100284, Range 2020-01-01 01:00:00 <= col12 < 2020-01-03 00:00:00
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 2, size: 100284) >>>*/ TO_TIMESTAMP('2020-01-01 01:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-01-03 00:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
--2020-08-22 21:10:26.594503-06: Chunk: 3, Rows: 100576, Range 2020-01-03 00:00:00 <= col12 < 2020-01-06 15:00:00
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 3, size: 100576) >>>*/ TO_TIMESTAMP('2020-01-03 00:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-01-06 15:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
--2020-08-22 21:10:26.594825-06: Chunk: 4, Rows: 100288, Range 2020-01-06 15:00:00 <= col12 < 2020-01-12 10:00:00
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 4, size: 100288) >>>*/ TO_TIMESTAMP('2020-01-06 15:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-01-12 10:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
--2020-08-22 21:10:26.595243-06: Chunk: 5, Rows: 100152, Range 2020-01-12 10:00:00 <= col12 < 2020-01-21 16:00:00
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 5, size: 100152) >>>*/ TO_TIMESTAMP('2020-01-12 10:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-01-21 16:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
--2020-08-22 21:10:26.596105-06: Chunk: 6, Rows: 100040, Range 2020-01-21 16:00:00 <= col12 < 2020-02-06 10:00:00
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 6, size: 100040) >>>*/ TO_TIMESTAMP('2020-01-21 16:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-02-06 10:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
--2020-08-22 21:10:26.597597-06: Chunk: 7, Rows: 100040, Range 2020-02-06 10:00:00 <= col12 < 2020-03-12 10:00:00
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 7, size: 100040) >>>*/ TO_TIMESTAMP('2020-02-06 10:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2020-03-12 10:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
--2020-08-22 21:10:26.608436-06: Chunk: 8, Rows: 71464, Range 2020-03-12 10:00:00 <= col12 < 2021-12-02 15:00:00
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 8, size: 71464) >>>*/ TO_TIMESTAMP('2020-03-12 10:00:00','YYYY-MM-DD HH24:MI:SS') <= col12 AND col12 < TO_TIMESTAMP('2021-12-02 15:00:00','YYYY-MM-DD HH24:MI:SS') /*<<< chunk_clause */;
--2020-08-22 21:10:26.608949-06: Chunk: 9, Rows: 0, col12 IS NULL
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE col12 IS NULL;
--2020-08-22 21:10:26.609202-06: Completed Date Part Chunked DML
--Total Rows : 1000000
--IS NULL Rows : 0
--Running total check: PASSED
--Duration : 00:00:00.47247
--Overhead duration : 00:00:00.472545
--Total Chunks : 9
--Min chunk size : 100000
--Largest chunk size : 327156
--Average chunk size : 111111
-- Completed DML chunking."""
, stderr=''
, map_out=map_out)
]
| 94.857143
| 282
| 0.671918
| 1,614
| 8,632
| 3.475836
| 0.089839
| 0.05918
| 0.048485
| 0.057041
| 0.95205
| 0.940998
| 0.934938
| 0.921747
| 0.8918
| 0.885383
| 0
| 0.260794
| 0.133341
| 8,632
| 91
| 283
| 94.857143
| 0.489106
| 0
| 0
| 0.545455
| 0
| 0.443182
| 0.945094
| 0.241863
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.022727
| 0
| 0
| 0
| 0.022727
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
259c400a577286baa4cbb09080519d065de39d59
| 22,530
|
py
|
Python
|
tests/tasks/tasks/util/test_save_tasks.py
|
Exopy/ecpy_hqc_legacy
|
3e31a8865d130907a82005e6cd78d99c6da7a951
|
[
"BSD-3-Clause"
] | null | null | null |
tests/tasks/tasks/util/test_save_tasks.py
|
Exopy/ecpy_hqc_legacy
|
3e31a8865d130907a82005e6cd78d99c6da7a951
|
[
"BSD-3-Clause"
] | 34
|
2015-12-14T22:06:57.000Z
|
2018-02-07T08:40:47.000Z
|
tests/tasks/tasks/util/test_save_tasks.py
|
Exopy/ecpy_hqc_legacy
|
3e31a8865d130907a82005e6cd78d99c6da7a951
|
[
"BSD-3-Clause"
] | 6
|
2018-04-20T14:48:54.000Z
|
2021-06-23T22:25:17.000Z
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015-2018 by ExopyHqcLegacy Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Test the tasks used to save data to files and arrays.
"""
import os
from multiprocessing import Event
from collections import OrderedDict
import pytest
import enaml
import numpy as np
from exopy.tasks.api import RootTask
from exopy.testing.util import (show_and_close_widget, show_widget)
from exopy_hqc_legacy.tasks.tasks.util.save_tasks import (SaveTask,
SaveArrayTask,
SaveFileTask)
with enaml.imports():
from exopy_hqc_legacy.tasks.tasks.util.views.save_views\
import (SaveView, SaveArrayView, SaveFileView)
class TestSaveTask(object):
    """Tests for SaveTask: checking and performing saves to file and/or
    an in-memory array, depending on ``saving_target``.
    """

    def setup(self):
        # Fresh root task holding a SaveTask plus the database entries
        # ('int', 'float', 'str') the tests format against.
        self.root = RootTask(should_stop=Event(), should_pause=Event())
        self.task = SaveTask(name='Test')
        self.root.add_child_task(0, self.task)
        self.root.write_in_database('int', 1)
        self.root.write_in_database('float', 2.0)
        self.root.write_in_database('str', 'a')

    def test_saving_target_observer(self):
        """Test that changing the target does change the database content.
        """
        # 'Array' target publishes a 'Test_array' entry in the database.
        self.task.saving_target = 'Array'
        assert self.task.get_from_database('Test_array') == np.array([1.0])
        # 'File' target removes the array entry.
        self.task.saving_target = 'File'
        aux = self.task.list_accessible_database_entries()
        assert 'Test_array' not in aux
        # Combined target publishes it again.
        self.task.saving_target = 'File and array'
        assert self.task.get_from_database('Test_array') == np.array([1.0])

    def test_check1(self, tmpdir):
        """Test everything ok in file mode (no array size).
        """
        task = self.task
        task.saving_target = 'File'
        task.folder = str(tmpdir)
        task.filename = 'test{int}.txt'
        task.file_mode = 'New'
        task.header = 'test'
        task.saved_values = OrderedDict([('toto', '{str}'),
                                         ('tata', '{float}')])
        file_path = os.path.join(str(tmpdir), 'test1.txt')
        # 'New' mode: check must not create the file.
        test, traceback = task.check()
        assert test and not traceback
        assert not os.path.isfile(file_path)
        assert not task.initialized
        # 'Add' mode: check creates the file.
        task.file_mode = 'Add'
        test, traceback = task.check()
        assert test and not traceback
        assert os.path.isfile(file_path)

    def test_check2(self):
        """Test everything ok in array mode (assert database state).
        """
        task = self.task
        task.saving_target = 'Array'
        task.array_size = '1000*{float}'
        task.saved_values = OrderedDict([('toto', '{str}'),
                                         ('tata', '{float}')])
        test, traceback = task.check()
        assert test and not traceback
        # The published array is a record array named after saved_values keys.
        array = task.get_from_database('Test_array')
        assert array.dtype.names == ('toto', 'tata')

    def test_check3(self, tmpdir):
        """Test everything is ok in file & array mode.
        """
        task = self.task
        task.saving_target = 'File and array'
        task.folder = str(tmpdir)
        task.filename = 'test_rr.txt'
        task.file_mode = 'New'
        task.header = 'test'
        task.array_size = '1000*{float}'
        task.saved_values = OrderedDict([('toto', '{str}'),
                                         ('tata', '{float}')])
        file_path = os.path.join(str(tmpdir), 'test_rr.txt')
        test, traceback = task.check()
        assert test and not traceback
        assert not os.path.isfile(file_path)
        array = task.get_from_database('Test_array')
        assert array.dtype.names == ('toto', 'tata')

    def test_check4(self, tmpdir):
        """Test check issues in file mode : folder.
        """
        task = self.task
        task.saving_target = 'File'
        # '{tt}' is not a database entry -> folder formatting fails.
        task.folder = str(tmpdir) + '{tt}'
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1

    def test_check5(self, tmpdir):
        """Test check issues in file mode : file.
        """
        task = self.task
        task.saving_target = 'File'
        task.folder = str(tmpdir)
        # '{tt}' is not a database entry -> filename formatting fails.
        task.filename = 'test{tt}.txt'
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1

    def test_check6(self, tmpdir):
        """Test check issues in file mode : array_size.
        """
        task = self.task
        task.saving_target = 'File'
        task.folder = str(tmpdir)
        task.filename = 'test.txt'
        task.file_mode = 'New'
        task.header = 'test'
        # Trailing '*' makes the size expression invalid.
        task.array_size = '1000*'
        task.saved_values = OrderedDict([('toto', '{str}'),
                                         ('tata', '{float}')])
        file_path = os.path.join(str(tmpdir), 'test.txt')
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1
        assert not os.path.isfile(file_path)

    def test_check6bis(self, tmpdir):
        """Test check issues in file mode : header formatting.
        """
        task = self.task
        task.saving_target = 'File'
        task.folder = str(tmpdir)
        task.filename = 'test.txt'
        task.file_mode = 'New'
        # '{*}' is not a valid formatting expression.
        task.header = 'test {*}'
        task.array_size = '1000'
        task.saved_values = OrderedDict([('toto', '{str}'),
                                         ('tata', '{float}')])
        file_path = os.path.join(str(tmpdir), 'test.txt')
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1
        assert not os.path.isfile(file_path)

    def test_check7(self):
        """Test check issues in array mode : wrong array_size.
        """
        task = self.task
        task.saving_target = 'Array'
        task.array_size = '1000*{float}*'
        task.saved_values = OrderedDict([('toto', '{str}'),
                                         ('tata', '{float}')])
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1
        # Database entry keeps its placeholder value on failure.
        assert self.task.get_from_database('Test_array') == np.array([1.0])

    def test_check8(self):
        """Test check issues in array mode : absent array_size.
        """
        task = self.task
        task.saving_target = 'Array'
        task.saved_values = OrderedDict([('toto', '{str}'),
                                         ('tata', '{float}')])
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1
        assert self.task.get_from_database('Test_array') == np.array([1.0])

    def test_check9(self):
        """Test check issues in entrie.
        """
        task = self.task
        task.saving_target = 'Array'
        task.array_size = '1000*{float}'
        # Both the value '*{str}' and the value '{float}@' are invalid
        # expressions -> two errors expected.
        task.saved_values = OrderedDict([('toto', '*{str}'),
                                         ('tat{str}', '{float}@')])
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 2

    def test_check9bis(self):
        """Test check issues in label.
        """
        task = self.task
        task.saving_target = 'Array'
        task.array_size = '1000*{float}'
        # The label 'tat{str*}' cannot be formatted -> one error.
        task.saved_values = OrderedDict([('toto', '{str}'),
                                         ('tat{str*}', '{float}')])
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1

    def test_check10(self, tmpdir):
        """Test warning in case the file already exists in new mode.
        """
        task = self.task
        task.saving_target = 'File'
        task.folder = str(tmpdir)
        task.filename = 'test_e.txt'
        task.file_mode = 'New'
        task.header = 'test'
        task.saved_values = OrderedDict([('toto', '{str}'),
                                         ('tat{str}', '{float}')])
        file_path = os.path.join(str(tmpdir), 'test_e.txt')
        # Pre-create the file so 'New' mode should warn (check passes but
        # a non-empty traceback is returned).
        with open(file_path, 'w'):
            pass
        assert os.path.isfile(file_path)
        test, traceback = task.check()
        assert test and traceback
        assert os.path.isfile(file_path)

    def test_perform1(self, tmpdir):
        """Test performing in mode file. (Call three times perform)
        """
        task = self.task
        task.saving_target = 'File'
        task.folder = str(tmpdir)
        task.filename = 'test_perform{int}.txt'
        task.file_mode = 'Add'
        task.header = 'test {str}'
        # array_size of 3 means the task de-initializes after 3 perform calls.
        task.array_size = '3'
        task.saved_values = OrderedDict([('toto', '{str}'),
                                         ('tat{str}', '{float}')])
        file_path = os.path.join(str(tmpdir), 'test_perform1.txt')
        # Seed the file so 'Add' mode appends after the existing line.
        with open(file_path, 'w') as f:
            f.write('test\n')
        task.perform()
        assert task.initialized
        assert task.file_object
        assert task.line_index == 1
        with open(file_path) as f:
            a = f.readlines()
        assert a == ['test\n', '# test a\n', 'toto\ttata\n', 'a\t2.0\n']
        task.perform()
        assert task.initialized
        assert task.line_index == 2
        with open(file_path) as f:
            a = f.readlines()
        assert (a == ['test\n', '# test a\n', 'toto\ttata\n', 'a\t2.0\n',
                      'a\t2.0\n'])
        # Third call fills the declared size -> task de-initializes.
        task.perform()
        assert not task.initialized
        assert task.line_index == 3
        with open(file_path) as f:
            a = f.readlines()
        assert a == ['test\n', '# test a\n', 'toto\ttata\n',
                     'a\t2.0\n', 'a\t2.0\n', 'a\t2.0\n']

    def test_perform2(self):
        """Test performing in array mode. (Call three times perform)
        """
        task = self.task
        task.saving_target = 'Array'
        task.array_size = '3'
        task.saved_values = OrderedDict([('toto', '{int}'),
                                         ('tat{str}', '{float}')])
        task.perform()
        assert task.initialized
        assert task.line_index == 1
        task.perform()
        assert task.initialized
        assert task.line_index == 2
        task.perform()
        assert not task.initialized
        assert task.line_index == 3
        # Rebuild the expected record array: three identical rows of the
        # formatted saved values, all stored as 'f8'.
        dtype = np.dtype({'names': [task.format_string(s)
                                    for s in task.saved_values],
                          'formats': ['f8']*len(task.saved_values)})
        array = np.empty((3), dtype)
        array[0] = (1, 2.0)
        array[1] = (1, 2.0)
        array[2] = (1, 2.0)
        np.testing.assert_array_equal(task.array, array)
class TestSaveFileTask(object):
    """Tests for SaveFileTask: streaming saves of scalar and array values
    to a text file.
    """

    def setup(self):
        # Fresh root task with a SaveFileTask and the database entries
        # ('int', 'float', 'array') the tests format against.
        self.root = RootTask(should_stop=Event(), should_pause=Event())
        self.task = SaveFileTask(name='Test')
        self.root.add_child_task(0, self.task)
        self.root.write_in_database('int', 1)
        self.root.write_in_database('float', 2.0)
        self.root.write_in_database('array', np.array(range(10)))

    def test_check1(self, tmpdir):
        """Test everything ok in file mode (no array size).
        """
        task = self.task
        task.folder = str(tmpdir)
        task.filename = 'test{int}.txt'
        task.saved_values = OrderedDict([('toto', '{int}'),
                                         ('tata', '{float}')])
        file_path = os.path.join(str(tmpdir), 'test1.txt')
        test, traceback = task.check()
        assert test
        # check() must not create the file nor initialize the task.
        assert not os.path.isfile(file_path)
        assert not task.initialized

    def test_check4(self, tmpdir):
        """Test check issues in file mode : folder.
        """
        task = self.task
        # '{tt}' is not a database entry -> folder formatting fails.
        task.folder = str(tmpdir) + '{tt}'
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1

    def test_check5(self, tmpdir):
        """Test check issues in file mode : file.
        """
        task = self.task
        task.folder = str(tmpdir)
        # '{tt}' is not a database entry -> filename formatting fails.
        task.filename = 'test{tt}.txt'
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1

    def test_check6(self, tmpdir):
        """Test check issues in file mode : header formatting.
        """
        task = self.task
        task.folder = str(tmpdir)
        task.filename = 'test.txt'
        # '{*}' is not a valid formatting expression.
        task.header = 'test {*}'
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1

    def test_check9(self, tmpdir):
        """Test check issues in entries.
        """
        task = self.task
        task.folder = str(tmpdir)
        self.root.write_in_database('int', 3)
        task.filename = 'test{int}.txt'
        # Both saved value expressions are invalid -> two errors expected.
        task.saved_values = OrderedDict([('toto', '{int*}'),
                                         ('tata', '{float*}')])
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 2

    def test_check10(self, tmpdir):
        """Test warning in case the file already exists in new mode.
        """
        task = self.task
        task.folder = str(tmpdir)
        task.filename = 'test_e.txt'
        task.header = 'test'
        task.saved_values = OrderedDict([('toto', '{int}'),
                                         ('tata', '{float}')])
        file_path = os.path.join(str(tmpdir), 'test_e.txt')
        # Pre-create the file: check passes but reports a warning traceback.
        with open(file_path, 'w'):
            pass
        assert os.path.isfile(file_path)
        test, traceback = task.check()
        assert test
        assert traceback
        assert os.path.isfile(file_path)

    def test_perform1(self, tmpdir):
        """Test performing with non rec array. (Call twice perform)
        """
        task = self.task
        task.folder = str(tmpdir)
        task.filename = 'test_perform{int}.txt'
        task.header = 'test {float}'
        task.saved_values = OrderedDict([('toto', '{float}'),
                                         ('tata', '{array}')])
        file_path = os.path.join(str(tmpdir), 'test_perform1.txt')
        with open(file_path, 'w') as f:
            f.write('test\n')
        try:
            task.perform()
            assert task.initialized
            assert task.file_object
            with open(file_path) as f:
                a = f.readlines()
            # Header + column labels, then one line per array element with
            # the scalar broadcast alongside it.
            assert a[:2] == ['# test 2.0\n', 'toto\ttata\n']
            for i in range(10):
                assert float(a[2+i].split('\t')[0]) == 2.0
                assert float(a[2+i].split('\t')[1]) == float(i)
            task.perform()
            assert task.initialized
            with open(file_path) as f:
                a = f.readlines()
            # Second perform appends a fresh block starting back at 0.
            assert float(a[12].split('\t')[0]) == 2.0
            assert float(a[12].split('\t')[1]) == 0.0
            task.perform()
        finally:
            # Ensure the file handle is released even if an assert fails.
            task.file_object.close()

    def test_perform2(self, tmpdir):
        """Test performing with a rec array. (Call twice perform)
        """
        self.root.write_in_database('array',
                                    np.rec.fromarrays([range(10), range(10)],
                                                      names=['a', 'b']))
        task = self.task
        task.folder = str(tmpdir)
        task.filename = 'test_perform_rec.txt'
        task.header = 'test'
        task.saved_values = OrderedDict([('toto', '{float}'),
                                         ('tata', '{array}')])
        file_path = os.path.join(str(tmpdir), 'test_perform_rec.txt')
        with open(file_path, 'w') as f:
            f.write('test\n')
        try:
            task.perform()
            assert task.initialized
            assert task.file_object
            with open(file_path) as f:
                a = f.readlines()
            # Record array fields are expanded into suffixed columns.
            assert a[:2] == ['# test\n', 'toto\ttata_a\ttata_b\n']
            for i in range(10):
                assert float(a[2+i].split('\t')[0]) == 2.0
                assert float(a[2+i].split('\t')[1]) == float(i)
            task.perform()
            assert task.initialized
            with open(file_path) as f:
                a = f.readlines()
            assert float(a[12].split('\t')[0]) == 2.0
            assert float(a[12].split('\t')[1]) == 0.0
            task.perform()
        finally:
            # Ensure the file handle is released even if an assert fails.
            task.file_object.close()
class TestSaveArrayTask(object):
    """Tests for SaveArrayTask: dumping a record array to a text or binary
    file in one shot.
    """

    def setup(self):
        # Fresh root task with a SaveArrayTask and a small 2-row record
        # array (fields 'a', 'b') in the database, plus scalar entries.
        self.root = RootTask(should_stop=Event(), should_pause=Event())
        self.task = SaveArrayTask(name='Test')
        self.root.add_child_task(0, self.task)
        array = np.empty(2, dtype={'names': ('a', 'b'),
                                   'formats': ('f8', 'f8')})
        array[0] = (0, 1)
        array[1] = (2, 3)
        self.root.write_in_database('array', array)
        self.root.write_in_database('float', 2.0)
        self.root.write_in_database('str', 'a')

    def test_check1(self, tmpdir):
        """Check everything ok in Text mode.
        """
        array = np.empty(2, dtype={'names': ('a', 'b'),
                                   'formats': ('f8', 'f8')})
        self.root.write_in_database('arrays', {'a': array})
        task = self.task
        task.folder = str(tmpdir)
        task.filename = 'test_perform{str}.txt'
        task.mode = 'Text file'
        task.header = 'teststs'
        # target_array may index into a database entry.
        task.target_array = '{arrays}["a"]'
        test, traceback = task.check()
        assert test
        # check() must not create the output file.
        assert not os.path.isfile(os.path.join(str(tmpdir),
                                               'test_performa.txt'))

    def test_check2(self, tmpdir):
        """Check everything ok in Binary mode (wrong file extension, and
        header)
        """
        task = self.task
        task.folder = str(tmpdir)
        # '.txt' extension and a header are both suspicious in binary mode:
        # check passes but reports two warnings.
        task.filename = 'test_perform{str}.txt'
        task.mode = 'Binary file'
        task.header = 'teststs'
        task.target_array = '{array}'
        test, traceback = task.check()
        assert test
        assert len(traceback) == 2
        assert 'root/Test-header' in traceback
        assert 'root/Test-file_ext' in traceback
        assert not os.path.isfile(os.path.join(str(tmpdir),
                                               'test_performa.npy'))

    def test_check3(self, tmpdir):
        """Check handling a wrong folder.
        """
        task = self.task
        # '{eee}' is not a database entry -> folder formatting fails.
        task.folder = str(tmpdir) + '{eee}'
        task.target_array = '{array}'
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1

    def test_check4(self, tmpdir):
        """Check handling a wrong filename.
        """
        task = self.task
        task.folder = str(tmpdir)
        # '{rr}' is not a database entry -> filename formatting fails.
        task.filename = '{rr}'
        task.target_array = '{array}'
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1

    def test_check5(self, tmpdir):
        """Check handling a wrong database address.
        """
        task = self.task
        task.folder = str(tmpdir)
        task.filename = 'test_perform{str}.txt'
        task.mode = 'Text file'
        task.header = 'teststs'
        # '**{array}' is not a valid target expression.
        task.target_array = '**{array}'
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1

    def test_check6(self, tmpdir):
        """Check handling a wrong type.
        """
        # Replace the array entry with a scalar: the target must be an array.
        self.root.write_in_database('array', 1.0)
        task = self.task
        task.folder = str(tmpdir)
        task.filename = 'test_perform{str}.txt'
        task.mode = 'Text file'
        task.header = 'teststs'
        task.target_array = '{array}'
        test, traceback = task.check()
        assert not test
        assert len(traceback) == 1

    def test_perform1(self, tmpdir):
        """Test performing in text mode.
        """
        task = self.task
        task.folder = str(tmpdir)
        task.filename = 'test_perform{str}.txt'
        task.mode = 'Text file'
        task.header = 'tests'
        task.target_array = '{array}'
        task.perform()
        path = os.path.join(str(tmpdir), 'test_performa.txt')
        assert os.path.isfile(path)
        with open(path) as f:
            lines = f.readlines()
        # Commented header, field names, then one tab-separated row per
        # record of the array written in setup.
        assert lines[0:2] == ['# tests\n', 'a\tb\n']
        assert [float(x) for x in lines[2][:-1].split('\t')] == [0.0, 1.0]
        assert [float(x) for x in lines[3][:-1].split('\t')] == [2.0, 3.0]

    def test_perform1bis(self, tmpdir):
        """Test performing in text mode with a wrong target type.
        """
        self.root.write_in_database('array', 1.0)
        task = self.task
        task.folder = str(tmpdir)
        task.filename = 'test_perform{str}.txt'
        task.mode = 'Text file'
        task.header = 'tests'
        task.target_array = '{array}'
        # A non-array target makes perform fail with an AssertionError.
        with pytest.raises(AssertionError):
            task.perform()

    def test_perform2(self, tmpdir):
        """Test performing in binary mode.
        """
        task = self.task
        task.folder = str(tmpdir)
        task.filename = 'test_perform{str}.npy'
        task.mode = 'Binary file'
        task.target_array = '{array}'
        task.perform()
        path = os.path.join(str(tmpdir), 'test_performa.npy')
        assert os.path.isfile(path)
        # TODO understand weird numpy bug
        # a = np.load(path)
        # np.testing.assert_array_equal(a, task.get_from_database('array'))
@pytest.mark.ui
def test_save_view(exopy_qtbot, dialog_sleep, root_view):
    """Display a SaveView and exercise the saved-values pair editor."""
    save_task = SaveTask(name='Test')
    root_view.task.add_child_task(0, save_task)
    save_view = SaveView(task=save_task, root=root_view)
    window = show_widget(exopy_qtbot, save_view)
    exopy_qtbot.wait(dialog_sleep)
    # Last widget of the view is the dict editor; add one entry pair.
    pair_editor = save_view.widgets()[-1]
    pair_editor._model.add_pair(0)
    exopy_qtbot.wait(dialog_sleep)
    window.close()
@pytest.mark.ui
def test_save_file_view(exopy_qtbot, root_view):
    """Display and close a SaveFileView bound to a fresh SaveFileTask."""
    file_task = SaveFileTask(name='Test')
    root_view.task.add_child_task(0, file_task)
    file_view = SaveFileView(task=file_task, root=root_view)
    show_and_close_widget(exopy_qtbot, file_view)
@pytest.mark.ui
def test_save_array_view(exopy_qtbot, root_view):
    """Display and close a SaveArrayView bound to a fresh SaveArrayTask."""
    array_task = SaveArrayTask(name='Test')
    root_view.task.add_child_task(0, array_task)
    array_view = SaveArrayView(task=array_task, root=root_view)
    show_and_close_widget(exopy_qtbot, array_view)
| 30.653061
| 79
| 0.534177
| 2,664
| 22,530
| 4.407658
| 0.087838
| 0.030659
| 0.031681
| 0.042242
| 0.843383
| 0.804037
| 0.77193
| 0.737183
| 0.726878
| 0.695878
| 0
| 0.014867
| 0.3253
| 22,530
| 734
| 80
| 30.694823
| 0.757582
| 0.105326
| 0
| 0.778234
| 0
| 0
| 0.091729
| 0.010576
| 0
| 0
| 0
| 0.001362
| 0.215606
| 1
| 0.078029
| false
| 0.004107
| 0.022587
| 0
| 0.106776
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
25ba51d1f837e1ebd06f83f9cb7e728b07bb421c
| 153
|
py
|
Python
|
profiles/admin.py
|
doriclazar/peak_30
|
a87217e4d0d1f96d39ad214d40a879c7abfaaaee
|
[
"Apache-2.0"
] | null | null | null |
profiles/admin.py
|
doriclazar/peak_30
|
a87217e4d0d1f96d39ad214d40a879c7abfaaaee
|
[
"Apache-2.0"
] | 1
|
2018-07-14T07:35:55.000Z
|
2018-07-16T07:40:49.000Z
|
profiles/admin.py
|
doriclazar/peak_30
|
a87217e4d0d1f96d39ad214d40a879c7abfaaaee
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin

from .models import UserProfile, GroupProfile, BotProfile

# Expose each profile model in the Django admin with the default ModelAdmin.
for profile_model in (UserProfile, GroupProfile, BotProfile):
    admin.site.register(profile_model)
| 30.6
| 60
| 0.830065
| 17
| 153
| 7.470588
| 0.647059
| 0.362205
| 0.519685
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091503
| 153
| 4
| 61
| 38.25
| 0.913669
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
25bd719dadc592ad7d9d42d960a5ca5975b9cfb4
| 6,574
|
py
|
Python
|
02-Multilayer/My_Own_Net/letters.py
|
maikelSoFly/PSI_GCP04_zima_2017-2018_Mikolaj_Stepniewski
|
012a97e54c186b1fb25e8300dd77e16759632c6e
|
[
"Apache-2.0"
] | null | null | null |
02-Multilayer/My_Own_Net/letters.py
|
maikelSoFly/PSI_GCP04_zima_2017-2018_Mikolaj_Stepniewski
|
012a97e54c186b1fb25e8300dd77e16759632c6e
|
[
"Apache-2.0"
] | null | null | null |
02-Multilayer/My_Own_Net/letters.py
|
maikelSoFly/PSI_GCP04_zima_2017-2018_Mikolaj_Stepniewski
|
012a97e54c186b1fb25e8300dd77e16759632c6e
|
[
"Apache-2.0"
] | null | null | null |
class LetterInput():
    """5x7 binary bitmap of a single letter plus a classification label.

    ``_x`` holds the flattened bitmap (35 ints, row-major, 5 columns by
    7 rows) and ``_d`` holds the expected network output: 0 for the
    lowercase patterns, 1 for the uppercase ones.  Values are read back
    through ``__getitem__`` with the keys ``'x'``, ``'d'`` and
    ``'letter'``.

    Unknown letters keep the defaults (``_x == []``, ``_d is None``),
    matching the original if/elif implementation.
    """

    # Class-level table replacing the original 18-branch if/elif chain:
    # letter -> (flattened 5x7 bitmap, expected label).  Tuples keep the
    # shared table immutable; each instance receives its own list copy.
    _PATTERNS = {
        'a': ((0, 0, 0, 0, 0,
               0, 0, 0, 0, 0,
               0, 0, 0, 0, 0,
               0, 0, 1, 1, 0,
               0, 1, 0, 1, 0,
               0, 1, 0, 1, 0,
               0, 1, 1, 1, 1), 0),
        'a_noised': ((1, 0, 0, 0, 0,
                      0, 0, 0, 0, 0,
                      0, 0, 1, 0, 0,
                      0, 0, 1, 1, 0,
                      0, 1, 0, 1, 0,
                      0, 1, 0, 1, 0,
                      0, 1, 1, 1, 1), 0),
        'b': ((1, 0, 0, 0, 0,
               1, 0, 0, 0, 0,
               1, 0, 0, 0, 0,
               1, 1, 1, 1, 0,
               1, 0, 0, 1, 0,
               1, 0, 0, 1, 0,
               1, 1, 1, 1, 0), 0),
        't': ((0, 0, 0, 0, 0,
               0, 0, 0, 0, 0,
               0, 0, 0, 0, 0,
               0, 0, 1, 0, 0,
               0, 1, 1, 1, 0,
               0, 0, 1, 0, 0,
               0, 0, 1, 1, 0), 0),
        'p': ((0, 0, 0, 0, 0,
               0, 0, 0, 0, 0,
               0, 0, 0, 0, 0,
               0, 1, 1, 1, 0,
               0, 1, 0, 1, 0,
               0, 1, 1, 1, 0,
               0, 1, 0, 1, 0), 0),
        'c': ((0, 0, 0, 0, 0,
               0, 0, 0, 0, 0,
               0, 0, 0, 0, 0,
               0, 1, 1, 1, 0,
               0, 1, 0, 0, 0,
               0, 1, 0, 0, 0,
               0, 1, 1, 1, 0), 0),
        'w': ((0, 0, 0, 0, 0,
               0, 0, 0, 0, 0,
               0, 0, 0, 0, 0,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1,
               1, 0, 1, 0, 1,
               0, 1, 0, 1, 0), 0),
        'd': ((0, 0, 0, 0, 1,
               0, 0, 0, 0, 1,
               0, 0, 0, 0, 1,
               0, 0, 1, 1, 1,
               0, 1, 0, 0, 1,
               0, 1, 0, 0, 1,
               0, 1, 1, 1, 1), 0),
        'o': ((0, 0, 0, 0, 0,
               0, 0, 0, 0, 0,
               0, 0, 0, 0, 0,
               0, 1, 1, 1, 0,
               0, 1, 0, 1, 0,
               0, 1, 0, 1, 0,
               0, 1, 1, 1, 0), 0),
        'A': ((0, 1, 1, 1, 0,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1,
               1, 1, 1, 1, 1,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1), 1),
        'B': ((1, 1, 1, 1, 0,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1,
               1, 1, 1, 1, 0,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1,
               1, 1, 1, 1, 0), 1),
        'I': ((0, 0, 1, 0, 0,
               0, 0, 1, 0, 0,
               0, 0, 1, 0, 0,
               0, 0, 1, 0, 0,
               0, 0, 1, 0, 0,
               0, 0, 1, 0, 0,
               0, 0, 1, 0, 0), 1),
        'C': ((0, 1, 1, 1, 0,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 0,
               1, 0, 0, 0, 0,
               1, 0, 0, 0, 0,
               1, 0, 0, 0, 1,
               0, 1, 1, 1, 0), 1),
        'D': ((1, 1, 1, 1, 0,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1,
               1, 1, 1, 1, 0), 1),
        'D_noised': ((1, 1, 1, 1, 0,
                      1, 0, 0, 0, 1,
                      1, 0, 0, 1, 1,
                      1, 1, 0, 0, 1,
                      1, 0, 0, 0, 1,
                      1, 1, 0, 0, 1,
                      1, 1, 1, 1, 0), 1),
        'F': ((1, 1, 1, 1, 1,
               1, 0, 0, 0, 0,
               1, 0, 0, 0, 0,
               1, 1, 1, 1, 0,
               1, 0, 0, 0, 0,
               1, 0, 0, 0, 0,
               1, 0, 0, 0, 0), 1),
        'K': ((1, 0, 0, 0, 1,
               1, 0, 0, 1, 0,
               1, 0, 1, 0, 0,
               1, 1, 0, 0, 0,
               1, 0, 1, 0, 0,
               1, 0, 0, 1, 0,
               1, 0, 0, 0, 1), 1),
        'H': ((1, 0, 0, 0, 1,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1,
               1, 1, 1, 1, 1,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1,
               1, 0, 0, 0, 1), 1),
    }

    def __init__(self, letter):
        # Direct __dict__ writes preserved from the original implementation.
        self.__dict__['_x'] = []
        self.__dict__['_d'] = None
        self.__dict__['_letter'] = letter
        self.getLetter()

    def getLetter(self):
        """Load the bitmap and label for ``self._letter`` from the table.

        Leaves the defaults untouched for unknown letters, as the
        original if/elif chain did.
        """
        pattern = self._PATTERNS.get(self._letter)
        if pattern is not None:
            bitmap, label = pattern
            # Copy so callers mutating _x cannot corrupt the shared table.
            self._x = list(bitmap)
            self._d = label

    def __getitem__(self, index):
        # Dict-style read access; unknown keys fall through and return None.
        if index == 'x':
            return self._x
        elif index == 'd':
            return self._d
        elif index == 'letter':
            return self._letter
| 28.336207
| 41
| 0.210526
| 838
| 6,574
| 1.553699
| 0.0358
| 0.402458
| 0.398618
| 0.341014
| 0.830261
| 0.830261
| 0.813364
| 0.799539
| 0.764977
| 0.729647
| 0
| 0.285086
| 0.654244
| 6,574
| 231
| 42
| 28.458874
| 0.287725
| 0
| 0
| 0.731132
| 0
| 0
| 0.007758
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014151
| false
| 0
| 0
| 0
| 0.033019
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
25dac684316c0f83ee944484a524223dfadb30e6
| 294,761
|
py
|
Python
|
python2/alibabacloud_oss_sdk/client.py
|
frenchvandal/alibabacloud-oss-sdk
|
6d5a8a3b126400403b97725002e50a999ff0d67d
|
[
"Apache-2.0"
] | 15
|
2019-12-05T16:20:58.000Z
|
2021-11-18T08:04:21.000Z
|
python2/alibabacloud_oss_sdk/client.py
|
frenchvandal/alibabacloud-oss-sdk
|
6d5a8a3b126400403b97725002e50a999ff0d67d
|
[
"Apache-2.0"
] | 168
|
2019-12-09T10:24:56.000Z
|
2022-02-07T06:33:25.000Z
|
python2/alibabacloud_oss_sdk/client.py
|
frenchvandal/alibabacloud-oss-sdk
|
6d5a8a3b126400403b97725002e50a999ff0d67d
|
[
"Apache-2.0"
] | 8
|
2020-01-03T10:00:24.000Z
|
2021-05-16T09:10:28.000Z
|
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from __future__ import unicode_literals
import time
from Tea.exceptions import TeaException, UnretryableException
from Tea.request import TeaRequest
from Tea.core import TeaCore
from Tea.converter import TeaConverter
from alibabacloud_credentials.client import Client as CredentialClient
from alibabacloud_oss_sdk import models as oss_models
from alibabacloud_tea_util.client import Client as UtilClient
from alibabacloud_credentials import models as credential_models
from alibabacloud_tea_xml.client import Client as XMLClient
from alibabacloud_oss_util.client import Client as OSSUtilClient
from alibabacloud_tea_fileform.client import Client as FileFormClient
class Client(object):
# Client-level defaults. __init__ copies these from the Config object;
# each request method falls back to them for any runtime option the
# caller leaves unset.
_endpoint = None # type: str
_region_id = None # type: str
_host_model = None # type: str
_protocol = None # type: str
_read_timeout = None # type: int
_connect_timeout = None # type: int
_signature_version = None # type: str
# NOTE: 'addtional' is a misspelling of 'additional', but it is used
# consistently throughout this generated file — do not rename.
_addtional_headers = None # type: list[str]
_local_addr = None # type: str
_http_proxy = None # type: str
_https_proxy = None # type: str
_no_proxy = None # type: str
_user_agent = None # type: str
_socks_5proxy = None # type: str
# CRC/MD5 integrity-check switches; both default to False in __init__.
_is_enable_crc = None # type: bool
_is_enable_md5 = None # type: bool
_socks_5net_work = None # type: str
_max_idle_conns = None # type: int
# Credential provider used to sign every request.
_credential = None # type: CredentialClient
def __init__(self, config):
    """Build an OSS client from *config*.

    Raises TeaException('ParameterMissing') when config is unset.  An
    empty config.type defaults to 'access_key', and the md5/crc
    integrity switches default to False when unset.  All remaining
    settings are copied verbatim from config onto the matching private
    attribute.
    """
    if UtilClient.is_unset(config):
        raise TeaException({
            'name': 'ParameterMissing',
            'message': "'config' can not be unset"
        })
    if UtilClient.empty(config.type):
        config.type = 'access_key'
    # Build the credential client first; it is consulted on every request.
    credential_config = credential_models.Config(
        access_key_id=config.access_key_id,
        type=config.type,
        access_key_secret=config.access_key_secret,
        security_token=config.security_token
    )
    self._credential = CredentialClient(credential_config)
    if UtilClient.is_unset(config.is_enable_md5):
        config.is_enable_md5 = False
    if UtilClient.is_unset(config.is_enable_crc):
        config.is_enable_crc = False
    # Every remaining setting is a straight config.X -> self._X copy
    # (note: 'addtional_headers' is a misspelling kept for compatibility).
    for field_name in (
        'endpoint', 'protocol', 'region_id', 'user_agent',
        'read_timeout', 'connect_timeout', 'local_addr', 'http_proxy',
        'https_proxy', 'no_proxy', 'socks_5proxy', 'socks_5net_work',
        'max_idle_conns', 'signature_version', 'addtional_headers',
        'host_model', 'is_enable_md5', 'is_enable_crc',
    ):
        setattr(self, '_' + field_name, getattr(config, field_name))
def put_bucket_lifecycle(self, request, runtime):
    """Set the lifecycle configuration of a bucket (PUT /?lifecycle).

    request -- request model carrying bucket_name and the lifecycle
        configuration body (serialized to XML below).
    runtime -- per-call runtime options; unset values fall back to the
        client-level defaults captured in __init__.
    Returns an oss_models.PutBucketLifecycleResponse built from the
    response headers.
    Raises TeaException on a 4xx/5xx response and UnretryableException
    once the retry budget is exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-call transport/retry options; each entry defaults to the
    # client-level setting when the runtime leaves it unset.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the 'retry' policy assembled above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Honour the backoff policy before each retry.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials are re-read on every attempt.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            # Serialize the request body model to XML.
            req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
            _request.protocol = self._protocol
            _request.method = 'PUT'
            _request.pathname = '/?lifecycle'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            # Attach the security token only when one is present.
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            _request.body = req_body
            # Sign last, once method/pathname/headers/body are all set.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error path: parse the OSS error document and surface it
                # as a TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response model is populated from the headers only.
            return oss_models.PutBucketLifecycleResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Loop again only for errors Tea classifies as retryable.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # All retry attempts used up.
    raise UnretryableException(_last_request, _last_exception)
def delete_multiple_objects(self, request, runtime):
    """Delete several objects in one call (POST /?delete).

    request -- request model with bucket_name, the delete body (object
        list, serialized to XML), and optional extra headers
        (request.header, including an optional pre-computed content-md5).
    runtime -- per-call runtime options; unset values fall back to the
        client-level defaults.
    Returns an oss_models.DeleteMultipleObjectsResponse built from the
    parsed XML body ('DeleteResult') plus the response headers.
    Raises TeaException on a 4xx/5xx response and UnretryableException
    once the retry budget is exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-call transport/retry options with client-level fallbacks.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the 'retry' policy assembled above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Honour the backoff policy before each retry.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials are re-read on every attempt.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            # Serialize the delete body model to XML.
            req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
            _request.protocol = self._protocol
            _request.method = 'POST'
            _request.pathname = '/?delete'
            # Default headers combined with any caller-supplied headers
            # from request.header.
            _request.headers = TeaCore.merge({
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }, UtilClient.stringify_map_value(TeaCore.to_map(request.header)))
            # Attach the security token only when one is present.
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            _request.body = req_body
            # Prefer the caller's content-md5; otherwise derive it from
            # the body (subject to the is_enable_md5 switch).
            if not UtilClient.is_unset(request.header) and not UtilClient.empty(request.header.content_md5):
                _request.headers['content-md5'] = request.header.content_md5
            else:
                _request.headers['content-md5'] = OSSUtilClient.get_content_md5(req_body, self._is_enable_md5)
            # Sign last, once method/pathname/headers/body are all set.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error path: parse the OSS error document and surface it
                # as a TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: parse the XML payload and merge the DeleteResult
            # section with the response headers into the response model.
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.DeleteMultipleObjectsResponse())
            return oss_models.DeleteMultipleObjectsResponse().from_map(
                TeaCore.merge({
                    'DeleteResult': resp_map.get('DeleteResult')
                }, _response.headers)
            )
        except Exception as e:
            # Loop again only for errors Tea classifies as retryable.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # All retry attempts used up.
    raise UnretryableException(_last_request, _last_exception)
def put_bucket_referer(self, request, runtime):
    """Set the referer (hotlink-protection) configuration of a bucket
    (PUT /?referer).

    request -- request model carrying bucket_name and the referer
        configuration body (serialized to XML below).
    runtime -- per-call runtime options; unset values fall back to the
        client-level defaults.
    Returns an oss_models.PutBucketRefererResponse built from the
    response headers.
    Raises TeaException on a 4xx/5xx response and UnretryableException
    once the retry budget is exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-call transport/retry options with client-level fallbacks.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the 'retry' policy assembled above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Honour the backoff policy before each retry.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials are re-read on every attempt.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            # Serialize the request body model to XML.
            req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
            _request.protocol = self._protocol
            _request.method = 'PUT'
            _request.pathname = '/?referer'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            # Attach the security token only when one is present.
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            _request.body = req_body
            # Sign last, once method/pathname/headers/body are all set.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error path: parse the OSS error document and surface it
                # as a TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response model is populated from the headers only.
            return oss_models.PutBucketRefererResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Loop again only for errors Tea classifies as retryable.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # All retry attempts used up.
    raise UnretryableException(_last_request, _last_exception)
def put_bucket_website(self, request, runtime):
    """Set the static-website configuration of a bucket (PUT /?website).

    request -- request model carrying bucket_name and the website
        configuration body (serialized to XML below).
    runtime -- per-call runtime options; unset values fall back to the
        client-level defaults.
    Returns an oss_models.PutBucketWebsiteResponse built from the
    response headers.
    Raises TeaException on a 4xx/5xx response and UnretryableException
    once the retry budget is exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-call transport/retry options with client-level fallbacks.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the 'retry' policy assembled above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Honour the backoff policy before each retry.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials are re-read on every attempt.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            # Serialize the request body model to XML.
            req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
            _request.protocol = self._protocol
            _request.method = 'PUT'
            _request.pathname = '/?website'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            # Attach the security token only when one is present.
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            _request.body = req_body
            # Sign last, once method/pathname/headers/body are all set.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error path: parse the OSS error document and surface it
                # as a TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response model is populated from the headers only.
            return oss_models.PutBucketWebsiteResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Loop again only for errors Tea classifies as retryable.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # All retry attempts used up.
    raise UnretryableException(_last_request, _last_exception)
def complete_multipart_upload(self, request, runtime):
    """Complete a multipart upload (POST /{object_name} with the upload
    id passed via query parameters).

    request -- request model with bucket_name, object_name, the query
        parameters (request.filter, presumably carrying uploadId — TODO
        confirm against the request model), and the part-list body
        (serialized to XML below).
    runtime -- per-call runtime options; unset values fall back to the
        client-level defaults.
    Returns an oss_models.CompleteMultipartUploadResponse built from the
    parsed XML body ('CompleteMultipartUploadResult') plus the response
    headers.
    Raises TeaException on a 4xx/5xx response and UnretryableException
    once the retry budget is exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-call transport/retry options with client-level fallbacks.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the 'retry' policy assembled above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Honour the backoff policy before each retry.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials are re-read on every attempt.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            # Serialize the part-list body model to XML.
            req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
            _request.protocol = self._protocol
            _request.method = 'POST'
            # Object-level operation: the path is the object name itself.
            _request.pathname = '/%s' % TeaConverter.to_unicode(request.object_name)
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            # Attach the security token only when one is present.
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Query string comes from the caller-supplied filter map.
            _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
            _request.body = req_body
            # Sign last, once method/pathname/headers/query/body are set.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error path: parse the OSS error document and surface it
                # as a TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: parse the XML payload and merge the result section
            # with the response headers into the response model.
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.CompleteMultipartUploadResponse())
            return oss_models.CompleteMultipartUploadResponse().from_map(
                TeaCore.merge({
                    'CompleteMultipartUploadResult': resp_map.get('CompleteMultipartUploadResult')
                }, _response.headers)
            )
        except Exception as e:
            # Loop again only for errors Tea classifies as retryable.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # All retry attempts used up.
    raise UnretryableException(_last_request, _last_exception)
def put_bucket_logging(self, request, runtime):
    """Set the access-logging configuration of a bucket (PUT /?logging).

    request -- request model carrying bucket_name and the logging
        configuration body (serialized to XML below).
    runtime -- per-call runtime options; unset values fall back to the
        client-level defaults.
    Returns an oss_models.PutBucketLoggingResponse built from the
    response headers.
    Raises TeaException on a 4xx/5xx response and UnretryableException
    once the retry budget is exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-call transport/retry options with client-level fallbacks.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the 'retry' policy assembled above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Honour the backoff policy before each retry.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials are re-read on every attempt.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            # Serialize the request body model to XML.
            req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
            _request.protocol = self._protocol
            _request.method = 'PUT'
            _request.pathname = '/?logging'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            # Attach the security token only when one is present.
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            _request.body = req_body
            # Sign last, once method/pathname/headers/body are all set.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error path: parse the OSS error document and surface it
                # as a TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response model is populated from the headers only.
            return oss_models.PutBucketLoggingResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Loop again only for errors Tea classifies as retryable.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # All retry attempts used up.
    raise UnretryableException(_last_request, _last_exception)
def put_bucket_request_payment(self, request, runtime):
    """Set the pay-by-requester configuration of a bucket
    (PUT /?requestPayment).

    request -- request model carrying bucket_name and the request-payment
        configuration body (serialized to XML below).
    runtime -- per-call runtime options; unset values fall back to the
        client-level defaults.
    Returns an oss_models.PutBucketRequestPaymentResponse built from the
    response headers.
    Raises TeaException on a 4xx/5xx response and UnretryableException
    once the retry budget is exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-call transport/retry options with client-level fallbacks.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the 'retry' policy assembled above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Honour the backoff policy before each retry.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials are re-read on every attempt.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            # Serialize the request body model to XML.
            req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
            _request.protocol = self._protocol
            _request.method = 'PUT'
            _request.pathname = '/?requestPayment'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            # Attach the security token only when one is present.
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            _request.body = req_body
            # Sign last, once method/pathname/headers/body are all set.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error path: parse the OSS error document and surface it
                # as a TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response model is populated from the headers only.
            return oss_models.PutBucketRequestPaymentResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Loop again only for errors Tea classifies as retryable.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # All retry attempts used up.
    raise UnretryableException(_last_request, _last_exception)
def put_bucket_encryption(self, request, runtime):
    """Set the server-side encryption configuration of a bucket
    (PUT /?encryption).

    request -- request model carrying bucket_name and the encryption
        configuration body (serialized to XML below).
    runtime -- per-call runtime options; unset values fall back to the
        client-level defaults.
    Returns an oss_models.PutBucketEncryptionResponse built from the
    response headers.
    Raises TeaException on a 4xx/5xx response and UnretryableException
    once the retry budget is exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-call transport/retry options with client-level fallbacks.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the 'retry' policy assembled above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Honour the backoff policy before each retry.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials are re-read on every attempt.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            # Serialize the request body model to XML.
            req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
            _request.protocol = self._protocol
            _request.method = 'PUT'
            _request.pathname = '/?encryption'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            # Attach the security token only when one is present.
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            _request.body = req_body
            # Sign last, once method/pathname/headers/body are all set.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error path: parse the OSS error document and surface it
                # as a TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response model is populated from the headers only.
            return oss_models.PutBucketEncryptionResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Loop again only for errors Tea classifies as retryable.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # All retry attempts used up.
    raise UnretryableException(_last_request, _last_exception)
def put_live_channel(self, request, runtime):
    """Create or update a live channel (PUT /{channel_name}?live).

    request -- request model with bucket_name, channel_name and the live
        channel configuration body (serialized to XML below).
    runtime -- per-call runtime options; unset values fall back to the
        client-level defaults.
    Returns an oss_models.PutLiveChannelResponse built from the parsed
    XML body ('CreateLiveChannelResult') plus the response headers.
    Raises TeaException on a 4xx/5xx response and UnretryableException
    once the retry budget is exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-call transport/retry options with client-level fallbacks.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the 'retry' policy assembled above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Honour the backoff policy before each retry.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials are re-read on every attempt.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            # Serialize the channel configuration body to XML.
            req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
            _request.protocol = self._protocol
            _request.method = 'PUT'
            # Channel-level operation: path is the channel name plus ?live.
            _request.pathname = '/%s?live' % TeaConverter.to_unicode(request.channel_name)
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            # Attach the security token only when one is present.
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            _request.body = req_body
            # Sign last, once method/pathname/headers/body are all set.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error path: parse the OSS error document and surface it
                # as a TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: parse the XML payload and merge the result section
            # with the response headers into the response model.
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.PutLiveChannelResponse())
            return oss_models.PutLiveChannelResponse().from_map(
                TeaCore.merge({
                    'CreateLiveChannelResult': resp_map.get('CreateLiveChannelResult')
                }, _response.headers)
            )
        except Exception as e:
            # Loop again only for errors Tea classifies as retryable.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # All retry attempts used up.
    raise UnretryableException(_last_request, _last_exception)
def put_bucket_tags(self, request, runtime):
request.validate()
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
access_key_id = self._credential.get_access_key_id()
access_key_secret = self._credential.get_access_key_secret()
token = self._credential.get_security_token()
req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
_request.protocol = self._protocol
_request.method = 'PUT'
_request.pathname = '/?tagging'
_request.headers = {
'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
'date': UtilClient.get_date_utcstring(),
'user-agent': self.get_user_agent()
}
if not UtilClient.empty(token):
_request.headers['x-oss-security-token'] = token
_request.body = req_body
_request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
_last_request = _request
_response = TeaCore.do_action(_request, _runtime)
resp_map = None
body_str = None
if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
body_str = UtilClient.read_as_string(_response.body)
resp_map = OSSUtilClient.get_err_message(body_str)
raise TeaException({
'code': resp_map.get('Code'),
'message': resp_map.get('Message'),
'data': {
'httpCode': _response.status_code,
'requestId': resp_map.get('RequestId'),
'hostId': resp_map.get('HostId')
}
})
return oss_models.PutBucketTagsResponse().from_map(
TeaCore.merge(_response.headers)
)
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
def put_object_tagging(self, request, runtime):
request.validate()
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
access_key_id = self._credential.get_access_key_id()
access_key_secret = self._credential.get_access_key_secret()
token = self._credential.get_security_token()
req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
_request.protocol = self._protocol
_request.method = 'PUT'
_request.pathname = '/%s?tagging' % TeaConverter.to_unicode(request.object_name)
_request.headers = {
'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
'date': UtilClient.get_date_utcstring(),
'user-agent': self.get_user_agent()
}
if not UtilClient.empty(token):
_request.headers['x-oss-security-token'] = token
_request.body = req_body
_request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
_last_request = _request
_response = TeaCore.do_action(_request, _runtime)
resp_map = None
body_str = None
if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
body_str = UtilClient.read_as_string(_response.body)
resp_map = OSSUtilClient.get_err_message(body_str)
raise TeaException({
'code': resp_map.get('Code'),
'message': resp_map.get('Message'),
'data': {
'httpCode': _response.status_code,
'requestId': resp_map.get('RequestId'),
'hostId': resp_map.get('HostId')
}
})
return oss_models.PutObjectTaggingResponse().from_map(
TeaCore.merge(_response.headers)
)
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
def select_object(self, request, runtime):
request.validate()
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
access_key_id = self._credential.get_access_key_id()
access_key_secret = self._credential.get_access_key_secret()
token = self._credential.get_security_token()
req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
_request.protocol = self._protocol
_request.method = 'POST'
_request.pathname = '/%s' % TeaConverter.to_unicode(request.object_name)
_request.headers = {
'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
'date': UtilClient.get_date_utcstring(),
'user-agent': self.get_user_agent()
}
if not UtilClient.empty(token):
_request.headers['x-oss-security-token'] = token
_request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
_request.body = req_body
_request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
_last_request = _request
_response = TeaCore.do_action(_request, _runtime)
resp_map = None
body_str = None
if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
body_str = UtilClient.read_as_string(_response.body)
resp_map = OSSUtilClient.get_err_message(body_str)
raise TeaException({
'code': resp_map.get('Code'),
'message': resp_map.get('Message'),
'data': {
'httpCode': _response.status_code,
'requestId': resp_map.get('RequestId'),
'hostId': resp_map.get('HostId')
}
})
return oss_models.SelectObjectResponse().from_map(
TeaCore.merge(_response.headers)
)
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
def put_bucket_cors(self, request, runtime):
request.validate()
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
access_key_id = self._credential.get_access_key_id()
access_key_secret = self._credential.get_access_key_secret()
token = self._credential.get_security_token()
req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
_request.protocol = self._protocol
_request.method = 'PUT'
_request.pathname = '/?cors'
_request.headers = {
'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
'date': UtilClient.get_date_utcstring(),
'user-agent': self.get_user_agent()
}
if not UtilClient.empty(token):
_request.headers['x-oss-security-token'] = token
_request.body = req_body
_request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
_last_request = _request
_response = TeaCore.do_action(_request, _runtime)
resp_map = None
body_str = None
if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
body_str = UtilClient.read_as_string(_response.body)
resp_map = OSSUtilClient.get_err_message(body_str)
raise TeaException({
'code': resp_map.get('Code'),
'message': resp_map.get('Message'),
'data': {
'httpCode': _response.status_code,
'requestId': resp_map.get('RequestId'),
'hostId': resp_map.get('HostId')
}
})
return oss_models.PutBucketCORSResponse().from_map(
TeaCore.merge(_response.headers)
)
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
def put_bucket(self, request, runtime):
request.validate()
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
access_key_id = self._credential.get_access_key_id()
access_key_secret = self._credential.get_access_key_secret()
token = self._credential.get_security_token()
req_body = XMLClient.to_xml(TeaCore.to_map(request.body))
_request.protocol = self._protocol
_request.method = 'PUT'
_request.pathname = '/'
_request.headers = TeaCore.merge({
'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
'date': UtilClient.get_date_utcstring(),
'user-agent': self.get_user_agent()
}, UtilClient.stringify_map_value(TeaCore.to_map(request.header)))
if not UtilClient.empty(token):
_request.headers['x-oss-security-token'] = token
_request.body = req_body
_request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
_last_request = _request
_response = TeaCore.do_action(_request, _runtime)
resp_map = None
body_str = None
if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
body_str = UtilClient.read_as_string(_response.body)
resp_map = OSSUtilClient.get_err_message(body_str)
raise TeaException({
'code': resp_map.get('Code'),
'message': resp_map.get('Message'),
'data': {
'httpCode': _response.status_code,
'requestId': resp_map.get('RequestId'),
'hostId': resp_map.get('HostId')
}
})
return oss_models.PutBucketResponse().from_map(
TeaCore.merge(_response.headers)
)
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
def list_multipart_uploads(self, request, runtime):
request.validate()
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
access_key_id = self._credential.get_access_key_id()
access_key_secret = self._credential.get_access_key_secret()
token = self._credential.get_security_token()
_request.protocol = self._protocol
_request.method = 'GET'
_request.pathname = '/?uploads'
_request.headers = {
'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
'date': UtilClient.get_date_utcstring(),
'user-agent': self.get_user_agent()
}
if not UtilClient.empty(token):
_request.headers['x-oss-security-token'] = token
_request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
_request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
_last_request = _request
_response = TeaCore.do_action(_request, _runtime)
resp_map = None
body_str = None
if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
body_str = UtilClient.read_as_string(_response.body)
resp_map = OSSUtilClient.get_err_message(body_str)
raise TeaException({
'code': resp_map.get('Code'),
'message': resp_map.get('Message'),
'data': {
'httpCode': _response.status_code,
'requestId': resp_map.get('RequestId'),
'hostId': resp_map.get('HostId')
}
})
body_str = UtilClient.read_as_string(_response.body)
resp_map = XMLClient.parse_xml(body_str, oss_models.ListMultipartUploadsResponse())
return oss_models.ListMultipartUploadsResponse().from_map(
TeaCore.merge({
'ListMultipartUploadsResult': resp_map.get('ListMultipartUploadsResult')
}, _response.headers)
)
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
def get_bucket_request_payment(self, request, runtime):
request.validate()
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
access_key_id = self._credential.get_access_key_id()
access_key_secret = self._credential.get_access_key_secret()
token = self._credential.get_security_token()
_request.protocol = self._protocol
_request.method = 'GET'
_request.pathname = '/?requestPayment'
_request.headers = {
'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
'date': UtilClient.get_date_utcstring(),
'user-agent': self.get_user_agent()
}
if not UtilClient.empty(token):
_request.headers['x-oss-security-token'] = token
_request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
_last_request = _request
_response = TeaCore.do_action(_request, _runtime)
resp_map = None
body_str = None
if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
body_str = UtilClient.read_as_string(_response.body)
resp_map = OSSUtilClient.get_err_message(body_str)
raise TeaException({
'code': resp_map.get('Code'),
'message': resp_map.get('Message'),
'data': {
'httpCode': _response.status_code,
'requestId': resp_map.get('RequestId'),
'hostId': resp_map.get('HostId')
}
})
body_str = UtilClient.read_as_string(_response.body)
resp_map = XMLClient.parse_xml(body_str, oss_models.GetBucketRequestPaymentResponse())
return oss_models.GetBucketRequestPaymentResponse().from_map(
TeaCore.merge({
'RequestPaymentConfiguration': resp_map.get('RequestPaymentConfiguration')
}, _response.headers)
)
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
def get_bucket_encryption(self, request, runtime):
request.validate()
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
access_key_id = self._credential.get_access_key_id()
access_key_secret = self._credential.get_access_key_secret()
token = self._credential.get_security_token()
_request.protocol = self._protocol
_request.method = 'GET'
_request.pathname = '/?encryption'
_request.headers = {
'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
'date': UtilClient.get_date_utcstring(),
'user-agent': self.get_user_agent()
}
if not UtilClient.empty(token):
_request.headers['x-oss-security-token'] = token
_request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
_last_request = _request
_response = TeaCore.do_action(_request, _runtime)
resp_map = None
body_str = None
if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
body_str = UtilClient.read_as_string(_response.body)
resp_map = OSSUtilClient.get_err_message(body_str)
raise TeaException({
'code': resp_map.get('Code'),
'message': resp_map.get('Message'),
'data': {
'httpCode': _response.status_code,
'requestId': resp_map.get('RequestId'),
'hostId': resp_map.get('HostId')
}
})
body_str = UtilClient.read_as_string(_response.body)
resp_map = XMLClient.parse_xml(body_str, oss_models.GetBucketEncryptionResponse())
return oss_models.GetBucketEncryptionResponse().from_map(
TeaCore.merge({
'ServerSideEncryptionRule': resp_map.get('ServerSideEncryptionRule')
}, _response.headers)
)
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
def get_bucket_tags(self, request, runtime):
request.validate()
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
access_key_id = self._credential.get_access_key_id()
access_key_secret = self._credential.get_access_key_secret()
token = self._credential.get_security_token()
_request.protocol = self._protocol
_request.method = 'GET'
_request.pathname = '/?tagging'
_request.headers = {
'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
'date': UtilClient.get_date_utcstring(),
'user-agent': self.get_user_agent()
}
if not UtilClient.empty(token):
_request.headers['x-oss-security-token'] = token
_request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
_last_request = _request
_response = TeaCore.do_action(_request, _runtime)
resp_map = None
body_str = None
if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
body_str = UtilClient.read_as_string(_response.body)
resp_map = OSSUtilClient.get_err_message(body_str)
raise TeaException({
'code': resp_map.get('Code'),
'message': resp_map.get('Message'),
'data': {
'httpCode': _response.status_code,
'requestId': resp_map.get('RequestId'),
'hostId': resp_map.get('HostId')
}
})
body_str = UtilClient.read_as_string(_response.body)
resp_map = XMLClient.parse_xml(body_str, oss_models.GetBucketTagsResponse())
return oss_models.GetBucketTagsResponse().from_map(
TeaCore.merge({
'Tagging': resp_map.get('Tagging')
}, _response.headers)
)
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
def get_service(self, request, runtime):
request.validate()
runtime.validate()
_runtime = {
'timeouted': 'retry',
'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
'retry': {
'retryable': runtime.autoretry,
'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
},
'backoff': {
'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
'period': UtilClient.default_number(runtime.backoff_period, 1)
},
'ignoreSSL': runtime.ignore_ssl
}
_last_request = None
_last_exception = None
_now = time.time()
_retry_times = 0
while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
if _retry_times > 0:
_backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
if _backoff_time > 0:
TeaCore.sleep(_backoff_time)
_retry_times = _retry_times + 1
try:
_request = TeaRequest()
access_key_id = self._credential.get_access_key_id()
access_key_secret = self._credential.get_access_key_secret()
token = self._credential.get_security_token()
_request.protocol = self._protocol
_request.method = 'GET'
_request.pathname = '/'
_request.headers = {
'host': OSSUtilClient.get_host('', self._region_id, self._endpoint, self._host_model),
'date': UtilClient.get_date_utcstring(),
'user-agent': self.get_user_agent()
}
if not UtilClient.empty(token):
_request.headers['x-oss-security-token'] = token
_request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
_request.headers['authorization'] = OSSUtilClient.get_signature(_request, '', access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
_last_request = _request
_response = TeaCore.do_action(_request, _runtime)
resp_map = None
body_str = None
if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
body_str = UtilClient.read_as_string(_response.body)
resp_map = OSSUtilClient.get_err_message(body_str)
raise TeaException({
'code': resp_map.get('Code'),
'message': resp_map.get('Message'),
'data': {
'httpCode': _response.status_code,
'requestId': resp_map.get('RequestId'),
'hostId': resp_map.get('HostId')
}
})
body_str = UtilClient.read_as_string(_response.body)
resp_map = XMLClient.parse_xml(body_str, oss_models.GetServiceResponse())
return oss_models.GetServiceResponse().from_map(
TeaCore.merge({
'ListAllMyBucketsResult': resp_map.get('ListAllMyBucketsResult')
}, _response.headers)
)
except Exception as e:
if TeaCore.is_retryable(e):
_last_exception = e
continue
raise e
raise UnretryableException(_last_request, _last_exception)
def delete_bucket_encryption(self, request, runtime):
    """Remove the server-side encryption configuration from a bucket.

    Issues ``DELETE /?encryption`` against the bucket's virtual host.

    NOTE(review): auto-generated Tea/Darabonba client code — the statement
    order is signature-sensitive (all headers must be set before
    ``get_signature`` is computed), so keep the sequence intact.

    :param request: model carrying ``bucket_name``; validated before use
    :param runtime: per-call runtime options (timeouts, proxies, retry)
    :return: ``oss_models.DeleteBucketEncryptionResponse`` built from the
        response headers only (no body is parsed on success)
    :raises TeaException: on a 4xx/5xx response, carrying the parsed OSS
        error ``Code``/``Message``/``RequestId``/``HostId``
    :raises UnretryableException: when the retry budget is exhausted
    """
    request.validate()
    runtime.validate()
    # Merge per-call options with the client-level defaults into the map
    # consumed by TeaCore.do_action / allow_retry / get_backoff_time.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop: TeaCore decides, from the retry policy and the attempt
    # count, whether another attempt is allowed.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Back off before every attempt after the first one.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials are re-read on every attempt so a refreshed STS
            # token is picked up mid-retry.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'DELETE'
            _request.pathname = '/?encryption'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers the headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML body with Code/Message/etc.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response model is populated from headers alone.
            return oss_models.DeleteBucketEncryptionResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Retryable errors go back around the loop; others propagate.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # Retry budget exhausted without a successful attempt.
    raise UnretryableException(_last_request, _last_exception)
def delete_bucket_tags(self, request, runtime):
    """Delete tags from a bucket.

    Issues ``DELETE /`` with query parameters taken from
    ``request.filter`` (presumably the ``tagging`` sub-resource key —
    TODO confirm against the request model).

    NOTE(review): auto-generated Tea/Darabonba client code — headers and
    query must be set before ``get_signature``; keep the order intact.

    :param request: model carrying ``bucket_name`` and ``filter``
    :param runtime: per-call runtime options (timeouts, proxies, retry)
    :return: ``oss_models.DeleteBucketTagsResponse`` built from headers
    :raises TeaException: on a 4xx/5xx response with the parsed OSS error
    :raises UnretryableException: when the retry budget is exhausted
    """
    request.validate()
    runtime.validate()
    # Merge per-call options with client-level defaults for TeaCore.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the retry/backoff policy above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Back off before every attempt after the first one.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials re-read per attempt to pick up refreshed tokens.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'DELETE'
            _request.pathname = '/'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Query parameters come from request.filter, stringified.
            _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
            # Sign last: the signature covers headers and query.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML body with Code/Message/etc.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response model is populated from headers alone.
            return oss_models.DeleteBucketTagsResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Retryable errors go back around the loop; others propagate.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # Retry budget exhausted without a successful attempt.
    raise UnretryableException(_last_request, _last_exception)
def get_bucket_website(self, request, runtime):
    """Fetch the static-website configuration of a bucket.

    Issues ``GET /?website`` and parses the XML body's
    ``WebsiteConfiguration`` element into the response model.

    NOTE(review): auto-generated Tea/Darabonba client code — keep the
    statement order; the signature covers the headers set before it.

    :param request: model carrying ``bucket_name``
    :param runtime: per-call runtime options (timeouts, proxies, retry)
    :return: ``oss_models.GetBucketWebsiteResponse`` merged from the
        parsed XML body and the response headers
    :raises TeaException: on a 4xx/5xx response with the parsed OSS error
    :raises UnretryableException: when the retry budget is exhausted
    """
    request.validate()
    runtime.validate()
    # Merge per-call options with client-level defaults for TeaCore.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the retry/backoff policy above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Back off before every attempt after the first one.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials re-read per attempt to pick up refreshed tokens.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'GET'
            _request.pathname = '/?website'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers the headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML body with Code/Message/etc.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: parse the XML payload and merge with headers.
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.GetBucketWebsiteResponse())
            return oss_models.GetBucketWebsiteResponse().from_map(
                TeaCore.merge({
                    'WebsiteConfiguration': resp_map.get('WebsiteConfiguration')
                }, _response.headers)
            )
        except Exception as e:
            # Retryable errors go back around the loop; others propagate.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # Retry budget exhausted without a successful attempt.
    raise UnretryableException(_last_request, _last_exception)
def delete_live_channel(self, request, runtime):
    """Delete a live channel from a bucket.

    Issues ``DELETE /{channel_name}?live`` against the bucket's host.

    NOTE(review): auto-generated Tea/Darabonba client code — keep the
    statement order; the signature covers the headers set before it.

    :param request: model carrying ``bucket_name`` and ``channel_name``
    :param runtime: per-call runtime options (timeouts, proxies, retry)
    :return: ``oss_models.DeleteLiveChannelResponse`` built from headers
    :raises TeaException: on a 4xx/5xx response with the parsed OSS error
    :raises UnretryableException: when the retry budget is exhausted
    """
    request.validate()
    runtime.validate()
    # Merge per-call options with client-level defaults for TeaCore.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the retry/backoff policy above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Back off before every attempt after the first one.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials re-read per attempt to pick up refreshed tokens.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'DELETE'
            # Channel name is interpolated into the path; ?live selects
            # the live-channel sub-resource.
            _request.pathname = '/%s?live' % TeaConverter.to_unicode(request.channel_name)
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers the headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML body with Code/Message/etc.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response model is populated from headers alone.
            return oss_models.DeleteLiveChannelResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Retryable errors go back around the loop; others propagate.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # Retry budget exhausted without a successful attempt.
    raise UnretryableException(_last_request, _last_exception)
def get_bucket_location(self, request, runtime):
    """Query the data-center region (location) a bucket resides in.

    Issues ``GET /?location`` and parses the XML body's
    ``LocationConstraint`` element into the response model.

    NOTE(review): auto-generated Tea/Darabonba client code — keep the
    statement order; the signature covers the headers set before it.

    :param request: model carrying ``bucket_name``
    :param runtime: per-call runtime options (timeouts, proxies, retry)
    :return: ``oss_models.GetBucketLocationResponse`` merged from the
        parsed XML body and the response headers
    :raises TeaException: on a 4xx/5xx response with the parsed OSS error
    :raises UnretryableException: when the retry budget is exhausted
    """
    request.validate()
    runtime.validate()
    # Merge per-call options with client-level defaults for TeaCore.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the retry/backoff policy above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Back off before every attempt after the first one.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials re-read per attempt to pick up refreshed tokens.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'GET'
            _request.pathname = '/?location'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers the headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML body with Code/Message/etc.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: parse the XML payload and merge with headers.
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.GetBucketLocationResponse())
            return oss_models.GetBucketLocationResponse().from_map(
                TeaCore.merge({
                    'LocationConstraint': resp_map.get('LocationConstraint')
                }, _response.headers)
            )
        except Exception as e:
            # Retryable errors go back around the loop; others propagate.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # Retry budget exhausted without a successful attempt.
    raise UnretryableException(_last_request, _last_exception)
def list_live_channel(self, request, runtime):
    """List the live channels of a bucket.

    Issues ``GET /?live`` with query parameters from ``request.filter``
    and parses the XML body's ``ListLiveChannelResult`` element.

    NOTE(review): auto-generated Tea/Darabonba client code — headers and
    query must be set before ``get_signature``; keep the order intact.

    :param request: model carrying ``bucket_name`` and ``filter``
    :param runtime: per-call runtime options (timeouts, proxies, retry)
    :return: ``oss_models.ListLiveChannelResponse`` merged from the
        parsed XML body and the response headers
    :raises TeaException: on a 4xx/5xx response with the parsed OSS error
    :raises UnretryableException: when the retry budget is exhausted
    """
    request.validate()
    runtime.validate()
    # Merge per-call options with client-level defaults for TeaCore.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the retry/backoff policy above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Back off before every attempt after the first one.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials re-read per attempt to pick up refreshed tokens.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'GET'
            _request.pathname = '/?live'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Query parameters come from request.filter, stringified.
            _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
            # Sign last: the signature covers headers and query.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML body with Code/Message/etc.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: parse the XML payload and merge with headers.
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.ListLiveChannelResponse())
            return oss_models.ListLiveChannelResponse().from_map(
                TeaCore.merge({
                    'ListLiveChannelResult': resp_map.get('ListLiveChannelResult')
                }, _response.headers)
            )
        except Exception as e:
            # Retryable errors go back around the loop; others propagate.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # Retry budget exhausted without a successful attempt.
    raise UnretryableException(_last_request, _last_exception)
def get_object_meta(self, request, runtime):
    """Fetch lightweight metadata of an object without its content.

    Issues ``HEAD /{object_name}?objectMeta``; the result is taken
    entirely from the response headers (HEAD has no body on success).

    NOTE(review): auto-generated Tea/Darabonba client code — keep the
    statement order; the signature covers the headers set before it.

    :param request: model carrying ``bucket_name`` and ``object_name``
    :param runtime: per-call runtime options (timeouts, proxies, retry)
    :return: ``oss_models.GetObjectMetaResponse`` built from headers
    :raises TeaException: on a 4xx/5xx response with the parsed OSS error
    :raises UnretryableException: when the retry budget is exhausted
    """
    request.validate()
    runtime.validate()
    # Merge per-call options with client-level defaults for TeaCore.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the retry/backoff policy above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Back off before every attempt after the first one.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials re-read per attempt to pick up refreshed tokens.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'HEAD'
            # Object key is interpolated into the path; ?objectMeta
            # selects the metadata sub-resource.
            _request.pathname = '/%s?objectMeta' % TeaConverter.to_unicode(request.object_name)
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers the headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML body with Code/Message/etc.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response model is populated from headers alone.
            return oss_models.GetObjectMetaResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Retryable errors go back around the loop; others propagate.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # Retry budget exhausted without a successful attempt.
    raise UnretryableException(_last_request, _last_exception)
def get_bucket_acl(self, request, runtime):
    """Fetch the access-control list (ACL) of a bucket.

    Issues ``GET /?acl`` and parses the XML body's
    ``AccessControlPolicy`` element into the response model.

    NOTE(review): auto-generated Tea/Darabonba client code — keep the
    statement order; the signature covers the headers set before it.

    :param request: model carrying ``bucket_name``
    :param runtime: per-call runtime options (timeouts, proxies, retry)
    :return: ``oss_models.GetBucketAclResponse`` merged from the parsed
        XML body and the response headers
    :raises TeaException: on a 4xx/5xx response with the parsed OSS error
    :raises UnretryableException: when the retry budget is exhausted
    """
    request.validate()
    runtime.validate()
    # Merge per-call options with client-level defaults for TeaCore.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the retry/backoff policy above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Back off before every attempt after the first one.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials re-read per attempt to pick up refreshed tokens.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'GET'
            _request.pathname = '/?acl'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers the headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML body with Code/Message/etc.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: parse the XML payload and merge with headers.
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.GetBucketAclResponse())
            return oss_models.GetBucketAclResponse().from_map(
                TeaCore.merge({
                    'AccessControlPolicy': resp_map.get('AccessControlPolicy')
                }, _response.headers)
            )
        except Exception as e:
            # Retryable errors go back around the loop; others propagate.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # Retry budget exhausted without a successful attempt.
    raise UnretryableException(_last_request, _last_exception)
def list_parts(self, request, runtime):
    """List the uploaded parts of a multipart upload.

    Issues ``GET /{object_name}`` with query parameters from
    ``request.filter`` (presumably including the ``uploadId`` — TODO
    confirm against the request model) and parses the XML body's
    ``ListPartsResult`` element.

    NOTE(review): auto-generated Tea/Darabonba client code — headers and
    query must be set before ``get_signature``; keep the order intact.

    :param request: model carrying ``bucket_name``, ``object_name`` and
        ``filter``
    :param runtime: per-call runtime options (timeouts, proxies, retry)
    :return: ``oss_models.ListPartsResponse`` merged from the parsed XML
        body and the response headers
    :raises TeaException: on a 4xx/5xx response with the parsed OSS error
    :raises UnretryableException: when the retry budget is exhausted
    """
    request.validate()
    runtime.validate()
    # Merge per-call options with client-level defaults for TeaCore.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the retry/backoff policy above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Back off before every attempt after the first one.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials re-read per attempt to pick up refreshed tokens.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'GET'
            # Object key is interpolated into the path.
            _request.pathname = '/%s' % TeaConverter.to_unicode(request.object_name)
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Query parameters come from request.filter, stringified.
            _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
            # Sign last: the signature covers headers and query.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML body with Code/Message/etc.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: parse the XML payload and merge with headers.
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.ListPartsResponse())
            return oss_models.ListPartsResponse().from_map(
                TeaCore.merge({
                    'ListPartsResult': resp_map.get('ListPartsResult')
                }, _response.headers)
            )
        except Exception as e:
            # Retryable errors go back around the loop; others propagate.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # Retry budget exhausted without a successful attempt.
    raise UnretryableException(_last_request, _last_exception)
def get_live_channel_history(self, request, runtime):
    """Fetch the push history of a live channel.

    Issues ``GET /{channel_name}?live`` with query parameters from
    ``request.filter`` and parses the XML body's ``LiveChannelHistory``
    element into the response model.

    NOTE(review): auto-generated Tea/Darabonba client code — headers and
    query must be set before ``get_signature``; keep the order intact.

    :param request: model carrying ``bucket_name``, ``channel_name`` and
        ``filter``
    :param runtime: per-call runtime options (timeouts, proxies, retry)
    :return: ``oss_models.GetLiveChannelHistoryResponse`` merged from
        the parsed XML body and the response headers
    :raises TeaException: on a 4xx/5xx response with the parsed OSS error
    :raises UnretryableException: when the retry budget is exhausted
    """
    request.validate()
    runtime.validate()
    # Merge per-call options with client-level defaults for TeaCore.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop driven by the retry/backoff policy above.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Back off before every attempt after the first one.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # Credentials re-read per attempt to pick up refreshed tokens.
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'GET'
            # Channel name is interpolated into the path; ?live selects
            # the live-channel sub-resource.
            _request.pathname = '/%s?live' % TeaConverter.to_unicode(request.channel_name)
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Query parameters come from request.filter, stringified.
            _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
            # Sign last: the signature covers headers and query.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML body with Code/Message/etc.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: parse the XML payload and merge with headers.
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.GetLiveChannelHistoryResponse())
            return oss_models.GetLiveChannelHistoryResponse().from_map(
                TeaCore.merge({
                    'LiveChannelHistory': resp_map.get('LiveChannelHistory')
                }, _response.headers)
            )
        except Exception as e:
            # Retryable errors go back around the loop; others propagate.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # Retry budget exhausted without a successful attempt.
    raise UnretryableException(_last_request, _last_exception)
    def get_bucket(self, request, runtime):
        """Issue the OSS GetBucket (ListObjects) request for a bucket.

        Builds, signs and sends ``GET /`` against the bucket host, retrying
        according to the per-call *runtime* policy.

        :param request: GetBucketRequest model (``bucket_name``, ``filter``).
        :param runtime: RuntimeOptions; each value falls back to the client
            default when unset.
        :return: GetBucketResponse built from the parsed ``ListBucketResult``
            XML body merged with the response headers.
        :raises TeaException: on any 4xx/5xx response, carrying the parsed
            OSS error code/message/requestId.
        :raises UnretryableException: when the retry budget is exhausted.
        """
        request.validate()
        runtime.validate()
        # Per-attempt transport options: runtime overrides, client defaults as fallback.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            # Sleep per the backoff policy before every attempt after the first.
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'GET'
                _request.pathname = '/'
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # List filters (prefix, marker, max-keys, ...) become query parameters.
                _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
                # Sign last: the authorization header must cover the final headers/query.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                # 4xx/5xx: parse the OSS XML error payload and raise a structured error.
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = XMLClient.parse_xml(body_str, oss_models.GetBucketResponse())
                return oss_models.GetBucketResponse().from_map(
                    TeaCore.merge({
                        'ListBucketResult': resp_map.get('ListBucketResult')
                    }, _response.headers)
                )
            except Exception as e:
                # Only transport-level retryable errors continue the loop.
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        raise UnretryableException(_last_request, _last_exception)
    def get_live_channel_info(self, request, runtime):
        """Fetch the configuration of an OSS live channel (GetLiveChannelInfo).

        Sends ``GET /{channel_name}?live`` with retry/backoff per *runtime*.
        NOTE(review): unlike get_live_channel_stat, this variant does not copy
        ``request.filter`` into the query string — presumably the generated
        spec defines no filter for this operation; confirm against the model.

        :param request: GetLiveChannelInfoRequest (``bucket_name``, ``channel_name``).
        :param runtime: RuntimeOptions; values fall back to client defaults.
        :return: GetLiveChannelInfoResponse built from the parsed
            ``LiveChannelConfiguration`` XML merged with response headers.
        :raises TeaException: on 4xx/5xx with parsed OSS error details.
        :raises UnretryableException: when the retry budget is exhausted.
        """
        request.validate()
        runtime.validate()
        # Per-attempt transport options: runtime overrides, client defaults as fallback.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            # Sleep per the backoff policy before every attempt after the first.
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'GET'
                _request.pathname = '/%s?live' % TeaConverter.to_unicode(request.channel_name)
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # Sign last: the authorization header must cover the final headers/query.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                # 4xx/5xx: parse the OSS XML error payload and raise a structured error.
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = XMLClient.parse_xml(body_str, oss_models.GetLiveChannelInfoResponse())
                return oss_models.GetLiveChannelInfoResponse().from_map(
                    TeaCore.merge({
                        'LiveChannelConfiguration': resp_map.get('LiveChannelConfiguration')
                    }, _response.headers)
                )
            except Exception as e:
                # Only transport-level retryable errors continue the loop.
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        raise UnretryableException(_last_request, _last_exception)
    def get_live_channel_stat(self, request, runtime):
        """Fetch the streaming status of an OSS live channel (GetLiveChannelStat).

        Sends ``GET /{channel_name}?live`` with ``request.filter`` as query
        parameters, retrying per the *runtime* policy.

        :param request: GetLiveChannelStatRequest (``bucket_name``,
            ``channel_name``, ``filter``).
        :param runtime: RuntimeOptions; values fall back to client defaults.
        :return: GetLiveChannelStatResponse built from the parsed
            ``LiveChannelStat`` XML merged with response headers.
        :raises TeaException: on 4xx/5xx with parsed OSS error details.
        :raises UnretryableException: when the retry budget is exhausted.
        """
        request.validate()
        runtime.validate()
        # Per-attempt transport options: runtime overrides, client defaults as fallback.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            # Sleep per the backoff policy before every attempt after the first.
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'GET'
                _request.pathname = '/%s?live' % TeaConverter.to_unicode(request.channel_name)
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
                # Sign last: the authorization header must cover the final headers/query.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                # 4xx/5xx: parse the OSS XML error payload and raise a structured error.
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = XMLClient.parse_xml(body_str, oss_models.GetLiveChannelStatResponse())
                return oss_models.GetLiveChannelStatResponse().from_map(
                    TeaCore.merge({
                        'LiveChannelStat': resp_map.get('LiveChannelStat')
                    }, _response.headers)
                )
            except Exception as e:
                # Only transport-level retryable errors continue the loop.
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        raise UnretryableException(_last_request, _last_exception)
    def delete_object(self, request, runtime):
        """Delete a single object from a bucket (DeleteObject).

        Sends ``DELETE /{object_name}`` with retry/backoff per *runtime*.
        The success response has no XML body; only headers are mapped.

        :param request: DeleteObjectRequest (``bucket_name``, ``object_name``).
        :param runtime: RuntimeOptions; values fall back to client defaults.
        :return: DeleteObjectResponse built from the response headers.
        :raises TeaException: on 4xx/5xx with parsed OSS error details.
        :raises UnretryableException: when the retry budget is exhausted.
        """
        request.validate()
        runtime.validate()
        # Per-attempt transport options: runtime overrides, client defaults as fallback.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            # Sleep per the backoff policy before every attempt after the first.
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'DELETE'
                _request.pathname = '/%s' % TeaConverter.to_unicode(request.object_name)
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # Sign last: the authorization header must cover the final headers/query.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                # 4xx/5xx: parse the OSS XML error payload and raise a structured error.
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                return oss_models.DeleteObjectResponse().from_map(
                    TeaCore.merge(_response.headers)
                )
            except Exception as e:
                # Only transport-level retryable errors continue the loop.
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        raise UnretryableException(_last_request, _last_exception)
    def abort_multipart_upload(self, request, runtime):
        """Abort an in-progress multipart upload (AbortMultipartUpload).

        Sends ``DELETE /{object_name}`` with ``request.filter`` as query
        parameters (presumably carrying ``uploadId`` — confirm against the
        request model), retrying per the *runtime* policy. The success
        response has no XML body; only headers are mapped.

        :param request: AbortMultipartUploadRequest (``bucket_name``,
            ``object_name``, ``filter``).
        :param runtime: RuntimeOptions; values fall back to client defaults.
        :return: AbortMultipartUploadResponse built from the response headers.
        :raises TeaException: on 4xx/5xx with parsed OSS error details.
        :raises UnretryableException: when the retry budget is exhausted.
        """
        request.validate()
        runtime.validate()
        # Per-attempt transport options: runtime overrides, client defaults as fallback.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            # Sleep per the backoff policy before every attempt after the first.
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'DELETE'
                _request.pathname = '/%s' % TeaConverter.to_unicode(request.object_name)
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
                # Sign last: the authorization header must cover the final headers/query.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                # 4xx/5xx: parse the OSS XML error payload and raise a structured error.
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                return oss_models.AbortMultipartUploadResponse().from_map(
                    TeaCore.merge(_response.headers)
                )
            except Exception as e:
                # Only transport-level retryable errors continue the loop.
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        raise UnretryableException(_last_request, _last_exception)
    def append_object(self, request, runtime):
        """Append data to an appendable object (AppendObject).

        Sends ``POST /{object_name}?append`` with the request body wrapped by
        ``OSSUtilClient.inject`` so client-side CRC64/MD5 checksums accumulate
        into ``ctx`` while the body streams; after the call those checksums
        are compared against the server's response headers when the client's
        ``_is_enable_crc`` / ``_is_enable_md5`` flags are set.

        :param request: AppendObjectRequest (``bucket_name``, ``object_name``,
            ``body``, ``header``, ``user_meta``, ``filter`` — the filter
            presumably carries the ``position`` parameter; confirm in model).
        :param runtime: RuntimeOptions; values fall back to client defaults.
        :return: AppendObjectResponse built from the response headers.
        :raises TeaException: on 4xx/5xx (parsed OSS error), or with code
            ``CrcNotMatched`` / ``MD5NotMatched`` on checksum mismatch.
        :raises UnretryableException: when the retry budget is exhausted.
        """
        request.validate()
        runtime.validate()
        # Per-attempt transport options: runtime overrides, client defaults as fallback.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            # Sleep per the backoff policy before every attempt after the first.
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                # ctx collects client-side 'crc'/'md5' as the injected body is consumed.
                ctx = {}
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'POST'
                _request.pathname = '/%s?append' % TeaConverter.to_unicode(request.object_name)
                # Base headers merged with caller-supplied headers and x-oss-meta-* user metadata.
                _request.headers = TeaCore.merge({
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }, UtilClient.stringify_map_value(TeaCore.to_map(request.header)),
                OSSUtilClient.parse_meta(request.user_meta, 'x-oss-meta-'))
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
                _request.body = OSSUtilClient.inject(request.body, ctx)
                # Explicit content-type wins; otherwise derive it from the object name.
                if not UtilClient.is_unset(request.header) and not UtilClient.empty(request.header.content_type):
                    _request.headers['content-type'] = request.header.content_type
                else:
                    _request.headers['content-type'] = OSSUtilClient.get_content_type(request.object_name)
                # Sign last: the authorization header must cover the final headers/query.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                # 4xx/5xx: parse the OSS XML error payload and raise a structured error.
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                # Verify client-computed CRC64 against the server's value when enabled.
                if self._is_enable_crc and not UtilClient.equal_string(ctx.get('crc'), _response.headers.get('x-oss-hash-crc64ecma')):
                    raise TeaException({
                        'code': 'CrcNotMatched',
                        'data': {
                            'clientCrc': ctx.get('crc'),
                            'serverCrc': _response.headers.get('x-oss-hash-crc64ecma')
                        }
                    })
                # Verify client-computed MD5 against the server's value when enabled.
                if self._is_enable_md5 and not UtilClient.equal_string(ctx.get('md5'), _response.headers.get('content-md5')):
                    raise TeaException({
                        'code': 'MD5NotMatched',
                        'data': {
                            'clientMD5': ctx.get('md5'),
                            'serverMD5': _response.headers.get('content-md5')
                        }
                    })
                return oss_models.AppendObjectResponse().from_map(
                    TeaCore.merge(_response.headers)
                )
            except Exception as e:
                # Only transport-level retryable errors continue the loop.
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        raise UnretryableException(_last_request, _last_exception)
    def upload_part_copy(self, request, runtime):
        """Copy data from an existing object into a multipart-upload part (UploadPartCopy).

        Sends ``PUT /{object_name}`` with caller headers merged in (the copy
        source is presumably conveyed via ``request.header`` — confirm in the
        model) and ``request.filter`` as query parameters, retrying per the
        *runtime* policy.

        :param request: UploadPartCopyRequest (``bucket_name``,
            ``object_name``, ``header``, ``filter``).
        :param runtime: RuntimeOptions; values fall back to client defaults.
        :return: UploadPartCopyResponse built from the parsed
            ``CopyPartResult`` XML merged with response headers.
        :raises TeaException: on 4xx/5xx with parsed OSS error details.
        :raises UnretryableException: when the retry budget is exhausted.
        """
        request.validate()
        runtime.validate()
        # Per-attempt transport options: runtime overrides, client defaults as fallback.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            # Sleep per the backoff policy before every attempt after the first.
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'PUT'
                _request.pathname = '/%s' % TeaConverter.to_unicode(request.object_name)
                # Base headers merged with caller-supplied request headers.
                _request.headers = TeaCore.merge({
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }, UtilClient.stringify_map_value(TeaCore.to_map(request.header)))
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
                # Sign last: the authorization header must cover the final headers/query.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                # 4xx/5xx: parse the OSS XML error payload and raise a structured error.
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = XMLClient.parse_xml(body_str, oss_models.UploadPartCopyResponse())
                return oss_models.UploadPartCopyResponse().from_map(
                    TeaCore.merge({
                        'CopyPartResult': resp_map.get('CopyPartResult')
                    }, _response.headers)
                )
            except Exception as e:
                # Only transport-level retryable errors continue the loop.
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        raise UnretryableException(_last_request, _last_exception)
    def get_vod_playlist(self, request, runtime):
        """Fetch the VOD playlist of a live channel (GetVodPlaylist).

        Sends ``GET /{channel_name}?vod`` with ``request.filter`` as query
        parameters (presumably the startTime/endTime window — confirm in the
        model), retrying per the *runtime* policy.
        NOTE(review): the generated mapping discards the response body and
        builds the result from headers only; verify this matches the
        GetVodPlaylistResponse model before relying on playlist content here.

        :param request: GetVodPlaylistRequest (``bucket_name``,
            ``channel_name``, ``filter``).
        :param runtime: RuntimeOptions; values fall back to client defaults.
        :return: GetVodPlaylistResponse built from the response headers.
        :raises TeaException: on 4xx/5xx with parsed OSS error details.
        :raises UnretryableException: when the retry budget is exhausted.
        """
        request.validate()
        runtime.validate()
        # Per-attempt transport options: runtime overrides, client defaults as fallback.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            # Sleep per the backoff policy before every attempt after the first.
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'GET'
                _request.pathname = '/%s?vod' % TeaConverter.to_unicode(request.channel_name)
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
                # Sign last: the authorization header must cover the final headers/query.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                # 4xx/5xx: parse the OSS XML error payload and raise a structured error.
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                return oss_models.GetVodPlaylistResponse().from_map(
                    TeaCore.merge(_response.headers)
                )
            except Exception as e:
                # Only transport-level retryable errors continue the loop.
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        raise UnretryableException(_last_request, _last_exception)
    def delete_bucket_cors(self, request, runtime):
        """Remove all CORS rules from a bucket (DeleteBucketCors).

        Sends ``DELETE /?cors`` against the bucket host with retry/backoff
        per *runtime*. The success response has no XML body; only headers
        are mapped.

        :param request: DeleteBucketCORSRequest (``bucket_name``).
        :param runtime: RuntimeOptions; values fall back to client defaults.
        :return: DeleteBucketCORSResponse built from the response headers.
        :raises TeaException: on 4xx/5xx with parsed OSS error details.
        :raises UnretryableException: when the retry budget is exhausted.
        """
        request.validate()
        runtime.validate()
        # Per-attempt transport options: runtime overrides, client defaults as fallback.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            # Sleep per the backoff policy before every attempt after the first.
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'DELETE'
                _request.pathname = '/?cors'
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # Sign last: the authorization header must cover the final headers/query.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                # 4xx/5xx: parse the OSS XML error payload and raise a structured error.
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                return oss_models.DeleteBucketCORSResponse().from_map(
                    TeaCore.merge(_response.headers)
                )
            except Exception as e:
                # Only transport-level retryable errors continue the loop.
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        raise UnretryableException(_last_request, _last_exception)
    def get_object(self, request, runtime):
        """Download an object (GetObject).

        Sends ``GET /{object_name}`` with caller headers (e.g. Range or
        conditional headers via ``request.header``) merged in, retrying per
        the *runtime* policy. On success the raw ``_response.body`` stream is
        placed into the response model unread, so the caller consumes it.

        :param request: GetObjectRequest (``bucket_name``, ``object_name``,
            ``header``).
        :param runtime: RuntimeOptions; values fall back to client defaults.
        :return: GetObjectResponse with ``body`` (the unconsumed response
            stream) plus the response headers.
        :raises TeaException: on 4xx/5xx with parsed OSS error details.
        :raises UnretryableException: when the retry budget is exhausted.
        """
        request.validate()
        runtime.validate()
        # Per-attempt transport options: runtime overrides, client defaults as fallback.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            # Sleep per the backoff policy before every attempt after the first.
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'GET'
                _request.pathname = '/%s' % TeaConverter.to_unicode(request.object_name)
                # Base headers merged with caller-supplied request headers.
                _request.headers = TeaCore.merge({
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }, UtilClient.stringify_map_value(TeaCore.to_map(request.header)))
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # Sign last: the authorization header must cover the final headers/query.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                # 4xx/5xx: parse the OSS XML error payload and raise a structured error.
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                return oss_models.GetObjectResponse().from_map(
                    TeaCore.merge({
                        'body': _response.body
                    }, _response.headers)
                )
            except Exception as e:
                # Only transport-level retryable errors continue the loop.
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        raise UnretryableException(_last_request, _last_exception)
    def upload_part(self, request, runtime):
        """Upload one part of a multipart upload via HTTP PUT.

        Builds per-call transport options from *runtime* (falling back to the
        client-level defaults), then signs and sends
        ``PUT /<object_name>?<filter>`` with the part body.  On any 4xx/5xx
        status the XML error body is parsed and re-raised as a TeaException.
        On success, when the client has CRC/MD5 verification enabled, the
        values collected in ``ctx`` are compared against the server's
        ``x-oss-hash-crc64ecma`` / ``content-md5`` response headers, and the
        response headers are mapped into an ``oss_models.UploadPartResponse``.
        Retryable transport errors are retried per the backoff policy;
        exhausting retries raises UnretryableException with the last
        request/exception.
        """
        request.validate()
        runtime.validate()
        # Per-call transport options; request-level values win over the
        # client-level defaults configured on this instance.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        # Retry loop: the first pass runs immediately; later passes sleep
        # according to the configured backoff policy before retrying.
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                # ctx receives client-side checksums ('crc'/'md5') from
                # OSSUtilClient.inject — presumably computed while the body
                # streams; confirm semantics in OSSUtilClient.
                ctx = {}
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'PUT'
                _request.pathname = '/%s' % TeaConverter.to_unicode(request.object_name)
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
                _request.body = OSSUtilClient.inject(request.body, ctx)
                # Signed last: the signature is derived from the request's
                # current headers/query, so they must all be set above.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    # OSS error responses carry an XML document; surface it
                    # as a structured TeaException.
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                # Optional end-to-end integrity checks against the server's
                # reported checksums.
                if self._is_enable_crc and not UtilClient.equal_string(ctx.get('crc'), _response.headers.get('x-oss-hash-crc64ecma')):
                    raise TeaException({
                        'code': 'CrcNotMatched',
                        'data': {
                            'clientCrc': ctx.get('crc'),
                            'serverCrc': _response.headers.get('x-oss-hash-crc64ecma')
                        }
                    })
                if self._is_enable_md5 and not UtilClient.equal_string(ctx.get('md5'), _response.headers.get('content-md5')):
                    raise TeaException({
                        'code': 'MD5NotMatched',
                        'data': {
                            'clientMD5': ctx.get('md5'),
                            'serverMD5': _response.headers.get('content-md5')
                        }
                    })
                # The result model is built from the response headers alone.
                return oss_models.UploadPartResponse().from_map(
                    TeaCore.merge(_response.headers)
                )
            except Exception as e:
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        # Retries exhausted without a definitive (non-retryable) outcome.
        raise UnretryableException(_last_request, _last_exception)
    def get_bucket_cors(self, request, runtime):
        """Fetch a bucket's CORS configuration via ``GET /?cors``.

        Signs and sends the request with retry/backoff built from *runtime*;
        4xx/5xx responses are parsed from the XML error body and raised as
        TeaException.  On success the XML body is parsed and the
        ``CORSConfiguration`` element is merged with the response headers
        into an ``oss_models.GetBucketCORSResponse``.  Exhausting retries
        raises UnretryableException.
        """
        request.validate()
        runtime.validate()
        # Per-call transport options; request-level values win over the
        # client-level defaults.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        # Retry loop: first pass runs immediately; later passes back off.
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'GET'
                _request.pathname = '/?cors'
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # Signed last: the signature covers the headers set above.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    # Surface the XML error document as a TeaException.
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                # Parse the XML payload into the response model.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = XMLClient.parse_xml(body_str, oss_models.GetBucketCORSResponse())
                return oss_models.GetBucketCORSResponse().from_map(
                    TeaCore.merge({
                        'CORSConfiguration': resp_map.get('CORSConfiguration')
                    }, _response.headers)
                )
            except Exception as e:
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        # Retries exhausted without a definitive outcome.
        raise UnretryableException(_last_request, _last_exception)
    def copy_object(self, request, runtime):
        """Server-side copy to ``/<dest_object_name>`` via HTTP PUT.

        The caller-supplied ``request.header`` map is merged into the request
        headers; its ``x-oss-copy-source`` entry (which names the source
        object) is URL-encoded before signing.  4xx/5xx responses raise a
        TeaException parsed from the XML error body; on success the XML
        ``CopyObjectResult`` element is merged with the response headers into
        an ``oss_models.CopyObjectResponse``.  Retry/backoff and
        UnretryableException behavior match the other client operations.
        """
        request.validate()
        runtime.validate()
        # Per-call transport options; request-level values win over the
        # client-level defaults.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        # Retry loop: first pass runs immediately; later passes back off.
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'PUT'
                _request.pathname = '/%s' % TeaConverter.to_unicode(request.dest_object_name)
                # Caller-supplied headers (including x-oss-copy-source) are
                # merged over the standard host/date/user-agent set.
                _request.headers = TeaCore.merge({
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }, UtilClient.stringify_map_value(TeaCore.to_map(request.header)))
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # URL-encode the copy-source header in place before signing;
                # the signature must cover the encoded value.
                _request.headers['x-oss-copy-source'] = OSSUtilClient.encode(_request.headers.get('x-oss-copy-source'), 'UrlEncode')
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    # Surface the XML error document as a TeaException.
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                # Parse the XML payload into the response model.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = XMLClient.parse_xml(body_str, oss_models.CopyObjectResponse())
                return oss_models.CopyObjectResponse().from_map(
                    TeaCore.merge({
                        'CopyObjectResult': resp_map.get('CopyObjectResult')
                    }, _response.headers)
                )
            except Exception as e:
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        # Retries exhausted without a definitive outcome.
        raise UnretryableException(_last_request, _last_exception)
    def get_object_tagging(self, request, runtime):
        """Fetch an object's tag set via ``GET /<object_name>?tagging``.

        4xx/5xx responses raise a TeaException parsed from the XML error
        body; on success the XML ``Tagging`` element is merged with the
        response headers into an ``oss_models.GetObjectTaggingResponse``.
        Retry/backoff and UnretryableException behavior match the other
        client operations.
        """
        request.validate()
        runtime.validate()
        # Per-call transport options; request-level values win over the
        # client-level defaults.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        # Retry loop: first pass runs immediately; later passes back off.
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'GET'
                _request.pathname = '/%s?tagging' % TeaConverter.to_unicode(request.object_name)
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # Signed last: the signature covers the headers set above.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    # Surface the XML error document as a TeaException.
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                # Parse the XML payload into the response model.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = XMLClient.parse_xml(body_str, oss_models.GetObjectTaggingResponse())
                return oss_models.GetObjectTaggingResponse().from_map(
                    TeaCore.merge({
                        'Tagging': resp_map.get('Tagging')
                    }, _response.headers)
                )
            except Exception as e:
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        # Retries exhausted without a definitive outcome.
        raise UnretryableException(_last_request, _last_exception)
    def delete_bucket_lifecycle(self, request, runtime):
        """Remove a bucket's lifecycle rules via ``DELETE /?lifecycle``.

        4xx/5xx responses raise a TeaException parsed from the XML error
        body; on success the response model is built from the response
        headers alone.  Retry/backoff and UnretryableException behavior
        match the other client operations.
        """
        request.validate()
        runtime.validate()
        # Per-call transport options; request-level values win over the
        # client-level defaults.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        # Retry loop: first pass runs immediately; later passes back off.
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'DELETE'
                _request.pathname = '/?lifecycle'
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # Signed last: the signature covers the headers set above.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    # Surface the XML error document as a TeaException.
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                # No payload expected: map the response headers only.
                return oss_models.DeleteBucketLifecycleResponse().from_map(
                    TeaCore.merge(_response.headers)
                )
            except Exception as e:
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        # Retries exhausted without a definitive outcome.
        raise UnretryableException(_last_request, _last_exception)
    def delete_bucket_logging(self, request, runtime):
        """Disable bucket access logging via ``DELETE /?logging``.

        4xx/5xx responses raise a TeaException parsed from the XML error
        body; on success the response model is built from the response
        headers alone.  Retry/backoff and UnretryableException behavior
        match the other client operations.
        """
        request.validate()
        runtime.validate()
        # Per-call transport options; request-level values win over the
        # client-level defaults.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        # Retry loop: first pass runs immediately; later passes back off.
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'DELETE'
                _request.pathname = '/?logging'
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # Signed last: the signature covers the headers set above.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    # Surface the XML error document as a TeaException.
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                # No payload expected: map the response headers only.
                return oss_models.DeleteBucketLoggingResponse().from_map(
                    TeaCore.merge(_response.headers)
                )
            except Exception as e:
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        # Retries exhausted without a definitive outcome.
        raise UnretryableException(_last_request, _last_exception)
    def delete_bucket_website(self, request, runtime):
        """Remove a bucket's static-website config via ``DELETE /?website``.

        4xx/5xx responses raise a TeaException parsed from the XML error
        body; on success the response model is built from the response
        headers alone.  Retry/backoff and UnretryableException behavior
        match the other client operations.
        """
        request.validate()
        runtime.validate()
        # Per-call transport options; request-level values win over the
        # client-level defaults.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        # Retry loop: first pass runs immediately; later passes back off.
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'DELETE'
                _request.pathname = '/?website'
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # Signed last: the signature covers the headers set above.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    # Surface the XML error document as a TeaException.
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                # No payload expected: map the response headers only.
                return oss_models.DeleteBucketWebsiteResponse().from_map(
                    TeaCore.merge(_response.headers)
                )
            except Exception as e:
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        # Retries exhausted without a definitive outcome.
        raise UnretryableException(_last_request, _last_exception)
    def get_symlink(self, request, runtime):
        """Read a symlink object via ``GET /<object_name>?symlink``.

        4xx/5xx responses raise a TeaException parsed from the XML error
        body; on success the response model is built from the response
        headers alone (the symlink target is carried in headers, not a
        body — confirm against the OSS API reference).  Retry/backoff and
        UnretryableException behavior match the other client operations.
        """
        request.validate()
        runtime.validate()
        # Per-call transport options; request-level values win over the
        # client-level defaults.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        # Retry loop: first pass runs immediately; later passes back off.
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'GET'
                _request.pathname = '/%s?symlink' % TeaConverter.to_unicode(request.object_name)
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # Signed last: the signature covers the headers set above.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    # Surface the XML error document as a TeaException.
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                # Map the response headers only.
                return oss_models.GetSymlinkResponse().from_map(
                    TeaCore.merge(_response.headers)
                )
            except Exception as e:
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        # Retries exhausted without a definitive outcome.
        raise UnretryableException(_last_request, _last_exception)
    def get_bucket_lifecycle(self, request, runtime):
        """Fetch a bucket's lifecycle rules via ``GET /?lifecycle``.

        4xx/5xx responses raise a TeaException parsed from the XML error
        body; on success the XML ``LifecycleConfiguration`` element is
        merged with the response headers into an
        ``oss_models.GetBucketLifecycleResponse``.  Retry/backoff and
        UnretryableException behavior match the other client operations.
        """
        request.validate()
        runtime.validate()
        # Per-call transport options; request-level values win over the
        # client-level defaults.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        # Retry loop: first pass runs immediately; later passes back off.
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'GET'
                _request.pathname = '/?lifecycle'
                _request.headers = {
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # Signed last: the signature covers the headers set above.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    # Surface the XML error document as a TeaException.
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                # Parse the XML payload into the response model.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = XMLClient.parse_xml(body_str, oss_models.GetBucketLifecycleResponse())
                return oss_models.GetBucketLifecycleResponse().from_map(
                    TeaCore.merge({
                        'LifecycleConfiguration': resp_map.get('LifecycleConfiguration')
                    }, _response.headers)
                )
            except Exception as e:
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        # Retries exhausted without a definitive outcome.
        raise UnretryableException(_last_request, _last_exception)
    def put_symlink(self, request, runtime):
        """Create a symlink object via ``PUT /<object_name>?symlink``.

        Caller-supplied ``request.header`` entries (which would include the
        symlink target header) are merged over the standard
        host/date/user-agent set before signing.  4xx/5xx responses raise a
        TeaException parsed from the XML error body; on success the response
        model is built from the response headers alone.  Retry/backoff and
        UnretryableException behavior match the other client operations.
        """
        request.validate()
        runtime.validate()
        # Per-call transport options; request-level values win over the
        # client-level defaults.
        _runtime = {
            'timeouted': 'retry',
            'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
            'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
            'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
            'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
            'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
            'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
            'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
            'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
            'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
            'retry': {
                'retryable': runtime.autoretry,
                'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
            },
            'backoff': {
                'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
                'period': UtilClient.default_number(runtime.backoff_period, 1)
            },
            'ignoreSSL': runtime.ignore_ssl
        }
        _last_request = None
        _last_exception = None
        _now = time.time()
        _retry_times = 0
        # Retry loop: first pass runs immediately; later passes back off.
        while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
            if _retry_times > 0:
                _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
                if _backoff_time > 0:
                    TeaCore.sleep(_backoff_time)
            _retry_times = _retry_times + 1
            try:
                _request = TeaRequest()
                access_key_id = self._credential.get_access_key_id()
                access_key_secret = self._credential.get_access_key_secret()
                token = self._credential.get_security_token()
                _request.protocol = self._protocol
                _request.method = 'PUT'
                _request.pathname = '/%s?symlink' % TeaConverter.to_unicode(request.object_name)
                # Merge caller headers over the standard set.
                _request.headers = TeaCore.merge({
                    'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                    'date': UtilClient.get_date_utcstring(),
                    'user-agent': self.get_user_agent()
                }, UtilClient.stringify_map_value(TeaCore.to_map(request.header)))
                if not UtilClient.empty(token):
                    _request.headers['x-oss-security-token'] = token
                # Signed last: the signature covers the headers set above.
                _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
                _last_request = _request
                _response = TeaCore.do_action(_request, _runtime)
                resp_map = None
                body_str = None
                if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                    # Surface the XML error document as a TeaException.
                    body_str = UtilClient.read_as_string(_response.body)
                    resp_map = OSSUtilClient.get_err_message(body_str)
                    raise TeaException({
                        'code': resp_map.get('Code'),
                        'message': resp_map.get('Message'),
                        'data': {
                            'httpCode': _response.status_code,
                            'requestId': resp_map.get('RequestId'),
                            'hostId': resp_map.get('HostId')
                        }
                    })
                # No payload expected: map the response headers only.
                return oss_models.PutSymlinkResponse().from_map(
                    TeaCore.merge(_response.headers)
                )
            except Exception as e:
                if TeaCore.is_retryable(e):
                    _last_exception = e
                    continue
                raise e
        # Retries exhausted without a definitive outcome.
        raise UnretryableException(_last_request, _last_exception)
def get_bucket_referer(self, request, runtime):
    """Fetch the referer configuration of a bucket (``GET /?referer``).

    :param request: request model carrying ``bucket_name``
    :param runtime: runtime options (timeouts, proxies, retry/backoff policy)
    :return: ``GetBucketRefererResponse`` built from the parsed XML body
        merged with the HTTP response headers
    :raises TeaException: when OSS answers with a 4xx/5xx status
    :raises UnretryableException: when every retry attempt failed
    """
    request.validate()
    runtime.validate()
    # Plain-dict runtime options in the shape consumed by TeaCore.do_action.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop: TeaCore.allow_retry decides whether another attempt is
    # permitted; an optional backoff sleep precedes each re-attempt.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'GET'
            _request.pathname = '/?referer'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                # STS temporary credentials travel in this extra header.
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers the headers assembled above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML document; surface it as TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.GetBucketRefererResponse())
            return oss_models.GetBucketRefererResponse().from_map(
                TeaCore.merge({
                    'RefererConfiguration': resp_map.get('RefererConfiguration')
                }, _response.headers)
            )
        except Exception as e:
            # Retryable transport errors feed the next loop iteration;
            # anything else propagates immediately.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def callback(self, request, runtime):
    """Invoke the callback operation (``GET /`` on the bucket endpoint).

    :param request: request model carrying ``bucket_name``
    :param runtime: runtime options (timeouts, proxies, retry/backoff policy)
    :return: ``CallbackResponse`` mapped from the HTTP response headers
    :raises TeaException: when OSS answers with a 4xx/5xx status
    :raises UnretryableException: when every retry attempt failed
    """
    request.validate()
    runtime.validate()
    # Runtime options in the plain-dict shape TeaCore.do_action expects.
    opts = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3),
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1),
        },
        'ignoreSSL': runtime.ignore_ssl,
    }
    last_req = None
    last_err = None
    started = time.time()
    attempt = 0
    while TeaCore.allow_retry(opts.get('retry'), attempt, started):
        # Sleep the configured backoff before every re-attempt (not the first).
        if attempt > 0:
            pause = TeaCore.get_backoff_time(opts.get('backoff'), attempt)
            if pause > 0:
                TeaCore.sleep(pause)
        attempt += 1
        try:
            req = TeaRequest()
            key_id = self._credential.get_access_key_id()
            key_secret = self._credential.get_access_key_secret()
            sts_token = self._credential.get_security_token()
            req.protocol = self._protocol
            req.method = 'GET'
            req.pathname = '/'
            req.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent(),
            }
            if not UtilClient.empty(sts_token):
                req.headers['x-oss-security-token'] = sts_token
            # Sign after all other headers are in place.
            req.headers['authorization'] = OSSUtilClient.get_signature(req, request.bucket_name, key_id, key_secret, self._signature_version, self._addtional_headers)
            last_req = req
            response = TeaCore.do_action(req, opts)
            if UtilClient.is_4xx(response.status_code) or UtilClient.is_5xx(response.status_code):
                err = OSSUtilClient.get_err_message(UtilClient.read_as_string(response.body))
                raise TeaException({
                    'code': err.get('Code'),
                    'message': err.get('Message'),
                    'data': {
                        'httpCode': response.status_code,
                        'requestId': err.get('RequestId'),
                        'hostId': err.get('HostId'),
                    },
                })
            return oss_models.CallbackResponse().from_map(
                TeaCore.merge(response.headers)
            )
        except Exception as e:
            # Non-retryable failures propagate; retryable ones drive the loop.
            if not TeaCore.is_retryable(e):
                raise
            last_err = e
    raise UnretryableException(last_req, last_err)
def get_bucket_logging(self, request, runtime):
    """Fetch the access-logging configuration of a bucket (``GET /?logging``).

    :param request: request model carrying ``bucket_name``
    :param runtime: runtime options (timeouts, proxies, retry/backoff policy)
    :return: ``GetBucketLoggingResponse`` built from the parsed XML body
        merged with the HTTP response headers
    :raises TeaException: when OSS answers with a 4xx/5xx status
    :raises UnretryableException: when every retry attempt failed
    """
    request.validate()
    runtime.validate()
    # Plain-dict runtime options in the shape consumed by TeaCore.do_action.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop with optional backoff before each re-attempt.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'GET'
            _request.pathname = '/?logging'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                # STS temporary credentials travel in this extra header.
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers the headers assembled above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML document; surface it as TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.GetBucketLoggingResponse())
            return oss_models.GetBucketLoggingResponse().from_map(
                TeaCore.merge({
                    'BucketLoggingStatus': resp_map.get('BucketLoggingStatus')
                }, _response.headers)
            )
        except Exception as e:
            # Retryable transport errors feed the next loop iteration.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def put_object_acl(self, request, runtime):
    """Set the ACL of an object (``PUT /<object>?acl``).

    :param request: request model carrying ``bucket_name``, ``object_name``
        and a ``header`` model that is merged into the request headers
    :param runtime: runtime options (timeouts, proxies, retry/backoff policy)
    :return: ``PutObjectAclResponse`` mapped from the HTTP response headers
    :raises TeaException: when OSS answers with a 4xx/5xx status
    :raises UnretryableException: when every retry attempt failed
    """
    request.validate()
    runtime.validate()
    # Runtime options in the plain-dict shape TeaCore.do_action expects.
    opts = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3),
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1),
        },
        'ignoreSSL': runtime.ignore_ssl,
    }
    last_req = None
    last_err = None
    started = time.time()
    attempt = 0
    while TeaCore.allow_retry(opts.get('retry'), attempt, started):
        # Sleep the configured backoff before every re-attempt (not the first).
        if attempt > 0:
            pause = TeaCore.get_backoff_time(opts.get('backoff'), attempt)
            if pause > 0:
                TeaCore.sleep(pause)
        attempt += 1
        try:
            req = TeaRequest()
            key_id = self._credential.get_access_key_id()
            key_secret = self._credential.get_access_key_secret()
            sts_token = self._credential.get_security_token()
            req.protocol = self._protocol
            req.method = 'PUT'
            req.pathname = '/%s?acl' % TeaConverter.to_unicode(request.object_name)
            base_headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent(),
            }
            # Caller-supplied headers (e.g. x-oss-object-acl) override/extend the base.
            req.headers = TeaCore.merge(base_headers, UtilClient.stringify_map_value(TeaCore.to_map(request.header)))
            if not UtilClient.empty(sts_token):
                req.headers['x-oss-security-token'] = sts_token
            # Sign after all other headers are in place.
            req.headers['authorization'] = OSSUtilClient.get_signature(req, request.bucket_name, key_id, key_secret, self._signature_version, self._addtional_headers)
            last_req = req
            response = TeaCore.do_action(req, opts)
            if UtilClient.is_4xx(response.status_code) or UtilClient.is_5xx(response.status_code):
                err = OSSUtilClient.get_err_message(UtilClient.read_as_string(response.body))
                raise TeaException({
                    'code': err.get('Code'),
                    'message': err.get('Message'),
                    'data': {
                        'httpCode': response.status_code,
                        'requestId': err.get('RequestId'),
                        'hostId': err.get('HostId'),
                    },
                })
            return oss_models.PutObjectAclResponse().from_map(
                TeaCore.merge(response.headers)
            )
        except Exception as e:
            # Non-retryable failures propagate; retryable ones drive the loop.
            if not TeaCore.is_retryable(e):
                raise
            last_err = e
    raise UnretryableException(last_req, last_err)
def get_bucket_info(self, request, runtime):
    """Fetch bucket metadata (``GET /?bucketInfo``).

    :param request: request model carrying ``bucket_name``
    :param runtime: runtime options (timeouts, proxies, retry/backoff policy)
    :return: ``GetBucketInfoResponse`` built from the parsed XML body
        merged with the HTTP response headers
    :raises TeaException: when OSS answers with a 4xx/5xx status
    :raises UnretryableException: when every retry attempt failed
    """
    request.validate()
    runtime.validate()
    # Plain-dict runtime options in the shape consumed by TeaCore.do_action.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop with optional backoff before each re-attempt.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'GET'
            _request.pathname = '/?bucketInfo'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                # STS temporary credentials travel in this extra header.
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers the headers assembled above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML document; surface it as TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.GetBucketInfoResponse())
            return oss_models.GetBucketInfoResponse().from_map(
                TeaCore.merge({
                    'BucketInfo': resp_map.get('BucketInfo')
                }, _response.headers)
            )
        except Exception as e:
            # Retryable transport errors feed the next loop iteration.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def put_live_channel_status(self, request, runtime):
    """Change the status of a live channel (``PUT /<channel>?live``).

    :param request: request model carrying ``bucket_name``, ``channel_name``
        and a ``filter`` model whose fields become query parameters
    :param runtime: runtime options (timeouts, proxies, retry/backoff policy)
    :return: ``PutLiveChannelStatusResponse`` mapped from the response headers
    :raises TeaException: when OSS answers with a 4xx/5xx status
    :raises UnretryableException: when every retry attempt failed
    """
    request.validate()
    runtime.validate()
    # Plain-dict runtime options in the shape consumed by TeaCore.do_action.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop with optional backoff before each re-attempt.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'PUT'
            _request.pathname = '/%s?live' % TeaConverter.to_unicode(request.channel_name)
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                # STS temporary credentials travel in this extra header.
                _request.headers['x-oss-security-token'] = token
            # Query parameters come from the request's filter model and must be
            # populated before the signature is computed.
            _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
            # Sign last: the signature covers the headers and query set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML document; surface it as TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            return oss_models.PutLiveChannelStatusResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Retryable transport errors feed the next loop iteration.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def initiate_multipart_upload(self, request, runtime):
    """Start a multipart upload (``POST /<object>?uploads``).

    :param request: request model carrying ``bucket_name``, ``object_name``,
        an optional ``header`` model (merged into the request headers) and a
        ``filter`` model whose fields become query parameters
    :param runtime: runtime options (timeouts, proxies, retry/backoff policy)
    :return: ``InitiateMultipartUploadResponse`` built from the parsed XML
        body (including the upload id) merged with the response headers
    :raises TeaException: when OSS answers with a 4xx/5xx status
    :raises UnretryableException: when every retry attempt failed
    """
    request.validate()
    runtime.validate()
    # Plain-dict runtime options in the shape consumed by TeaCore.do_action.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop with optional backoff before each re-attempt.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'POST'
            _request.pathname = '/%s?uploads' % TeaConverter.to_unicode(request.object_name)
            # Caller-supplied headers are merged over the base host/date/agent set.
            _request.headers = TeaCore.merge({
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }, UtilClient.stringify_map_value(TeaCore.to_map(request.header)))
            if not UtilClient.empty(token):
                # STS temporary credentials travel in this extra header.
                _request.headers['x-oss-security-token'] = token
            _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
            # Prefer an explicit content-type from the request header; fall back
            # to one derived from the object name's extension.
            if not UtilClient.is_unset(request.header) and not UtilClient.empty(request.header.content_type):
                _request.headers['content-type'] = request.header.content_type
            else:
                _request.headers['content-type'] = OSSUtilClient.get_content_type(request.object_name)
            # Sign last: the signature covers the headers and query set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML document; surface it as TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.InitiateMultipartUploadResponse())
            return oss_models.InitiateMultipartUploadResponse().from_map(
                TeaCore.merge({
                    'InitiateMultipartUploadResult': resp_map.get('InitiateMultipartUploadResult')
                }, _response.headers)
            )
        except Exception as e:
            # Retryable transport errors feed the next loop iteration.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def option_object(self, request, runtime):
    """Send a CORS preflight for an object (``OPTIONS /<object>``).

    :param request: request model carrying ``bucket_name``, ``object_name``
        and a ``header`` model (merged into the request headers)
    :param runtime: runtime options (timeouts, proxies, retry/backoff policy)
    :return: ``OptionObjectResponse`` mapped from the HTTP response headers
    :raises TeaException: when OSS answers with a 4xx/5xx status
    :raises UnretryableException: when every retry attempt failed
    """
    request.validate()
    runtime.validate()
    # Plain-dict runtime options in the shape consumed by TeaCore.do_action.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop with optional backoff before each re-attempt.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'OPTIONS'
            _request.pathname = '/%s' % TeaConverter.to_unicode(request.object_name)
            # Caller-supplied headers (e.g. Origin, Access-Control-Request-*)
            # are merged over the base host/date/agent set.
            _request.headers = TeaCore.merge({
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }, UtilClient.stringify_map_value(TeaCore.to_map(request.header)))
            if not UtilClient.empty(token):
                # STS temporary credentials travel in this extra header.
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers the headers assembled above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML document; surface it as TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            return oss_models.OptionObjectResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Retryable transport errors feed the next loop iteration.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def post_vod_playlist(self, request, runtime):
    """Create a VOD playlist for a live channel
    (``POST /<channel>/<playlist>?vod``).

    :param request: request model carrying ``bucket_name``, ``channel_name``,
        ``playlist_name`` and a ``filter`` model whose fields become query
        parameters
    :param runtime: runtime options (timeouts, proxies, retry/backoff policy)
    :return: ``PostVodPlaylistResponse`` mapped from the response headers
    :raises TeaException: when OSS answers with a 4xx/5xx status
    :raises UnretryableException: when every retry attempt failed
    """
    request.validate()
    runtime.validate()
    # Plain-dict runtime options in the shape consumed by TeaCore.do_action.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop with optional backoff before each re-attempt.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'POST'
            _request.pathname = '/%s/%s?vod' % (TeaConverter.to_unicode(request.channel_name), TeaConverter.to_unicode(request.playlist_name))
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                # STS temporary credentials travel in this extra header.
                _request.headers['x-oss-security-token'] = token
            # Query parameters come from the request's filter model and must be
            # populated before the signature is computed.
            _request.query = UtilClient.stringify_map_value(TeaCore.to_map(request.filter))
            # Sign last: the signature covers the headers and query set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                # Error responses carry an XML document; surface it as TeaException.
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            return oss_models.PostVodPlaylistResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            # Retryable transport errors feed the next loop iteration.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def post_object(self, request, runtime):
    """Upload an object via a browser-style ``multipart/form-data`` POST.

    Unlike the signed operations in this client, the authentication material
    (``OSSAccessKeyId``, ``policy``, ``Signature``) is supplied by the caller
    inside ``request.header`` and sent as form fields, so no ``authorization``
    header is computed here.

    :param request: request model carrying ``bucket_name`` and a ``header``
        model with the form fields (access key id, policy, signature, key,
        success_action_status, file and user metadata)
    :param runtime: runtime options (timeouts, proxies, retry/backoff policy)
    :return: ``PostObjectResponse`` built from the parsed XML response body
    :raises TeaException: when OSS answers with a 4xx/5xx status
    :raises UnretryableException: when every retry attempt failed
    """
    request.validate()
    runtime.validate()
    # Plain-dict runtime options in the shape consumed by TeaCore.do_action.
    # Consistency fix: localAddr, socks5Proxy and socks5NetWork were missing
    # here although every sibling operation forwards them from the runtime
    # options; they are now forwarded the same way.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop with optional backoff before each re-attempt.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            boundary = FileFormClient.get_boundary()
            _request.protocol = self._protocol
            _request.method = 'POST'
            _request.pathname = '/'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            _request.headers['content-type'] = 'multipart/form-data; boundary=%s' % TeaConverter.to_unicode(boundary)
            # Form fields: fixed auth/metadata fields plus x-oss-meta-* entries
            # derived from the caller's user metadata.
            form = TeaCore.merge({
                'OSSAccessKeyId': request.header.access_key_id,
                'policy': request.header.policy,
                'Signature': request.header.signature,
                'key': request.header.key,
                'success_action_status': request.header.success_action_status,
                'file': request.header.file
            }, OSSUtilClient.to_meta(request.header.user_meta, 'x-oss-meta-'))
            _request.body = FileFormClient.to_file_form(form, boundary)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            # The body is read unconditionally: both the error path and the
            # success path parse it as XML.
            body_str = UtilClient.read_as_string(_response.body)
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            resp_map = XMLClient.parse_xml(body_str, oss_models.PostObjectResponse())
            return oss_models.PostObjectResponse().from_map(
                TeaCore.merge(resp_map)
            )
        except Exception as e:
            # Retryable transport errors feed the next loop iteration;
            # anything else propagates immediately.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def head_object(self, request, runtime):
    """Issue a HEAD request for an object and return its metadata.

    Retries per the runtime retry/backoff policy; raises TeaException on a
    4xx/5xx response and UnretryableException when retries are exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-request transport options; runtime values fall back to client defaults.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Retry loop: sleep per the backoff policy before each retry attempt.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'HEAD'
            _request.pathname = '/%s' % TeaConverter.to_unicode(request.object_name)
            # Base headers merged with the caller-supplied request headers.
            _request.headers = TeaCore.merge({
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }, UtilClient.stringify_map_value(TeaCore.to_map(request.header)))
            if not UtilClient.empty(token):
                # STS credentials require the security-token header.
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers all headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: user metadata is extracted from the x-oss-meta-* headers.
            return oss_models.HeadObjectResponse().from_map(
                TeaCore.merge({
                    'usermeta': OSSUtilClient.to_meta(_response.headers, 'x-oss-meta-')
                }, _response.headers)
            )
        except Exception as e:
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def delete_object_tagging(self, request, runtime):
    """DELETE /{object}?tagging — remove the tag set of an object.

    Retries per the runtime retry/backoff policy; raises TeaException on a
    4xx/5xx response and UnretryableException when retries are exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-request transport options; runtime values fall back to client defaults.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'DELETE'
            _request.pathname = '/%s?tagging' % TeaConverter.to_unicode(request.object_name)
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers all headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response is built from the headers only (no body).
            return oss_models.DeleteObjectTaggingResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def restore_object(self, request, runtime):
    """POST /{object}?restore — request restoration of an archived object.

    Retries per the runtime retry/backoff policy; raises TeaException on a
    4xx/5xx response and UnretryableException when retries are exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-request transport options; runtime values fall back to client defaults.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'POST'
            _request.pathname = '/%s?restore' % TeaConverter.to_unicode(request.object_name)
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers all headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response is built from the headers only (no body).
            return oss_models.RestoreObjectResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def get_object_acl(self, request, runtime):
    """GET /{object}?acl — fetch an object's access-control policy.

    Retries per the runtime retry/backoff policy; raises TeaException on a
    4xx/5xx response and UnretryableException when retries are exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-request transport options; runtime values fall back to client defaults.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'GET'
            _request.pathname = '/%s?acl' % TeaConverter.to_unicode(request.object_name)
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers all headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: parse the XML body into the ACL response model and
            # merge the response headers on top of it.
            body_str = UtilClient.read_as_string(_response.body)
            resp_map = XMLClient.parse_xml(body_str, oss_models.GetObjectAclResponse())
            return oss_models.GetObjectAclResponse().from_map(
                TeaCore.merge({
                    'AccessControlPolicy': resp_map.get('AccessControlPolicy')
                }, _response.headers)
            )
        except Exception as e:
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def put_bucket_acl(self, request, runtime):
    """PUT /?acl — set the bucket ACL (the ACL itself travels in headers).

    Retries per the runtime retry/backoff policy; raises TeaException on a
    4xx/5xx response and UnretryableException when retries are exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-request transport options; runtime values fall back to client defaults.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'PUT'
            _request.pathname = '/?acl'
            # Base headers merged with the caller-supplied request headers
            # (these carry the x-oss-acl value).
            _request.headers = TeaCore.merge({
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }, UtilClient.stringify_map_value(TeaCore.to_map(request.header)))
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers all headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response is built from the headers only (no body).
            return oss_models.PutBucketAclResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def delete_bucket(self, request, runtime):
    """DELETE / — remove the bucket named in the request.

    Retries per the runtime retry/backoff policy; raises TeaException on a
    4xx/5xx response and UnretryableException when retries are exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-request transport options; runtime values fall back to client defaults.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'DELETE'
            _request.pathname = '/'
            _request.headers = {
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            # Sign last: the signature covers all headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # Success: the response is built from the headers only (no body).
            return oss_models.DeleteBucketResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def put_object(self, request, runtime):
    """PUT /{object} — upload an object, with optional CRC64/MD5 verification.

    Retries per the runtime retry/backoff policy; raises TeaException on a
    4xx/5xx response, on a client/server CRC mismatch ('CrcNotMatched'),
    on an MD5 mismatch ('MD5NotMatched'), and UnretryableException when
    retries are exhausted.
    """
    request.validate()
    runtime.validate()
    # Per-request transport options; runtime values fall back to client defaults.
    _runtime = {
        'timeouted': 'retry',
        'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),
        'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),
        'localAddr': UtilClient.default_string(runtime.local_addr, self._local_addr),
        'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),
        'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),
        'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),
        'socks5Proxy': UtilClient.default_string(runtime.socks_5proxy, self._socks_5proxy),
        'socks5NetWork': UtilClient.default_string(runtime.socks_5net_work, self._socks_5net_work),
        'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),
        'retry': {
            'retryable': runtime.autoretry,
            'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)
        },
        'backoff': {
            'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),
            'period': UtilClient.default_number(runtime.backoff_period, 1)
        },
        'ignoreSSL': runtime.ignore_ssl
    }
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            # ctx collects the client-side 'crc'/'md5' digests computed while
            # the injected body stream is consumed (see OSSUtilClient.inject).
            ctx = {}
            access_key_id = self._credential.get_access_key_id()
            access_key_secret = self._credential.get_access_key_secret()
            token = self._credential.get_security_token()
            _request.protocol = self._protocol
            _request.method = 'PUT'
            _request.pathname = '/%s' % TeaConverter.to_unicode(request.object_name)
            # Base headers merged with caller headers and x-oss-meta-* entries
            # derived from the user metadata map.
            _request.headers = TeaCore.merge({
                'host': OSSUtilClient.get_host(request.bucket_name, self._region_id, self._endpoint, self._host_model),
                'date': UtilClient.get_date_utcstring(),
                'user-agent': self.get_user_agent()
            }, UtilClient.stringify_map_value(TeaCore.to_map(request.header)),
            OSSUtilClient.parse_meta(request.user_meta, 'x-oss-meta-'))
            if not UtilClient.empty(token):
                _request.headers['x-oss-security-token'] = token
            _request.body = OSSUtilClient.inject(request.body, ctx)
            # Prefer the caller-supplied content type; otherwise derive one
            # from the object name's extension.
            if not UtilClient.is_unset(request.header) and not UtilClient.empty(request.header.content_type):
                _request.headers['content-type'] = request.header.content_type
            else:
                _request.headers['content-type'] = OSSUtilClient.get_content_type(request.object_name)
            # Sign last: the signature covers all headers set above.
            _request.headers['authorization'] = OSSUtilClient.get_signature(_request, request.bucket_name, access_key_id, access_key_secret, self._signature_version, self._addtional_headers)
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            resp_map = None
            body_str = None
            if UtilClient.is_4xx(_response.status_code) or UtilClient.is_5xx(_response.status_code):
                body_str = UtilClient.read_as_string(_response.body)
                resp_map = OSSUtilClient.get_err_message(body_str)
                raise TeaException({
                    'code': resp_map.get('Code'),
                    'message': resp_map.get('Message'),
                    'data': {
                        'httpCode': _response.status_code,
                        'requestId': resp_map.get('RequestId'),
                        'hostId': resp_map.get('HostId')
                    }
                })
            # End-to-end integrity checks: compare client digests (from ctx)
            # with the values echoed back by the server.
            if self._is_enable_crc and not UtilClient.equal_string(ctx.get('crc'), _response.headers.get('x-oss-hash-crc64ecma')):
                raise TeaException({
                    'code': 'CrcNotMatched',
                    'data': {
                        'clientCrc': ctx.get('crc'),
                        'serverCrc': _response.headers.get('x-oss-hash-crc64ecma')
                    }
                })
            if self._is_enable_md5 and not UtilClient.equal_string(ctx.get('md5'), _response.headers.get('content-md5')):
                raise TeaException({
                    'code': 'MD5NotMatched',
                    'data': {
                        'clientMD5': ctx.get('md5'),
                        'serverMD5': _response.headers.get('content-md5')
                    }
                })
            # Success: the response is built from the headers only (no body).
            return oss_models.PutObjectResponse().from_map(
                TeaCore.merge(_response.headers)
            )
        except Exception as e:
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    raise UnretryableException(_last_request, _last_exception)
def set_user_agent(self, user_agent):
    """Replace the client's custom user-agent string with *user_agent*."""
    self._user_agent = user_agent
def append_user_agent(self, user_agent):
    """Append *user_agent* (space-separated) to the current user-agent."""
    current = TeaConverter.to_unicode(self._user_agent)
    extra = TeaConverter.to_unicode(user_agent)
    self._user_agent = '%s %s' % (current, extra)
def get_user_agent(self):
    """Return the effective user-agent string, combining the default SDK
    user-agent with the custom one held by this client."""
    return UtilClient.get_user_agent(self._user_agent)
def get_access_key_id(self):
    """Return the credential's access key id, or '' when no credential is set."""
    if UtilClient.is_unset(self._credential):
        return ''
    return self._credential.get_access_key_id()
def get_access_key_secret(self):
    """Return the credential's access key secret, or '' when no credential is set."""
    if UtilClient.is_unset(self._credential):
        return ''
    return self._credential.get_access_key_secret()
| 55.095514
| 194
| 0.584579
| 28,190
| 294,761
| 5.705215
| 0.011955
| 0.083399
| 0.065641
| 0.085618
| 0.955842
| 0.954872
| 0.954331
| 0.953087
| 0.95162
| 0.950855
| 0
| 0.004867
| 0.322526
| 294,761
| 5,349
| 195
| 55.105814
| 0.800519
| 0.000963
| 0
| 0.870019
| 1
| 0
| 0.073943
| 0.001749
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013662
| false
| 0
| 0.002467
| 0
| 0.033397
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d3137311c4f843d70ae341456f944adc38a794fb
| 232
|
py
|
Python
|
.setup/cmds/__init__.py
|
pygalle-io/pygalle.core.base.klass
|
fa683f7f88b63ca46a0970af81a558c9efbbe942
|
[
"MIT"
] | null | null | null |
.setup/cmds/__init__.py
|
pygalle-io/pygalle.core.base.klass
|
fa683f7f88b63ca46a0970af81a558c9efbbe942
|
[
"MIT"
] | null | null | null |
.setup/cmds/__init__.py
|
pygalle-io/pygalle.core.base.klass
|
fa683f7f88b63ca46a0970af81a558c9efbbe942
|
[
"MIT"
] | null | null | null |
from .lint_cmd import PylintCommand
from .readme_cmd import GenerateReadmeCommand
from .apidoc_cmd import BuildApiCommand
from .coverage_cmd import CoverageCommand
from .coveralls_cmd import CoverallsCommand
from .build import Build
| 38.666667
| 45
| 0.875
| 29
| 232
| 6.827586
| 0.482759
| 0.227273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099138
| 232
| 6
| 46
| 38.666667
| 0.947368
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d37a506a76f3220bbd9580ce95dcb924598eb24c
| 5,434
|
py
|
Python
|
tests/test_state_machine_genome_partially_fixed.py
|
matthijsdentoom/neat-python
|
3b403f9efc841b85746b40cecf18ef8b57d50584
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_state_machine_genome_partially_fixed.py
|
matthijsdentoom/neat-python
|
3b403f9efc841b85746b40cecf18ef8b57d50584
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_state_machine_genome_partially_fixed.py
|
matthijsdentoom/neat-python
|
3b403f9efc841b85746b40cecf18ef8b57d50584
|
[
"BSD-3-Clause"
] | null | null | null |
import pickle
import unittest
from neat.state_machine_genome_partially_fixed import StateMachineGenomeFixed
from tests.config_generation import init_fixed_genome_config
class TestStateMachineGenome(unittest.TestCase):
    """Tests for StateMachineGenomeFixed: a section marked as fixed must be
    copied from the reference genome and must survive mutation unchanged."""

    @staticmethod
    def _load_reference_genome():
        """Load the pickled reference genome.

        The original tests used ``pickle.load(open(...))``, which leaks the
        file handle; the context manager closes it deterministically.
        """
        with open('test_genome.pickle', 'rb') as f:
            return pickle.load(f)

    @staticmethod
    def _make_configured_genome(fixed_section=None):
        """Return ``(genome, config)`` configured with the shared test config.

        *fixed_section* overrides the config's default section when given
        (the config default fixes the states).
        """
        config = init_fixed_genome_config()
        if fixed_section is not None:
            config.fixed_section = fixed_section
        genome = StateMachineGenomeFixed(1)
        genome.configure_new(config)
        return genome, config

    def _assert_states_match_reference(self, reference_genome, genome):
        # Biases and weights of both states must equal the reference exactly.
        self.assertEqual(reference_genome.states[0].biases, genome.states[0].biases)
        self.assertEqual(reference_genome.states[0].weights, genome.states[0].weights)
        self.assertEqual(reference_genome.states[1].biases, genome.states[1].biases)
        self.assertEqual(reference_genome.states[1].weights, genome.states[1].weights)

    def test_fixed_layout(self):
        reference_genome = self._load_reference_genome()
        genome, _ = self._make_configured_genome('layout')
        self.assertEqual(len(reference_genome.transitions), len(genome.transitions))
        self.assertEqual(len(reference_genome.states), len(genome.states))

    def test_fixed_states(self):
        reference_genome = self._load_reference_genome()
        # Default fixed section are the states.
        genome, _ = self._make_configured_genome()
        # Check lengths.
        self.assertEqual(0, len(genome.transitions))
        self.assertEqual(2, len(genome.states))
        # Check state availability.
        self.assertIn(0, genome.states)
        self.assertIn(1, genome.states)
        # Check equality of the state layouts.
        self._assert_states_match_reference(reference_genome, genome)

    def test_fixed_transitions(self):
        reference_genome = self._load_reference_genome()
        genome, _ = self._make_configured_genome('transitions')
        # Check lengths.
        self.assertEqual(1, len(genome.transitions))
        self.assertEqual(2, len(genome.states))
        # Check state availability.
        self.assertIn(0, genome.states)
        self.assertIn(1, genome.states)
        # Check transition availability.
        self.assertIn((0, 1), genome.transitions)
        self.assertEqual(reference_genome.transitions[(0, 1)].conditions,
                         genome.transitions[(0, 1)].conditions)
        # Manually checked that states do randomly initialise.

    def test_state_fixed_with_mutation(self):
        genome, config = self._make_configured_genome()
        reference_genome = self._load_reference_genome()
        # Do 100 mutations to check whether it always is the same.
        for _ in range(100):
            genome.mutate(config)
        self.assertEqual(2, len(genome.states))
        # Check equality of the state layouts.
        self._assert_states_match_reference(reference_genome, genome)

    def test_transition_fixed_with_mutation(self):
        genome, config = self._make_configured_genome('transitions')
        reference_genome = self._load_reference_genome()
        # Do 100 mutations to check whether it always is the same.
        for _ in range(100):
            genome.mutate(config)
        self.assertEqual(2, len(genome.states))
        self.assertEqual(1, len(genome.transitions))
        # Check transition availability.
        self.assertIn((0, 1), genome.transitions)
        self.assertEqual(reference_genome.transitions[(0, 1)].conditions,
                         genome.transitions[(0, 1)].conditions)

    def test_hard_copy_transition(self):
        """ Ensure that a hard copy of the transitions is made, so they can be individually changed. """
        reference_genome = self._load_reference_genome()
        genome, _ = self._make_configured_genome('transitions')
        self.assertEqual(1, len(genome.transitions))
        self.assertIn((0, 1), genome.transitions)
        genome.transitions[(0, 1)].conditions.append((1, 2, 3))
        self.assertNotEqual(reference_genome.transitions[(0, 1)].conditions,
                            genome.transitions[(0, 1)].conditions)

    def test_hard_copy_state(self):
        """ Ensure that a hard copy of the states can be changed so they can be individually changed. """
        reference_genome = self._load_reference_genome()
        genome, _ = self._make_configured_genome('states')
        genome.states[0].biases[0] = -1
        genome.states[0].weights[0][0] = -1
        self.assertNotEqual(reference_genome.states[0].biases, genome.states[0].biases)
        self.assertNotEqual(reference_genome.states[0].weights, genome.states[0].weights)
| 38.814286
| 105
| 0.670961
| 625
| 5,434
| 5.688
| 0.136
| 0.108017
| 0.051196
| 0.084388
| 0.854008
| 0.811533
| 0.802532
| 0.802532
| 0.772433
| 0.75865
| 0
| 0.019203
| 0.223776
| 5,434
| 139
| 106
| 39.093525
| 0.823613
| 0.119065
| 0
| 0.705882
| 0
| 0
| 0.038857
| 0
| 0
| 0
| 0
| 0
| 0.352941
| 1
| 0.082353
| false
| 0
| 0.047059
| 0
| 0.141176
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9f7195cb71f6932dcc548e2078b800e0e6d26aed
| 2,230
|
py
|
Python
|
day02/python/fallshare/solution.py
|
AlexisTM/aoc-2021
|
91a801b3c812cc3d37d6088a2544227cf158d114
|
[
"MIT"
] | 11
|
2021-11-28T10:36:54.000Z
|
2021-12-21T10:38:34.000Z
|
day02/python/fallshare/solution.py
|
AlexisTM/aoc-2021
|
91a801b3c812cc3d37d6088a2544227cf158d114
|
[
"MIT"
] | 83
|
2021-11-22T17:02:05.000Z
|
2022-01-29T10:27:31.000Z
|
day02/python/fallshare/solution.py
|
AlexisTM/aoc-2021
|
91a801b3c812cc3d37d6088a2544227cf158d114
|
[
"MIT"
] | 19
|
2021-11-22T20:47:57.000Z
|
2022-02-01T08:51:19.000Z
|
class Submarine:
    """Tracks depth and horizontal position while processing dive commands
    (Advent of Code 2021, day 2, part 1)."""

    def __init__(self):
        # Per-instance state instead of class-level attributes, so two
        # submarines never share position accidentally.
        self.depth = 0
        self.horizontal_pos = 0

    def down(self, depth):
        """Increase depth by *depth* units."""
        self.depth += depth

    def up(self, depth):
        """Decrease depth by *depth* units."""
        self.depth -= depth

    def forward(self, distance):
        """Advance horizontally by *distance* units."""
        self.horizontal_pos += distance

    def get_depth(self):
        """Return the current depth."""
        return self.depth

    def get_horizontal_pos(self):
        """Return the current horizontal position."""
        return self.horizontal_pos

    def dive(self, input):
        """Apply every ``"direction value"`` line from the file named *input*.

        Raises:
            ValueError: for an unknown direction. The original raised a bare
                ``Exception`` with a placeholder-less f-string; ``ValueError``
                is a subclass of ``Exception``, so broad handlers still work,
                and the message now names the offending direction.
        """
        with open(input, "r") as f:
            for line in f:
                (direction, value) = line.split(" ")
                value = int(value)  # int() tolerates the trailing newline
                if direction == "down":
                    self.down(value)
                elif direction == "up":
                    self.up(value)
                elif direction == "forward":
                    self.forward(value)
                else:
                    raise ValueError(f"Unknown direction: {direction!r}")
def get_Star1():
    """Solve part 1: run input.txt through a Submarine and print the
    product of final depth and horizontal position."""
    submarine = Submarine()
    submarine.dive("input.txt")
    result = submarine.get_depth() * submarine.get_horizontal_pos()
    print(f"Result for first star: {result}")
class Aiming_Submarine:
    """Submarine variant where down/up change aim, and forward moves
    horizontally while diving by distance * aim (AoC 2021 day 2, part 2)."""

    def __init__(self):
        # Per-instance state instead of class-level attributes.
        self.depth = 0
        self.horizontal_pos = 0
        self.aim = 0

    def down(self, depth):
        """Increase aim by *depth* units (does not change depth directly)."""
        self.aim += depth

    def up(self, depth):
        """Decrease aim by *depth* units (does not change depth directly)."""
        self.aim -= depth

    def forward(self, distance):
        """Advance horizontally and dive by distance * current aim."""
        self.horizontal_pos += distance
        self.depth += distance * self.aim

    def get_depth(self):
        """Return the current depth."""
        return self.depth

    def get_horizontal_pos(self):
        """Return the current horizontal position."""
        return self.horizontal_pos

    def dive(self, input):
        """Apply every ``"direction value"`` line from the file named *input*.

        Raises:
            ValueError: for an unknown direction. The original raised a bare
                ``Exception`` with a placeholder-less f-string; ``ValueError``
                is a subclass of ``Exception``, so broad handlers still work,
                and the message now names the offending direction.
        """
        with open(input, "r") as f:
            for line in f:
                (direction, value) = line.split(" ")
                value = int(value)  # int() tolerates the trailing newline
                if direction == "down":
                    self.down(value)
                elif direction == "up":
                    self.up(value)
                elif direction == "forward":
                    self.forward(value)
                else:
                    raise ValueError(f"Unknown direction: {direction!r}")
def get_Star2():
    """Solve part 2: print the product of final depth and horizontal position."""
    boat = Aiming_Submarine()
    boat.dive("input.txt")
    print(f"Result for second star: {boat.get_depth() * boat.get_horizontal_pos()}")
if __name__ == "__main__":
    # Guard the entry point so importing this module does not trigger the
    # puzzle I/O (the original ran both solvers unconditionally at import).
    get_Star1()
    get_Star2()
| 26.547619
| 67
| 0.525112
| 241
| 2,230
| 4.759336
| 0.182573
| 0.113339
| 0.045336
| 0.043592
| 0.891892
| 0.891892
| 0.751526
| 0.751526
| 0.751526
| 0.660854
| 0
| 0.006406
| 0.369955
| 2,230
| 84
| 68
| 26.547619
| 0.809964
| 0
| 0
| 0.757576
| 0
| 0
| 0.06589
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.212121
| false
| 0
| 0
| 0.060606
| 0.378788
| 0.030303
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9f76afc8e551d49808510f885294946ce8a155f9
| 33,773
|
py
|
Python
|
src/ebay_rest/api/sell_account/api/return_policy_api.py
|
matecsaj/ebay_rest
|
dd23236f39e05636eff222f99df1e3699ce47d4a
|
[
"MIT"
] | 3
|
2021-12-12T04:28:03.000Z
|
2022-03-10T03:29:18.000Z
|
src/ebay_rest/api/sell_account/api/return_policy_api.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 33
|
2021-06-16T20:44:36.000Z
|
2022-03-30T14:55:06.000Z
|
src/ebay_rest/api/sell_account/api/return_policy_api.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 7
|
2021-06-03T09:30:23.000Z
|
2022-03-08T19:51:33.000Z
|
# coding: utf-8
"""
Account API
The <b>Account API</b> gives sellers the ability to configure their eBay seller accounts, including the seller's policies (seller-defined custom policies and eBay business policies), opt in and out of eBay seller programs, configure sales tax tables, and get account information. <br><br>For details on the availability of the methods in this API, see <a href=\"/api-docs/sell/account/overview.html#requirements\">Account API requirements and restrictions</a>. # noqa: E501
OpenAPI spec version: v1.7.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ...sell_account.api_client import ApiClient
class ReturnPolicyApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient so the API is usable
        # without dependency injection.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def create_return_policy(self, body, **kwargs):  # noqa: E501
        """create_return_policy  # noqa: E501

        This method creates a new return policy where the policy encapsulates seller's terms for returning items. Use the Metadata API method <b>getReturnPolicies</b> to determine which categories require you to supply a return policy for the marketplace(s) into which you list. <br><br>Each policy targets a <b>marketplaceId</b> and <code>categoryTypes.</code><b>name</b> combination and you can create multiple policies for each combination. <br><br>A successful request returns the URI to the new policy in the <b>Location</b> response header and the ID for the new policy is returned in the response payload. <p class=\"tablenote\"><b>Tip:</b> For details on creating and using the business policies supported by the Account API, see <a href=\"/api-docs/sell/static/seller-accounts/business-policies.html\">eBay business policies</a>.</p> <p><b>Marketplaces and locales</b></p> <p>Policy instructions can be localized by providing a locale in the <code>Accept-Language</code> HTTP request header. For example, the following setting displays field values from the request body in German: <code>Accept-Language: de-DE</code>.</p> <p>Target the specific locale of a marketplace that supports multiple locales using the <code>Content-Language</code> request header. For example, target the French locale of the Canadian marketplace by specifying the <code>fr-CA</code> locale for <code>Content-Language</code>. Likewise, target the Dutch locale of the Belgium marketplace by setting <code>Content-Language: nl-BE</code>.</p> <p class=\"tablenote\"><b>Tip:</b> For details on headers, see <a href=\"/api-docs/static/rest-request-components.html#HTTP\">HTTP request headers</a>.</p>  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_return_policy(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ReturnPolicyRequest body: Return policy request (required)
        :return: SetReturnPolicyResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            # Async path: caller receives the request thread.
            return self.create_return_policy_with_http_info(body, **kwargs)  # noqa: E501
        else:
            # Sync path: unwrap only the payload from the transport result.
            (data) = self.create_return_policy_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def create_return_policy_with_http_info(self, body, **kwargs):  # noqa: E501
        """create_return_policy  # noqa: E501

        This method creates a new return policy where the policy encapsulates seller's terms for returning items. Use the Metadata API method <b>getReturnPolicies</b> to determine which categories require you to supply a return policy for the marketplace(s) into which you list. <br><br>Each policy targets a <b>marketplaceId</b> and <code>categoryTypes.</code><b>name</b> combination and you can create multiple policies for each combination. <br><br>A successful request returns the URI to the new policy in the <b>Location</b> response header and the ID for the new policy is returned in the response payload. <p class=\"tablenote\"><b>Tip:</b> For details on creating and using the business policies supported by the Account API, see <a href=\"/api-docs/sell/static/seller-accounts/business-policies.html\">eBay business policies</a>.</p> <p><b>Marketplaces and locales</b></p> <p>Policy instructions can be localized by providing a locale in the <code>Accept-Language</code> HTTP request header. For example, the following setting displays field values from the request body in German: <code>Accept-Language: de-DE</code>.</p> <p>Target the specific locale of a marketplace that supports multiple locales using the <code>Content-Language</code> request header. For example, target the French locale of the Canadian marketplace by specifying the <code>fr-CA</code> locale for <code>Content-Language</code>. Likewise, target the Dutch locale of the Belgium marketplace by setting <code>Content-Language: nl-BE</code>.</p> <p class=\"tablenote\"><b>Tip:</b> For details on headers, see <a href=\"/api-docs/static/rest-request-components.html#HTTP\">HTTP request headers</a>.</p>  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_return_policy_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ReturnPolicyRequest body: Return policy request (required)
        :return: SetReturnPolicyResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument the generated signature does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_return_policy" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `create_return_policy`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_auth']  # noqa: E501

        return self.api_client.call_api(
            '/return_policy', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SetReturnPolicyResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_return_policy(self, return_policy_id, **kwargs):  # noqa: E501
        """delete_return_policy  # noqa: E501

        This method deletes a return policy. Supply the ID of the policy you want to delete in the <b>returnPolicyId</b> path parameter. Note that you cannot delete the default return policy.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_return_policy(return_policy_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str return_policy_id: This path parameter specifies the ID of the return policy you want to delete. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            # Async path: caller receives the request thread.
            return self.delete_return_policy_with_http_info(return_policy_id, **kwargs)  # noqa: E501
        else:
            # Sync path: unwrap only the payload from the transport result.
            (data) = self.delete_return_policy_with_http_info(return_policy_id, **kwargs)  # noqa: E501
            return data

    def delete_return_policy_with_http_info(self, return_policy_id, **kwargs):  # noqa: E501
        """delete_return_policy  # noqa: E501

        This method deletes a return policy. Supply the ID of the policy you want to delete in the <b>returnPolicyId</b> path parameter. Note that you cannot delete the default return policy.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_return_policy_with_http_info(return_policy_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str return_policy_id: This path parameter specifies the ID of the return policy you want to delete. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['return_policy_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument the generated signature does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_return_policy" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'return_policy_id' is set
        if ('return_policy_id' not in params or
                params['return_policy_id'] is None):
            raise ValueError("Missing the required parameter `return_policy_id` when calling `delete_return_policy`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'return_policy_id' in params:
            path_params['return_policy_id'] = params['return_policy_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['api_auth']  # noqa: E501

        return self.api_client.call_api(
            '/return_policy/{return_policy_id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_return_policies(self, marketplace_id, **kwargs):  # noqa: E501
        """get_return_policies  # noqa: E501

        This method retrieves all the return policies configured for the marketplace you specify using the <code>marketplace_id</code> query parameter. <br><br><b>Marketplaces and locales</b> <br><br>Get the correct policies for a marketplace that supports multiple locales using the <code>Content-Language</code> request header. For example, get the policies for the French locale of the Canadian marketplace by specifying <code>fr-CA</code> for the <code>Content-Language</code> header. Likewise, target the Dutch locale of the Belgium marketplace by setting <code>Content-Language: nl-BE</code>. For details on header values, see <a href=\"/api-docs/static/rest-request-components.html#HTTP\" target=\"_blank\">HTTP request headers</a>.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_return_policies(marketplace_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str marketplace_id: This query parameter specifies the ID of the eBay marketplace of the policy you want to retrieve. For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/sell/account/types/ba:MarketplaceIdEnum (required)
        :return: ReturnPolicyResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            # Async path: caller receives the request thread.
            return self.get_return_policies_with_http_info(marketplace_id, **kwargs)  # noqa: E501
        else:
            # Sync path: unwrap only the payload from the transport result.
            (data) = self.get_return_policies_with_http_info(marketplace_id, **kwargs)  # noqa: E501
            return data

    def get_return_policies_with_http_info(self, marketplace_id, **kwargs):  # noqa: E501
        """get_return_policies  # noqa: E501

        This method retrieves all the return policies configured for the marketplace you specify using the <code>marketplace_id</code> query parameter. <br><br><b>Marketplaces and locales</b> <br><br>Get the correct policies for a marketplace that supports multiple locales using the <code>Content-Language</code> request header. For example, get the policies for the French locale of the Canadian marketplace by specifying <code>fr-CA</code> for the <code>Content-Language</code> header. Likewise, target the Dutch locale of the Belgium marketplace by setting <code>Content-Language: nl-BE</code>. For details on header values, see <a href=\"/api-docs/static/rest-request-components.html#HTTP\" target=\"_blank\">HTTP request headers</a>.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_return_policies_with_http_info(marketplace_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str marketplace_id: This query parameter specifies the ID of the eBay marketplace of the policy you want to retrieve. For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/sell/account/types/ba:MarketplaceIdEnum (required)
        :return: ReturnPolicyResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['marketplace_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument the generated signature does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_return_policies" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'marketplace_id' is set
        if ('marketplace_id' not in params or
                params['marketplace_id'] is None):
            raise ValueError("Missing the required parameter `marketplace_id` when calling `get_return_policies`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'marketplace_id' in params:
            query_params.append(('marketplace_id', params['marketplace_id']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_auth']  # noqa: E501

        return self.api_client.call_api(
            '/return_policy', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ReturnPolicyResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_return_policy(self, return_policy_id, **kwargs):  # noqa: E501
        """get_return_policy  # noqa: E501

        This method retrieves the complete details of the return policy specified by the <b>returnPolicyId</b> path parameter.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_return_policy(return_policy_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str return_policy_id: This path parameter specifies the ID of the return policy you want to retrieve. (required)
        :return: ReturnPolicy
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            # Async path: caller receives the request thread.
            return self.get_return_policy_with_http_info(return_policy_id, **kwargs)  # noqa: E501
        else:
            # Sync path: unwrap only the payload from the transport result.
            (data) = self.get_return_policy_with_http_info(return_policy_id, **kwargs)  # noqa: E501
            return data

    def get_return_policy_with_http_info(self, return_policy_id, **kwargs):  # noqa: E501
        """get_return_policy  # noqa: E501

        This method retrieves the complete details of the return policy specified by the <b>returnPolicyId</b> path parameter.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_return_policy_with_http_info(return_policy_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str return_policy_id: This path parameter specifies the ID of the return policy you want to retrieve. (required)
        :return: ReturnPolicy
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['return_policy_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument the generated signature does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_return_policy" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'return_policy_id' is set
        if ('return_policy_id' not in params or
                params['return_policy_id'] is None):
            raise ValueError("Missing the required parameter `return_policy_id` when calling `get_return_policy`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'return_policy_id' in params:
            path_params['return_policy_id'] = params['return_policy_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_auth']  # noqa: E501

        return self.api_client.call_api(
            '/return_policy/{return_policy_id}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ReturnPolicy',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_return_policy_by_name(self, marketplace_id, name, **kwargs):  # noqa: E501
        """get_return_policy_by_name  # noqa: E501

        This method retrieves the complete details of a single return policy. Supply both the policy <code>name</code> and its associated <code>marketplace_id</code> in the request query parameters. <br><br><b>Marketplaces and locales</b> <br><br>Get the correct policy for a marketplace that supports multiple locales using the <code>Content-Language</code> request header. For example, get a policy for the French locale of the Canadian marketplace by specifying <code>fr-CA</code> for the <code>Content-Language</code> header. Likewise, target the Dutch locale of the Belgium marketplace by setting <code>Content-Language: nl-BE</code>. For details on header values, see <a href=\"/api-docs/static/rest-request-components.html#HTTP\">HTTP request headers</a>.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_return_policy_by_name(marketplace_id, name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str marketplace_id: This query parameter specifies the ID of the eBay marketplace of the policy you want to retrieve. For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/sell/account/types/ba:MarketplaceIdEnum (required)
        :param str name: This query parameter specifies the user-defined name of the return policy you want to retrieve. (required)
        :return: ReturnPolicy
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            # Async path: caller receives the request thread.
            return self.get_return_policy_by_name_with_http_info(marketplace_id, name, **kwargs)  # noqa: E501
        else:
            # Sync path: unwrap only the payload from the transport result.
            (data) = self.get_return_policy_by_name_with_http_info(marketplace_id, name, **kwargs)  # noqa: E501
            return data

    def get_return_policy_by_name_with_http_info(self, marketplace_id, name, **kwargs):  # noqa: E501
        """get_return_policy_by_name  # noqa: E501

        This method retrieves the complete details of a single return policy. Supply both the policy <code>name</code> and its associated <code>marketplace_id</code> in the request query parameters. <br><br><b>Marketplaces and locales</b> <br><br>Get the correct policy for a marketplace that supports multiple locales using the <code>Content-Language</code> request header. For example, get a policy for the French locale of the Canadian marketplace by specifying <code>fr-CA</code> for the <code>Content-Language</code> header. Likewise, target the Dutch locale of the Belgium marketplace by setting <code>Content-Language: nl-BE</code>. For details on header values, see <a href=\"/api-docs/static/rest-request-components.html#HTTP\">HTTP request headers</a>.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_return_policy_by_name_with_http_info(marketplace_id, name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str marketplace_id: This query parameter specifies the ID of the eBay marketplace of the policy you want to retrieve. For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/sell/account/types/ba:MarketplaceIdEnum (required)
        :param str name: This query parameter specifies the user-defined name of the return policy you want to retrieve. (required)
        :return: ReturnPolicy
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['marketplace_id', 'name']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument the generated signature does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_return_policy_by_name" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'marketplace_id' is set
        if ('marketplace_id' not in params or
                params['marketplace_id'] is None):
            raise ValueError("Missing the required parameter `marketplace_id` when calling `get_return_policy_by_name`")  # noqa: E501
        # verify the required parameter 'name' is set
        if ('name' not in params or
                params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `get_return_policy_by_name`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'marketplace_id' in params:
            query_params.append(('marketplace_id', params['marketplace_id']))  # noqa: E501
        if 'name' in params:
            query_params.append(('name', params['name']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_auth']  # noqa: E501

        return self.api_client.call_api(
            '/return_policy/get_by_policy_name', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ReturnPolicy',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def update_return_policy(self, body, return_policy_id, **kwargs):  # noqa: E501
        """update_return_policy  # noqa: E501

        This method updates an existing return policy. Specify the policy you want to update using the <b>return_policy_id</b> path parameter. Supply a complete policy payload with the updates you want to make; this call overwrites the existing policy with the new details specified in the payload.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_return_policy(body, return_policy_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ReturnPolicyRequest body: Container for a return policy request. (required)
        :param str return_policy_id: This path parameter specifies the ID of the return policy you want to update. (required)
        :return: SetReturnPolicyResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            # Async path: caller receives the request thread.
            return self.update_return_policy_with_http_info(body, return_policy_id, **kwargs)  # noqa: E501
        else:
            # Sync path: unwrap only the payload from the transport result.
            (data) = self.update_return_policy_with_http_info(body, return_policy_id, **kwargs)  # noqa: E501
            return data

    def update_return_policy_with_http_info(self, body, return_policy_id, **kwargs):  # noqa: E501
        """update_return_policy  # noqa: E501

        This method updates an existing return policy. Specify the policy you want to update using the <b>return_policy_id</b> path parameter. Supply a complete policy payload with the updates you want to make; this call overwrites the existing policy with the new details specified in the payload.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_return_policy_with_http_info(body, return_policy_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ReturnPolicyRequest body: Container for a return policy request. (required)
        :param str return_policy_id: This path parameter specifies the ID of the return policy you want to update. (required)
        :return: SetReturnPolicyResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['body', 'return_policy_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument the generated signature does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_return_policy" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_return_policy`")  # noqa: E501
        # verify the required parameter 'return_policy_id' is set
        if ('return_policy_id' not in params or
                params['return_policy_id'] is None):
            raise ValueError("Missing the required parameter `return_policy_id` when calling `update_return_policy`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'return_policy_id' in params:
            path_params['return_policy_id'] = params['return_policy_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_auth']  # noqa: E501

        return self.api_client.call_api(
            '/return_policy/{return_policy_id}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SetReturnPolicyResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 54.123397
| 1,700
| 0.664555
| 4,321
| 33,773
| 5
| 0.06642
| 0.076094
| 0.034344
| 0.019995
| 0.957926
| 0.954686
| 0.950336
| 0.941541
| 0.935756
| 0.928628
| 0
| 0.010986
| 0.248038
| 33,773
| 623
| 1,701
| 54.210273
| 0.839739
| 0.503805
| 0
| 0.792793
| 0
| 0
| 0.200013
| 0.049372
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039039
| false
| 0
| 0.012012
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9f8467ccf84a676eb72dbd9bcad95a3c1e7eceab
| 97
|
py
|
Python
|
src/rl/tf/values/__init__.py
|
djjh/reinforcement-learning-labs
|
22706dab9e7f16e364ee4ed79c0bd67a343e5b08
|
[
"MIT"
] | 1
|
2019-10-06T11:45:52.000Z
|
2019-10-06T11:45:52.000Z
|
src/rl/tf/values/__init__.py
|
djjh/reinforcement-learning-labs
|
22706dab9e7f16e364ee4ed79c0bd67a343e5b08
|
[
"MIT"
] | null | null | null |
src/rl/tf/values/__init__.py
|
djjh/reinforcement-learning-labs
|
22706dab9e7f16e364ee4ed79c0bd67a343e5b08
|
[
"MIT"
] | null | null | null |
from .value_function import ValueFunction
from .linear_value_function import LinearValueFunction
| 32.333333
| 54
| 0.896907
| 11
| 97
| 7.636364
| 0.636364
| 0.309524
| 0.452381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082474
| 97
| 2
| 55
| 48.5
| 0.94382
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9fa026af42b3e399726e5fe0e070d9ea52626cdf
| 21,799
|
py
|
Python
|
tests/server/scheduled_tasks/buffer_jobs_tasks_test.py
|
WIPACrepo/iceprod
|
83615da9b0e764bc2498ac588cc2e2b3f5277235
|
[
"MIT"
] | 2
|
2017-01-23T17:12:41.000Z
|
2019-01-14T13:38:17.000Z
|
tests/server/scheduled_tasks/buffer_jobs_tasks_test.py
|
WIPACrepo/iceprod
|
83615da9b0e764bc2498ac588cc2e2b3f5277235
|
[
"MIT"
] | 242
|
2016-05-09T18:46:51.000Z
|
2022-03-31T22:02:29.000Z
|
tests/server/scheduled_tasks/buffer_jobs_tasks_test.py
|
WIPACrepo/iceprod
|
83615da9b0e764bc2498ac588cc2e2b3f5277235
|
[
"MIT"
] | 2
|
2017-03-27T09:13:40.000Z
|
2019-01-27T10:55:30.000Z
|
"""
Test script for scheduled_tasks/buffer_jobs_tasks_test
"""
import logging
logger = logging.getLogger('scheduled_tasks_buffer_jobs_tasks_test')
import os
import sys
import shutil
import tempfile
import unittest
from functools import partial
from unittest.mock import patch, MagicMock
from tornado.testing import AsyncTestCase
from rest_tools.client import RestClient
from tests.util import unittest_reporter, glob_tests
from iceprod.server.modules.schedule import schedule
from iceprod.server.scheduled_tasks import buffer_jobs_tasks
class buffer_jobs_tasks_test(AsyncTestCase):
    """Tests for iceprod.server.scheduled_tasks.buffer_jobs_tasks.

    Each test installs a fake async REST ``client`` on a mocked RestClient;
    the chain of ``if url ...`` branches encodes the exact REST calls that
    buffer_jobs_tasks.run() is expected to make, and the ``jobs``/``tasks``
    lists capture the POST bodies for later assertions.
    """

    def setUp(self):
        super(buffer_jobs_tasks_test,self).setUp()
        # Scratch directory for the schedule module's submit_dir; removed on cleanup.
        self.test_dir = tempfile.mkdtemp(dir=os.getcwd())
        def cleanup():
            shutil.rmtree(self.test_dir)
        self.addCleanup(cleanup)
        # Minimal server config accepted by schedule().
        self.cfg = {
            'queue':{
                'init_queue_interval':0.1,
                'submit_dir':self.test_dir,
                '*':{'type':'Test1','description':'d'},
            },
            'master':{
                'url':False,
            },
            'site_id':'abcd',
        }

    @unittest_reporter
    def test_100_buffer_jobs_tasks(self):
        """Registering the scheduled task against a schedule instance works."""
        s = schedule(self.cfg,None,None,None)
        buffer_jobs_tasks.buffer_jobs_tasks(s)

    @unittest_reporter
    async def test_200_run(self):
        """run() buffers jobs/tasks for a processing dataset (3 scenarios)."""
        rc = MagicMock(spec=RestClient)
        # Scenario 1: empty dataset, task 'b' depends on 'a' by name and
        # carries an explicit memory requirement.
        job_ids = list(range(2))
        task_ids = list(range(4))
        jobs = []
        tasks = []
        async def client(method, url, args=None):
            if url.startswith('/dataset_summaries'):
                return {'processing':['foo']}
            elif url == '/datasets/foo':
                client.called = True
                return {'dataset':1,'jobs_submitted':2, 'tasks_submitted':2,'debug':True}
            elif url == '/datasets/foo/jobs':
                return {}
            elif url.startswith('/datasets/foo/task_counts'):
                return {}
            elif url.startswith('/config'):
                return {'options':{},'tasks': [
                    {'name':'a','requirements':{},'depends':[]},
                    {'name':'b','requirements':{'memory':4.5},'depends':['a']},
                ]}
            elif url == '/jobs' and method == 'POST':
                jobs.append(args)
                return {'result': job_ids.pop(0)}
            elif url == '/tasks' and method == 'POST':
                tasks.append(args)
                return {'result': task_ids.pop(0)}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        await buffer_jobs_tasks.run(rc, debug=True)
        self.assertTrue(client.called)
        self.assertTrue(len(jobs) == 2)
        self.assertTrue(len(tasks) == 4)
        self.assertEqual([0,0,1,1], [t['job_id'] for t in tasks])
        self.assertEqual([[],[0],[],[2]], [t['depends'] for t in tasks])
        # Scenario 2: same shape, but 'b' depends on task index 0 (numeric).
        job_ids = list(range(2))
        task_ids = list(range(4))
        jobs = []
        tasks = []
        async def client(method, url, args=None):
            if url.startswith('/dataset_summaries'):
                return {'processing':['foo']}
            elif url == '/datasets/foo':
                client.called = True
                return {'dataset':1,'jobs_submitted':2, 'tasks_submitted':2,'debug':True}
            elif url == '/datasets/foo/jobs':
                return {}
            elif url.startswith('/datasets/foo/task_counts'):
                return {}
            elif url.startswith('/config'):
                return {'options':{},'tasks': [
                    {'name':'a','requirements':{},'depends':[]},
                    {'name':'b','requirements':{},'depends':[0]},
                ]}
            elif url == '/jobs' and method == 'POST':
                jobs.append(args)
                return {'result': job_ids.pop(0)}
            elif url == '/tasks' and method == 'POST':
                tasks.append(args)
                return {'result': task_ids.pop(0)}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        await buffer_jobs_tasks.run(rc, debug=True)
        self.assertTrue(client.called)
        self.assertTrue(len(jobs) == 2)
        self.assertTrue(len(tasks) == 4)
        self.assertEqual([0,0,1,1], [t['job_id'] for t in tasks])
        self.assertEqual([[],[0],[],[2]], [t['depends'] for t in tasks])
        # Scenario 3: job 0 already buffered, so only one new job (and its
        # two tasks) should be created.
        job_ids = list(range(1,2))
        task_ids = list(range(2,4))
        jobs = []
        tasks = []
        async def client(method, url, args=None):
            if url.startswith('/dataset_summaries'):
                return {'processing':['foo']}
            elif url == '/datasets/foo':
                client.called = True
                return {'dataset':1,'jobs_submitted':2, 'tasks_submitted':2,'debug':True}
            elif url == '/datasets/foo/jobs':
                return {0:{'job_index':0}}
            elif url.startswith('/datasets/foo/task_counts'):
                return {}
            elif url.startswith('/config'):
                return {'options':{},'tasks': [
                    {'name':'a','requirements':{},'depends':[]},
                    {'name':'b','requirements':{},'depends':[0]},
                ]}
            elif url == '/jobs' and method == 'POST':
                jobs.append(args)
                return {'result': job_ids.pop(0)}
            elif url == '/tasks' and method == 'POST':
                tasks.append(args)
                return {'result': task_ids.pop(0)}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        await buffer_jobs_tasks.run(rc, debug=True)
        self.assertTrue(client.called)
        self.assertTrue(len(jobs) == 1)
        self.assertTrue(len(tasks) == 2)
        self.assertEqual([1,1], [t['job_id'] for t in tasks])
        self.assertEqual([[],[2]], [t['depends'] for t in tasks])

    @unittest_reporter(name='run() - ext dep')
    async def test_201_run(self):
        """run() resolves dependencies on tasks outside this dataset."""
        rc = MagicMock(spec=RestClient)
        # Scenario 1: 'a' depends on dataset bar's task index 1 ('bar:1');
        # the matching bar task for each job index is looked up per job.
        job_ids = list(range(2,4))
        task_ids = list(range(4,8))
        jobs = []
        tasks = []
        async def client(method, url, args=None):
            if url.startswith('/dataset_summaries'):
                return {'processing':['foo']}
            elif url == '/datasets/foo':
                client.called = True
                return {'dataset':1,'jobs_submitted':2, 'tasks_submitted':2,'debug':True}
            elif url == '/datasets/foo/jobs':
                return {}
            elif url.startswith('/datasets/foo/task_counts'):
                return {}
            elif url.startswith('/datasets/bar/tasks'):
                return {
                    0:{'task_id':0,'job_id':0,'name':'generate','task_index':0},
                    1:{'task_id':1,'job_id':0,'name':'filter','task_index':1},
                    2:{'task_id':2,'job_id':1,'name':'generate','task_index':0},
                    3:{'task_id':3,'job_id':1,'name':'filter','task_index':1},
                }
            elif url == '/jobs/0':
                return {'job_id':0,'job_index':0}
            elif url == '/jobs/1':
                return {'job_id':1,'job_index':1}
            elif url.startswith('/config'):
                return {'options':{},'tasks': [
                    {'name':'a','requirements':{},'depends':['bar:1']},
                    {'name':'b','requirements':{},'depends':['a']},
                ]}
            elif url == '/jobs' and method == 'POST':
                jobs.append(args)
                return {'result': job_ids.pop(0)}
            elif url == '/tasks' and method == 'POST':
                tasks.append(args)
                return {'result': task_ids.pop(0)}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        await buffer_jobs_tasks.run(rc, debug=True)
        self.assertTrue(client.called)
        self.assertTrue(len(jobs) == 2)
        self.assertTrue(len(tasks) == 4)
        self.assertEqual([2,2,3,3], [t['job_id'] for t in tasks])
        self.assertEqual([[1],[4],[3],[6]], [t['depends'] for t in tasks])
        # Scenario 2: 'a' depends on a bare external task_id (3), resolved
        # via /tasks/3 and reused for every job.
        job_ids = list(range(2,4))
        task_ids = list(range(4,8))
        jobs = []
        tasks = []
        async def client(method, url, args=None):
            if url.startswith('/dataset_summaries'):
                return {'processing':['foo']}
            elif url == '/datasets/foo':
                client.called = True
                return {'dataset':1,'jobs_submitted':2, 'tasks_submitted':2,'debug':True}
            elif url == '/datasets/foo/jobs':
                return {}
            elif url.startswith('/datasets/foo/task_counts'):
                return {}
            elif url == '/tasks/3':
                return {'task_id':3,'name':'filter','task_index':3}
            elif url.startswith('/config'):
                return {'options':{},'tasks': [
                    {'name':'a','requirements':{},'depends':[3]},
                    {'name':'b','requirements':{},'depends':['a']},
                ]}
            elif url == '/jobs' and method == 'POST':
                jobs.append(args)
                return {'result': job_ids.pop(0)}
            elif url == '/tasks' and method == 'POST':
                tasks.append(args)
                return {'result': task_ids.pop(0)}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        await buffer_jobs_tasks.run(rc, debug=True)
        self.assertTrue(client.called)
        self.assertTrue(len(jobs) == 2)
        self.assertTrue(len(tasks) == 4)
        self.assertEqual([2,2,3,3], [t['job_id'] for t in tasks])
        self.assertEqual([[3],[4],[3],[6]], [t['depends'] for t in tasks])

    @unittest_reporter(name='run() - dep err')
    async def test_202_run(self):
        """run(debug=True) raises on unresolvable dependencies (4 cases)."""
        rc = MagicMock(spec=RestClient)
        # Case 1: circular name dependency a<->b.
        job_ids = list(range(2))
        task_ids = list(range(4))
        jobs = []
        tasks = []
        async def client(method, url, args=None):
            logger.info('RPC: %s %s', method, url)
            if url.startswith('/dataset_summaries'):
                return {'processing':['foo']}
            elif url == '/datasets/foo':
                client.called = True
                return {'dataset':1,'jobs_submitted':2, 'tasks_submitted':2,'debug':True}
            elif url == '/datasets/foo/jobs':
                return {}
            elif url.startswith('/datasets/foo/task_counts'):
                return {}
            elif url.startswith('/config'):
                return {'options':{},'tasks': [
                    {'name':'a','requirements':{},'depends':['b']},
                    {'name':'b','requirements':{},'depends':['a']},
                ]}
            elif url == '/jobs' and method == 'POST':
                jobs.append(args)
                return {'result': job_ids.pop(0)}
            elif url == '/tasks' and method == 'POST':
                tasks.append(args)
                return {'result': task_ids.pop(0)}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        with self.assertRaises(Exception):
            await buffer_jobs_tasks.run(rc, debug=True)
        # Case 2: first task depends on itself by index (0).
        job_ids = list(range(2))
        task_ids = list(range(4))
        jobs = []
        tasks = []
        async def client(method, url, args=None):
            logger.info('RPC: %s %s', method, url)
            if url.startswith('/dataset_summaries'):
                return {'processing':['foo']}
            elif url == '/datasets/foo':
                client.called = True
                return {'dataset':1,'jobs_submitted':2, 'tasks_submitted':2,'debug':True}
            elif url == '/datasets/foo/jobs':
                return {}
            elif url.startswith('/datasets/foo/task_counts'):
                return {}
            elif url.startswith('/config'):
                return {'options':{},'tasks': [
                    {'name':'a','requirements':{},'depends':[0]},
                    {'name':'b','requirements':{},'depends':['a']},
                ]}
            elif url == '/jobs' and method == 'POST':
                jobs.append(args)
                return {'result': job_ids.pop(0)}
            elif url == '/tasks' and method == 'POST':
                tasks.append(args)
                return {'result': task_ids.pop(0)}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        with self.assertRaises(Exception):
            await buffer_jobs_tasks.run(rc, debug=True)
        # Case 3: dependency name that matches nothing ('lalala').
        job_ids = list(range(2))
        task_ids = list(range(4))
        jobs = []
        tasks = []
        async def client(method, url, args=None):
            logger.info('RPC: %s %s', method, url)
            if url.startswith('/dataset_summaries'):
                return {'processing':['foo']}
            elif url == '/datasets/foo':
                client.called = True
                return {'jobs_submitted':2, 'tasks_submitted':2}
            elif url == '/datasets/foo/jobs':
                return {}
            elif url.startswith('/datasets/foo/task_counts'):
                return {}
            elif url.startswith('/config'):
                return {'options':{},'tasks': [
                    {'name':'a','requirements':{},'depends':['lalala']},
                    {'name':'b','requirements':{},'depends':['a']},
                ]}
            elif url == '/jobs' and method == 'POST':
                jobs.append(args)
                return {'result': job_ids.pop(0)}
            elif url == '/tasks' and method == 'POST':
                tasks.append(args)
                return {'result': task_ids.pop(0)}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        with self.assertRaises(Exception):
            await buffer_jobs_tasks.run(rc, debug=True)
        # Case 4: external dependency name not present in dataset bar.
        job_ids = list(range(2))
        task_ids = list(range(4))
        jobs = []
        tasks = []
        async def client(method, url, args=None):
            logger.info('RPC: %s %s', method, url)
            if url.startswith('/dataset_summaries'):
                return {'processing':['foo']}
            elif url == '/datasets/foo':
                client.called = True
                return {'dataset':1,'jobs_submitted':2, 'tasks_submitted':2,'debug':True}
            elif url == '/datasets/foo/jobs':
                return {}
            elif url.startswith('/datasets/foo/task_counts'):
                return {}
            elif url.startswith('/datasets/bar/tasks'):
                return {
                    0:{'task_id':0,'name':'generate','task_index':0},
                    1:{'task_id':1,'name':'filter','task_index':1},
                    2:{'task_id':2,'name':'generate','task_index':2},
                    3:{'task_id':3,'name':'filter','task_index':3},
                }
            elif url.startswith('/config'):
                return {'options':{},'tasks': [
                    {'name':'a','requirements':{},'depends':['bar:lalala']},
                    {'name':'b','requirements':{},'depends':['a']},
                ]}
            elif url == '/jobs' and method == 'POST':
                jobs.append(args)
                return {'result': job_ids.pop(0)}
            elif url == '/tasks' and method == 'POST':
                tasks.append(args)
                return {'result': task_ids.pop(0)}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        with self.assertRaises(Exception):
            await buffer_jobs_tasks.run(rc, debug=True)

    @unittest_reporter(name='run() - req uses config')
    async def test_205_run(self):
        """$eval()/$steering()/$(job) expressions in requirements are evaluated."""
        rc = MagicMock(spec=RestClient)
        job_ids = list(range(2,4))
        task_ids = list(range(4,8))
        jobs = []
        tasks = []
        async def client(method, url, args=None):
            if url.startswith('/dataset_summaries'):
                return {'processing':['foo']}
            elif url == '/datasets/foo':
                client.called = True
                return {'dataset':1,'jobs_submitted':2, 'tasks_submitted':2,'debug':True}
            elif url == '/datasets/foo/jobs':
                return {}
            elif url.startswith('/datasets/foo/task_counts'):
                return {}
            elif url.startswith('/datasets/bar/tasks'):
                return {
                    0:{'task_id':0,'name':'generate','task_index':0},
                    1:{'task_id':1,'name':'filter','task_index':1},
                    2:{'task_id':2,'name':'generate','task_index':2},
                    3:{'task_id':3,'name':'filter','task_index':3},
                }
            elif url.startswith('/config'):
                return {'steering':{
                    'parameters': {'test':'$eval(3+2.4)'}
                },'options':{},
                'tasks': [
                    {'name':'a','requirements':{'cpu':'$eval(1+1)'},'depends':[]},
                    {'name':'b','requirements':{'memory':'$eval($steering(test)+$(job))'},'depends':['a']},
                ]}
            elif url == '/jobs' and method == 'POST':
                jobs.append(args)
                return {'result': job_ids.pop(0)}
            elif url == '/tasks' and method == 'POST':
                tasks.append(args)
                return {'result': task_ids.pop(0)}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        await buffer_jobs_tasks.run(rc, debug=True)
        self.assertTrue(client.called)
        self.assertTrue(len(jobs) == 2)
        self.assertTrue(len(tasks) == 4)
        self.assertEqual([2,2,3,3], [t['job_id'] for t in tasks])
        # cpu: evaluated 2 for task a, default 1 for task b.
        self.assertEqual([2,1,2,1], [t['requirements']['cpu'] for t in tasks])
        # memory: default 1.0 for a; 5.4/6.4 for b (5.4 + job index).
        self.assertEqual([1.0,5.4,1.0,6.4], [t['requirements']['memory'] for t in tasks])

    @unittest_reporter(name='run() - no buffer')
    async def test_210_run(self):
        """run() buffers nothing when queue is full / jobs exist / no datasets."""
        rc = MagicMock(spec=RestClient)
        job_ids = list(range(2))
        task_ids = list(range(4))
        jobs = []
        tasks = []
        # Case 1: too many waiting tasks already queued.
        async def client(method, url, args=None):
            if url.startswith('/dataset_summaries'):
                return {'processing':['foo']}
            elif url == '/datasets/foo':
                client.called = True
                return {'dataset':1,'jobs_submitted':2, 'tasks_submitted':2,'debug':True}
            elif url.startswith('/datasets/foo/task_counts'):
                return {'waiting':30000}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        await buffer_jobs_tasks.run(rc, debug=True)
        self.assertTrue(client.called)
        self.assertFalse(jobs)
        self.assertFalse(tasks)
        # Case 2: all jobs for the dataset are already buffered.
        async def client(method, url, args=None):
            if url.startswith('/dataset_summaries'):
                return {'processing':['foo']}
            elif url == '/datasets/foo':
                client.called = True
                return {'dataset':1,'jobs_submitted':2, 'tasks_submitted':2,'debug':True}
            elif url.startswith('/datasets/foo/task_counts'):
                return {}
            elif url == '/datasets/foo/jobs':
                return {0:{},1:{}}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        await buffer_jobs_tasks.run(rc, debug=True)
        self.assertTrue(client.called)
        self.assertFalse(jobs)
        self.assertFalse(tasks)
        # Case 3: no processing datasets at all.
        async def client(method, url, args=None):
            if url.startswith('/dataset_summaries'):
                client.called = True
                return {}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        await buffer_jobs_tasks.run(rc, debug=True)
        self.assertTrue(client.called)
        self.assertFalse(jobs)
        self.assertFalse(tasks)

    @unittest_reporter(name='run() - error')
    async def test_300_run(self):
        """REST errors propagate with debug=True and are swallowed without it."""
        rc = MagicMock(spec=RestClient)
        job_ids = list(range(2))
        task_ids = list(range(4))
        jobs = []
        tasks = []
        async def client(method, url, args=None):
            if url.startswith('/dataset_summaries'):
                client.called = True
                return {'processing':['foo']}
            else:
                raise Exception()
        client.called = False
        rc.request = client
        with self.assertRaises(Exception):
            await buffer_jobs_tasks.run(rc, debug=True)
        self.assertTrue(client.called)
        self.assertFalse(jobs)
        self.assertFalse(tasks)
        # internally catch the error
        await buffer_jobs_tasks.run(rc)
def load_tests(loader, tests, pattern):
    """Build the suite for this module, honoring test-name globbing."""
    selected_names = glob_tests(loader.getTestCaseNames(buffer_jobs_tasks_test))
    test_suite = unittest.TestSuite()
    test_suite.addTests(
        loader.loadTestsFromNames(selected_names, buffer_jobs_tasks_test))
    return test_suite
| 41.442966
| 108
| 0.490619
| 2,266
| 21,799
| 4.619153
| 0.071933
| 0.047483
| 0.035827
| 0.039553
| 0.868253
| 0.856024
| 0.848381
| 0.833859
| 0.83061
| 0.827362
| 0
| 0.017123
| 0.357035
| 21,799
| 525
| 109
| 41.521905
| 0.729666
| 0.003762
| 0
| 0.814516
| 0
| 0
| 0.165502
| 0.017324
| 0
| 0
| 0
| 0
| 0.096774
| 1
| 0.008065
| false
| 0
| 0.02621
| 0
| 0.209677
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e287397fb37d9797864b88e05cb0712e90859b14
| 74
|
py
|
Python
|
timeserver/timegetter.py
|
toplenboren/timelier
|
a370e11ee85361a672fdb1eb8bac4e9fa82d1f5d
|
[
"MIT"
] | null | null | null |
timeserver/timegetter.py
|
toplenboren/timelier
|
a370e11ee85361a672fdb1eb8bac4e9fa82d1f5d
|
[
"MIT"
] | null | null | null |
timeserver/timegetter.py
|
toplenboren/timelier
|
a370e11ee85361a672fdb1eb8bac4e9fa82d1f5d
|
[
"MIT"
] | null | null | null |
import datetime
def get_time_from_os():
    """Return the current local time from the OS clock as a datetime.

    Bug fix: the original returned the ``datetime.datetime.now`` callable
    itself rather than invoking it, so callers received a function instead
    of a timestamp.
    """
    return datetime.datetime.now()
| 14.8
| 32
| 0.783784
| 11
| 74
| 5
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148649
| 74
| 4
| 33
| 18.5
| 0.873016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
e2ab10908ff36708dcf7b11417bbf53ef175aa3f
| 13,191
|
py
|
Python
|
nuage_tempest_plugin/tests/api/vsd_managed/test_vsd_managed_port_security.py
|
nuagenetworks/nuage-tempest-plugin
|
ac1bfb0709c7bbaf04017af3050fb3ed1ad1324a
|
[
"Apache-1.1"
] | 1
|
2021-01-03T01:47:51.000Z
|
2021-01-03T01:47:51.000Z
|
nuage_tempest_plugin/tests/api/vsd_managed/test_vsd_managed_port_security.py
|
nuagenetworks/nuage-tempest-plugin
|
ac1bfb0709c7bbaf04017af3050fb3ed1ad1324a
|
[
"Apache-1.1"
] | null | null | null |
nuage_tempest_plugin/tests/api/vsd_managed/test_vsd_managed_port_security.py
|
nuagenetworks/nuage-tempest-plugin
|
ac1bfb0709c7bbaf04017af3050fb3ed1ad1324a
|
[
"Apache-1.1"
] | 1
|
2020-10-16T12:04:39.000Z
|
2020-10-16T12:04:39.000Z
|
# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from netaddr import IPNetwork
from oslo_log import log as logging
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.test import decorators
from nuage_tempest_plugin.lib.test import nuage_test
from nuage_tempest_plugin.lib.test import tags
from nuage_tempest_plugin.lib.utils import constants
from nuage_tempest_plugin.tests.api.vsd_managed \
import base_vsd_managed_networks
CONF = config.CONF
LOG = logging.getLogger(__name__)
@nuage_test.class_header(tags=tags.VSD_MANAGED)
class VSDManagedPortSecurity(
        base_vsd_managed_networks.BaseVSDManagedNetwork):
    """Port-security tests for VSD-managed networks.

    Each test builds a VSD-managed topology (L3 domain subnet, DHCP-managed
    L2 domain, or DHCP-unmanaged L2 domain), maps an OpenStack subnet onto
    it via ``nuagenet``, then creates/updates ports and asserts the
    resulting ``addressSpoofing`` state on the corresponding VSD vport:
    port_security disabled -> ENABLED spoofing, default -> INHERITED,
    re-enabled -> DISABLED.
    """

    @decorators.attr(type='smoke')
    def test_create_port_security_disabled_l3(self):
        """Port created with port_security_enabled=False on an L3 subnet
        gets addressSpoofing ENABLED on its vport."""
        name = data_utils.rand_name('l3domain-')
        vsd_l3dom_tmplt = self.create_vsd_l3dom_template(
            name=name)
        vsd_l3dom = self.create_vsd_l3domain(name=name,
                                             tid=vsd_l3dom_tmplt[0]['ID'])
        zonename = data_utils.rand_name('l3dom-zone-')
        vsd_zone = self.create_vsd_zone(name=zonename,
                                        domain_id=vsd_l3dom[0]['ID'])
        subname = data_utils.rand_name('l3dom-sub-')
        cidr = IPNetwork('10.10.100.0/24')
        extra_params = {}
        vsd_subnet = self.create_vsd_l3domain_subnet(
            name=subname,
            zone_id=vsd_zone[0]['ID'],
            cidr=cidr,
            gateway='10.10.100.1',
            extra_params=extra_params)
        # Map an OpenStack subnet onto the VSD subnet via nuagenet.
        net_name = data_utils.rand_name('network-vsd-managed-')
        net = self.create_network(network_name=net_name)
        np = CONF.nuage.nuage_default_netpartition
        self.create_subnet(net,
                           cidr=cidr,
                           mask_bits=24,
                           nuagenet=vsd_subnet[0]['ID'],
                           net_partition=np)
        post_body = {'network_id': net['id'],
                     'port_security_enabled': 'False'}
        self._configure_smart_nic_attributes(post_body)
        body = self.ports_client.create_port(**post_body)
        port = body['port']
        self.addCleanup(self.ports_client.delete_port, port['id'])
        # The vport's externalID matches the neutron port id.
        nuage_vport = self.nuage_client.get_vport(
            constants.SUBNETWORK,
            vsd_subnet[0]['ID'],
            filters='externalID',
            filter_value=port['id'])
        self.assertEqual(constants.ENABLED,
                         nuage_vport[0]['addressSpoofing'])

    @decorators.attr(type='smoke')
    def test_create_port_security_managed_l2(self):
        """Same check on a DHCP-managed L2 domain."""
        name = data_utils.rand_name('l2domain-')
        cidr = IPNetwork('10.10.100.0/24')
        vsd_l2dom_tmplt = self.create_vsd_dhcpmanaged_l2dom_template(
            name=name, cidr=cidr, gateway='10.10.100.1')
        vsd_l2dom = self.create_vsd_l2domain(name=name,
                                             tid=vsd_l2dom_tmplt[0]['ID'])
        # create subnet on OS with nuagenet param set to l2domain UUID
        net_name = data_utils.rand_name('network-')
        net = self.create_network(network_name=net_name)
        self.create_subnet(
            net, gateway=None,
            cidr=cidr, mask_bits=24, nuagenet=vsd_l2dom[0]['ID'],
            net_partition=CONF.nuage.nuage_default_netpartition,
            enable_dhcp=True)
        post_body = {'network_id': net['id'],
                     'port_security_enabled': 'False'}
        self._configure_smart_nic_attributes(post_body)
        body = self.ports_client.create_port(**post_body)
        port = body['port']
        self.addCleanup(self.ports_client.delete_port, port['id'])
        nuage_vport = self.nuage_client.get_vport(
            constants.L2_DOMAIN,
            vsd_l2dom[0]['ID'],
            filters='externalID',
            filter_value=port['id'])
        self.assertEqual(constants.ENABLED,
                         nuage_vport[0]['addressSpoofing'])

    @decorators.attr(type='smoke')
    def test_create_port_security_unmanaged_l2(self):
        """Same check on a DHCP-unmanaged L2 domain (DHCP disabled)."""
        name = data_utils.rand_name('l2domain-')
        vsd_l2dom_tmplt = self.create_vsd_dhcpunmanaged_l2dom_template(
            name=name)
        vsd_l2dom = self.create_vsd_l2domain(name=name,
                                             tid=vsd_l2dom_tmplt[0]['ID'])
        self.assertEqual(vsd_l2dom[0]['name'], name)
        # create subnet on OS with nuagenet param set to l2domain UUID
        net_name = data_utils.rand_name('network-')
        net = self.create_network(network_name=net_name)
        self.create_subnet(
            net,
            cidr=IPNetwork('10.10.100.0/24'),
            mask_bits=24, nuagenet=vsd_l2dom[0]['ID'],
            net_partition=CONF.nuage.nuage_default_netpartition,
            enable_dhcp=False)
        post_body = {'network_id': net['id'],
                     'port_security_enabled': 'False'}
        self._configure_smart_nic_attributes(post_body)
        body = self.ports_client.create_port(**post_body)
        port = body['port']
        self.addCleanup(self.ports_client.delete_port, port['id'])
        nuage_vport = self.nuage_client.get_vport(
            constants.L2_DOMAIN,
            vsd_l2dom[0]['ID'],
            filters='externalID',
            filter_value=port['id'])
        self.assertEqual(constants.ENABLED,
                         nuage_vport[0]['addressSpoofing'])

    @decorators.attr(type='smoke')
    def test_update_port_security_l3(self):
        """addressSpoofing follows port_security updates on an L3 subnet:
        default INHERITED -> disabled ENABLED -> re-enabled DISABLED."""
        name = data_utils.rand_name('l3domain-')
        vsd_l3dom_tmplt = self.create_vsd_l3dom_template(
            name=name)
        vsd_l3dom = self.create_vsd_l3domain(name=name,
                                             tid=vsd_l3dom_tmplt[0]['ID'])
        zonename = data_utils.rand_name('l3dom-zone-')
        vsd_zone = self.create_vsd_zone(name=zonename,
                                        domain_id=vsd_l3dom[0]['ID'])
        subname = data_utils.rand_name('l3dom-sub-')
        cidr = IPNetwork('10.10.100.0/24')
        extra_params = {}
        vsd_subnet = self.create_vsd_l3domain_subnet(
            name=subname,
            zone_id=vsd_zone[0]['ID'],
            cidr=cidr,
            gateway='10.10.100.1',
            extra_params=extra_params)
        net_name = data_utils.rand_name('network-vsd-managed-')
        net = self.create_network(network_name=net_name)
        np = CONF.nuage.nuage_default_netpartition
        self.create_subnet(net,
                           cidr=cidr,
                           mask_bits=24,
                           nuagenet=vsd_subnet[0]['ID'],
                           net_partition=np)
        # Port created with port security left at its default.
        post_body = {'network_id': net['id']}
        self._configure_smart_nic_attributes(post_body)
        body = self.ports_client.create_port(**post_body)
        port = body['port']
        self.addCleanup(self.ports_client.delete_port, port['id'])
        nuage_vport = self.nuage_client.get_vport(
            constants.SUBNETWORK,
            vsd_subnet[0]['ID'],
            filters='externalID',
            filter_value=port['id'])
        self.assertEqual(constants.INHERITED,
                         nuage_vport[0]['addressSpoofing'])
        # Disable port security (security groups must be cleared first).
        update_body = {'security_groups': [],
                       'port_security_enabled': 'False'}
        self.ports_client.update_port(port['id'], **update_body)
        nuage_vport = self.nuage_client.get_vport(
            constants.SUBNETWORK,
            vsd_subnet[0]['ID'],
            filters='externalID',
            filter_value=port['id'])
        self.assertEqual(constants.ENABLED,
                         nuage_vport[0]['addressSpoofing'])
        # Re-enable port security.
        update_body = {'port_security_enabled': 'True'}
        self.ports_client.update_port(port['id'], **update_body)
        nuage_vport = self.nuage_client.get_vport(
            constants.SUBNETWORK,
            vsd_subnet[0]['ID'],
            filters='externalID',
            filter_value=port['id'])
        self.assertEqual(constants.DISABLED,
                         nuage_vport[0]['addressSpoofing'])

    @decorators.attr(type='smoke')
    def test_update_port_security_managed_l2(self):
        """Same update sequence on a DHCP-managed L2 domain."""
        name = data_utils.rand_name('l2domain-')
        cidr = IPNetwork('10.10.100.0/24')
        vsd_l2dom_tmplt = self.create_vsd_dhcpmanaged_l2dom_template(
            name=name, cidr=cidr, gateway='10.10.100.1')
        vsd_l2dom = self.create_vsd_l2domain(name=name,
                                             tid=vsd_l2dom_tmplt[0]['ID'])
        # create subnet on OS with nuagenet param set to l2domain UUID
        net_name = data_utils.rand_name('network-')
        net = self.create_network(network_name=net_name)
        self.create_subnet(
            net, gateway=None,
            cidr=cidr, mask_bits=24, nuagenet=vsd_l2dom[0]['ID'],
            net_partition=CONF.nuage.nuage_default_netpartition,
            enable_dhcp=True)
        post_body = {'network_id': net['id']}
        self._configure_smart_nic_attributes(post_body)
        body = self.ports_client.create_port(**post_body)
        port = body['port']
        self.addCleanup(self.ports_client.delete_port, port['id'])
        nuage_vport = self.nuage_client.get_vport(
            constants.L2_DOMAIN,
            vsd_l2dom[0]['ID'],
            filters='externalID',
            filter_value=port['id'])
        self.assertEqual(constants.INHERITED,
                         nuage_vport[0]['addressSpoofing'])
        update_body = {'security_groups': [],
                       'port_security_enabled': 'False'}
        self.ports_client.update_port(port['id'], **update_body)
        nuage_vport = self.nuage_client.get_vport(
            constants.L2_DOMAIN,
            vsd_l2dom[0]['ID'],
            filters='externalID',
            filter_value=port['id'])
        self.assertEqual(constants.ENABLED,
                         nuage_vport[0]['addressSpoofing'])
        update_body = {'port_security_enabled': 'True'}
        self.ports_client.update_port(port['id'], **update_body)
        nuage_vport = self.nuage_client.get_vport(
            constants.L2_DOMAIN,
            vsd_l2dom[0]['ID'],
            filters='externalID',
            filter_value=port['id'])
        self.assertEqual(constants.DISABLED,
                         nuage_vport[0]['addressSpoofing'])

    @decorators.attr(type='smoke')
    def test_update_port_security_unmanaged_l2(self):
        """Same update sequence on a DHCP-unmanaged L2 domain."""
        name = data_utils.rand_name('l2domain-')
        vsd_l2dom_tmplt = self.create_vsd_dhcpunmanaged_l2dom_template(
            name=name)
        vsd_l2dom = self.create_vsd_l2domain(name=name,
                                             tid=vsd_l2dom_tmplt[0]['ID'])
        self.assertEqual(vsd_l2dom[0]['name'], name)
        # create subnet on OS with nuagenet param set to l2domain UUID
        net_name = data_utils.rand_name('network-')
        net = self.create_network(network_name=net_name)
        self.create_subnet(
            net,
            cidr=IPNetwork('10.10.100.0/24'),
            mask_bits=24, nuagenet=vsd_l2dom[0]['ID'],
            net_partition=CONF.nuage.nuage_default_netpartition,
            enable_dhcp=False)
        post_body = {'network_id': net['id']}
        self._configure_smart_nic_attributes(post_body)
        body = self.ports_client.create_port(**post_body)
        port = body['port']
        self.addCleanup(self.ports_client.delete_port, port['id'])
        nuage_vport = self.nuage_client.get_vport(
            constants.L2_DOMAIN,
            vsd_l2dom[0]['ID'],
            filters='externalID',
            filter_value=port['id'])
        self.assertEqual(constants.INHERITED,
                         nuage_vport[0]['addressSpoofing'])
        update_body = {'security_groups': [],
                       'port_security_enabled': 'False'}
        self.ports_client.update_port(port['id'], **update_body)
        nuage_vport = self.nuage_client.get_vport(
            constants.L2_DOMAIN,
            vsd_l2dom[0]['ID'],
            filters='externalID',
            filter_value=port['id'])
        self.assertEqual(constants.ENABLED,
                         nuage_vport[0]['addressSpoofing'])
        update_body = {'port_security_enabled': 'True'}
        self.ports_client.update_port(port['id'], **update_body)
        nuage_vport = self.nuage_client.get_vport(
            constants.L2_DOMAIN,
            vsd_l2dom[0]['ID'],
            filters='externalID',
            filter_value=port['id'])
        self.assertEqual(constants.DISABLED,
                         nuage_vport[0]['addressSpoofing'])
| 42.009554
| 78
| 0.604806
| 1,536
| 13,191
| 4.906901
| 0.114583
| 0.03715
| 0.035823
| 0.036089
| 0.884835
| 0.881518
| 0.881518
| 0.87223
| 0.87223
| 0.865862
| 0
| 0.024377
| 0.28474
| 13,191
| 313
| 79
| 42.14377
| 0.774457
| 0.064438
| 0
| 0.920152
| 0
| 0
| 0.090083
| 0.015338
| 0
| 0
| 0
| 0
| 0.053232
| 1
| 0.022814
| false
| 0
| 0.034221
| 0
| 0.060837
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e2c1491241290d30347c029922774987bdc3f2ee
| 17,193
|
py
|
Python
|
src/Inference/patch/UNET_utils.py
|
kshannon/lung-nodule-localization
|
8b42844fcd4abf30bd15bbc1f220cc5d12c77382
|
[
"MIT"
] | 3
|
2017-08-07T09:54:30.000Z
|
2017-11-23T23:00:02.000Z
|
src/Inference/patch/UNET_utils.py
|
kshannon/lung-nodule-localization
|
8b42844fcd4abf30bd15bbc1f220cc5d12c77382
|
[
"MIT"
] | 1
|
2018-09-10T13:28:17.000Z
|
2019-11-16T06:57:08.000Z
|
src/Inference/patch/UNET_utils.py
|
kshannon/ucsd-dse-capstone
|
8b42844fcd4abf30bd15bbc1f220cc5d12c77382
|
[
"MIT"
] | 3
|
2018-04-21T19:02:00.000Z
|
2018-04-27T23:30:00.000Z
|
import pandas as pd
import numpy as np
from keras.models import load_model
import h5py
import pandas as pd
import argparse
import tensorflow as tf
import SimpleITK as sitk
import matplotlib.pyplot as plt
from PIL import Image
import os
import tensorflow as tf
import keras
def dice_coef_loss(target, prediction, axis=(1,2,3), smooth=1e-5):
    """
    Sorenson Dice loss.

    Uses -log(Dice) rather than 1 - Dice: the log form is better behaved
    and avoids the division, which helps prevent underflow when the
    intersection terms are very small.
    """
    overlap = tf.reduce_sum(target * prediction, axis=axis)
    pred_total = tf.reduce_sum(prediction, axis=axis)
    target_total = tf.reduce_sum(target, axis=axis)
    log_numerator = tf.log(tf.reduce_mean(2. * overlap + smooth))
    log_denominator = tf.log(tf.reduce_mean(target_total + pred_total + smooth))
    # -log(num/den) == log(den) - log(num)
    return log_denominator - log_numerator
def dice_coef(target, prediction, axis=(1, 2, 3), smooth=1e-5):
    """
    Sorenson Dice coefficient, averaged over the batch.
    """
    overlap = tf.reduce_sum(target * prediction, axis=axis)
    pred_total = tf.reduce_sum(prediction, axis=axis)
    target_total = tf.reduce_sum(target, axis=axis)
    per_sample_dice = (2. * overlap + smooth) / (target_total + pred_total + smooth)
    return tf.reduce_mean(per_sample_dice)
def normalize_HU(img):
    """Rescale Hounsfield Units to [0, 1].

    Values are mapped linearly from the [-1000, 400] HU window (air to
    bone-ish) and clipped, so anything outside the window saturates at
    0 or 1.

    Args:
        img: numpy array of HU values (any shape).
    Returns:
        Float array of the same shape with values in [0, 1].
    """
    maxHU, minHU = 400., -1000.
    scaled = (img - minHU) / (maxHU - minHU)
    # np.clip replaces the original pair of boolean-mask assignments;
    # it also avoids mutating an intermediate in place.
    return np.clip(scaled, 0., 1.)
def normalize_img(img):
    """Resample a SimpleITK image to isotropic 1 mm voxel spacing.

    The output voxel grid covers the same physical extent as the input
    (spacing * voxel count per axis), resampled with B-spline
    interpolation; the origin is rescaled into the new voxel units.
    """
    target_spacing = [1.0, 1.0, 1.0]  # desired mm per voxel (x, y, z)
    # Physical extent along each axis in mm.
    extent_mm = [img.GetSpacing()[0] * img.GetWidth(),
                 img.GetSpacing()[1] * img.GetHeight(),
                 img.GetSpacing()[2] * img.GetDepth()]
    # Voxel counts needed to cover that extent at the target spacing.
    out_size = np.rint(np.array(extent_mm) / np.array(target_spacing)).astype(int)
    resampled = sitk.Resample(img,
                              np.array(out_size, dtype='uint32').tolist(),
                              sitk.Transform(),
                              sitk.sitkBSpline,
                              img.GetOrigin(),
                              target_spacing,
                              img.GetDirection(),
                              0.0,
                              img.GetPixelIDValue())
    resampled.SetOrigin(np.array(img.GetOrigin()) / np.array(target_spacing))
    return resampled
def makeMasks():
    """Precompute spherical binary masks for a 64^3 patch.

    Returns:
        dict mapping radius (0..15) to a (64, 64, 64, 1) float array that
        is 1 inside a sphere of that radius centered at (32, 32, 32) and
        0 elsewhere. Radius 0 yields an all-zero mask (matching the
        original behavior, which skipped the fill for radius 0).

    Replaces the original per-radius 64^3 pure-Python triple loop
    (~4M iterations) with one vectorized distance field shared by all radii.
    """
    masks = {}
    max_radius = 16
    patch_dim = 64
    half = patch_dim / 2
    # Distance of every voxel from the patch center, computed once.
    ii, jj, kk = np.ogrid[:patch_dim, :patch_dim, :patch_dim]
    dist = np.sqrt((ii - half) ** 2 + (jj - half) ** 2 + (kk - half) ** 2)
    for radius in range(max_radius):
        mask = np.zeros((patch_dim, patch_dim, patch_dim, 1))
        if radius > 0:
            # Boolean index over the spatial dims fills the channel dim too.
            mask[dist <= radius] = 1
        masks[radius] = mask
    return masks
def create_unet3D_Model_A(input_img, use_upsampling=False, n_out=1, dropout=0.2,
                          print_summary = False):
    """
    3D U-Net model - Model-A.

    Shallow two-pooling-level 3D U-Net variant with batch normalization
    disabled (the BN lines are deliberately commented out below).

    Args:
        input_img: input volume shape tuple, channels-last
            (presumably (D, H, W, C) -- TODO confirm against callers).
        use_upsampling: True -> UpSampling3D decoder; False -> Conv3DTranspose.
        n_out: number of channels of the sigmoid prediction mask.
        dropout: dropout rate applied once in the deepest encoder stage.
        print_summary: if True, print the Keras model summary.

    Returns:
        keras.models.Model with outputs [pred_msk, conv5] -- the mask and
        the last decoder feature maps.
    """
    concat_axis = -1  # skip connections are concatenated on the channel axis
    data_format = "channels_last"
    # print("3D U-Net Segmentation")
    # Set keras learning phase to train
    keras.backend.set_learning_phase(True)
    # Don"t initialize variables on the fly
    keras.backend.manual_variable_initialization(False)
    inputs = keras.layers.Input(shape=input_img, name="Input_Image")
    # Shared Conv3D settings; activation=None so ReLU is applied as a separate
    # layer (allows inserting BatchNormalization between conv and activation).
    params = dict(kernel_size=(3, 3, 3), activation=None,
                  padding="same", data_format=data_format,
                  kernel_initializer="he_uniform")
    # params = dict(kernel_size=(3, 3, 3), activation="relu",
    #               padding="same", data_format=data_format,
    #               kernel_initializer="he_uniform")
    # Encoder level 1: 32 -> 64 filters.
    conv1 = keras.layers.Conv3D(name="conv1a", filters=32, **params)(inputs)
    # conv1 = keras.layers.BatchNormalization(axis =-1)(conv1)
    conv1 = keras.layers.Activation('relu')(conv1)
    conv1 = keras.layers.Conv3D(name="conv1b", filters=64, **params)(conv1)
    # conv1 = keras.layers.BatchNormalization(axis =-1)(conv1)
    conv1 = keras.layers.Activation('relu')(conv1)
    pool1 = keras.layers.MaxPooling3D(name="pool1", pool_size=(2, 2, 2))(conv1)
    # Encoder level 2: 64 -> 128 filters.
    conv2 = keras.layers.Conv3D(name="conv2a", filters=64, **params)(pool1)
    # conv2 = keras.layers.BatchNormalization(axis =-1)(conv2)
    conv2 = keras.layers.Activation('relu')(conv2)
    conv2 = keras.layers.Conv3D(name="conv2b", filters=128, **params)(conv2)
    # conv2 = keras.layers.BatchNormalization(axis =-1)(conv2)
    conv2 = keras.layers.Activation('relu')(conv2)
    pool2 = keras.layers.MaxPooling3D(name="pool2", pool_size=(2, 2, 2))(conv2)
    # Bottleneck: 128 -> 256 filters, with dropout between the two convs.
    conv3 = keras.layers.Conv3D(name="conv3a", filters=128, **params)(pool2)
    # conv3 = keras.layers.BatchNormalization(axis =-1)(conv3)
    conv3 = keras.layers.Activation('relu')(conv3)
    conv3 = keras.layers.Dropout(dropout)(conv3)  ### Trying dropout layers earlier on, as indicated in the paper
    conv3 = keras.layers.Conv3D(name="conv3b", filters=256, **params)(conv3)
    # conv3 = keras.layers.BatchNormalization(axis =-1)(conv3)
    conv3 = keras.layers.Activation('relu')(conv3)
    # Decoder level 1: upsample (or transpose-conv) and merge with the conv2 skip.
    if use_upsampling:
        up3 = keras.layers.concatenate([keras.layers.UpSampling3D(name="up3", size=(2, 2, 2))(conv3), conv2], axis=concat_axis)
    else:
        up3 = keras.layers.concatenate([keras.layers.Conv3DTranspose(name="transConv3", filters=256, data_format=data_format,
                                        kernel_size=(2, 2, 2), strides=(2, 2, 2), padding="same")(conv3), conv2], axis=concat_axis)
    conv4 = keras.layers.Conv3D(name="conv4a", filters=128, **params)(up3)
    conv4 = keras.layers.Activation('relu')(conv4)
    conv4 = keras.layers.Conv3D(name="conv4b", filters=128, **params)(conv4)
    conv4 = keras.layers.Activation('relu')(conv4)
    # Decoder level 2: merge with the conv1 skip.
    if use_upsampling:
        up4 = keras.layers.concatenate([keras.layers.UpSampling3D(name="up4", size=(2, 2, 2))(conv4), conv1], axis=concat_axis)
    else:
        up4 = keras.layers.concatenate([keras.layers.Conv3DTranspose(name="transConv4", filters=128, data_format=data_format,
                                        kernel_size=(2, 2, 2), strides=(2, 2, 2), padding="same")(conv4), conv1], axis=concat_axis)
    conv5 = keras.layers.Conv3D(name="conv5a", filters=64, **params)(up4)
    conv5 = keras.layers.Activation('relu')(conv5)
    conv5 = keras.layers.Conv3D(name="conv5b", filters=32, **params)(conv5)
    conv5 = keras.layers.Activation('relu')(conv5)
    # 1x1x1 conv with sigmoid produces the per-voxel probability mask.
    pred_msk = keras.layers.Conv3D(name="PredictionMask", filters=n_out, kernel_size=(1, 1, 1),
                                   data_format=data_format, activation="sigmoid")(conv5)
    # conv5 feature maps are exposed as a second output alongside the mask.
    model = keras.models.Model(inputs=[inputs], outputs=[pred_msk, conv5])
    if print_summary:
        # model = keras.models.Model(inputs=[inputs], outputs=[class_pred])
        model.summary()
    # return pred
    return model
def create_UNET3D(input_img, use_upsampling=False, n_out=1, dropout=0.2,
                  print_summary = False):
    """
    3D U-Net model architecture creation.

    Three-pooling-level 3D U-Net with batch normalization after every
    convolution.  Outputs a sigmoid segmentation mask plus a scalar class
    prediction obtained by global average pooling of the mask.

    Args:
        input_img: input volume shape tuple, channels-last
            (presumably (D, H, W, C) -- TODO confirm against callers).
        use_upsampling: True -> UpSampling3D decoder; False -> Conv3DTranspose.
        n_out: number of channels of the sigmoid prediction mask.
        dropout: dropout rate used in encoder levels 3 and 4.
        print_summary: if True, print the Keras model summary.

    Returns:
        keras.models.Model with outputs [pred_msk, class_pred].
    """
    concat_axis = -1  # skip connections are concatenated on the channel axis
    data_format = "channels_last"
    keras.backend.set_learning_phase(True)
    # Don"t initialize variables on the fly
    keras.backend.manual_variable_initialization(False)
    inputs = keras.layers.Input(shape=input_img, name="Input_Image")
    # Shared Conv3D settings; activation=None so BatchNormalization and ReLU
    # can be applied as separate layers after each convolution.
    params = dict(kernel_size=(3, 3, 3), activation=None,
                  padding="same", data_format=data_format,
                  kernel_initializer="he_uniform")
    # Encoder level 1: 32 -> 64 filters.
    conv1 = keras.layers.Conv3D(name="conv1a", filters=32, **params)(inputs)
    conv1 = keras.layers.BatchNormalization(axis =-1)(conv1)
    conv1 = keras.layers.Activation('relu')(conv1)
    conv1 = keras.layers.Conv3D(name="conv1b", filters=64, **params)(conv1)
    conv1 = keras.layers.BatchNormalization(axis =-1)(conv1)
    conv1 = keras.layers.Activation('relu')(conv1)
    pool1 = keras.layers.MaxPooling3D(name="pool1", pool_size=(2, 2, 2))(conv1)
    # Encoder level 2: 64 -> 128 filters.
    conv2 = keras.layers.Conv3D(name="conv2a", filters=64, **params)(pool1)
    conv2 = keras.layers.BatchNormalization(axis =-1)(conv2)
    conv2 = keras.layers.Activation('relu')(conv2)
    conv2 = keras.layers.Conv3D(name="conv2b", filters=128, **params)(conv2)
    conv2 = keras.layers.BatchNormalization(axis =-1)(conv2)
    conv2 = keras.layers.Activation('relu')(conv2)
    pool2 = keras.layers.MaxPooling3D(name="pool2", pool_size=(2, 2, 2))(conv2)
    # Encoder level 3: 128 -> 256 filters, with dropout between the convs.
    conv3 = keras.layers.Conv3D(name="conv3a", filters=128, **params)(pool2)
    conv3 = keras.layers.BatchNormalization(axis =-1)(conv3)
    conv3 = keras.layers.Activation('relu')(conv3)
    conv3 = keras.layers.Dropout(dropout)(conv3)
    conv3 = keras.layers.Conv3D(name="conv3b", filters=256, **params)(conv3)
    conv3 = keras.layers.BatchNormalization(axis =-1)(conv3)
    conv3 = keras.layers.Activation('relu')(conv3)
    pool3 = keras.layers.MaxPooling3D(name="pool3", pool_size=(2, 2, 2))(conv3)
    # Bottleneck: 256 -> 512 filters, with dropout between the convs.
    conv4 = keras.layers.Conv3D(name="conv4a", filters=256, **params)(pool3)
    conv4 = keras.layers.BatchNormalization(axis =-1)(conv4)
    conv4 = keras.layers.Activation('relu')(conv4)
    conv4 = keras.layers.Dropout(dropout)(conv4)
    conv4 = keras.layers.Conv3D(name="conv4b", filters=512, **params)(conv4)
    conv4 = keras.layers.BatchNormalization(axis =-1)(conv4)
    conv4 = keras.layers.Activation('relu')(conv4)
    # Decoder level 1: upsample (or transpose-conv) and merge with the conv3 skip.
    if use_upsampling:
        up4 = keras.layers.concatenate([keras.layers.UpSampling3D(name="up4", size=(2, 2, 2))(conv4), conv3], axis=concat_axis)
    else:
        up4 = keras.layers.concatenate([keras.layers.Conv3DTranspose(name="transConv4", filters=512, data_format=data_format,
                                        kernel_size=(2, 2, 2), strides=(2, 2, 2), padding="same")(conv4), conv3], axis=concat_axis)
    conv5 = keras.layers.Conv3D(name="conv5a", filters=256, **params)(up4)
    conv5 = keras.layers.BatchNormalization(axis =-1)(conv5)
    conv5 = keras.layers.Activation('relu')(conv5)
    conv5 = keras.layers.Conv3D(name="conv5b", filters=256, **params)(conv5)
    conv5 = keras.layers.BatchNormalization(axis =-1)(conv5)
    conv5 = keras.layers.Activation('relu')(conv5)
    # Decoder level 2: merge with the conv2 skip.
    if use_upsampling:
        up5 = keras.layers.concatenate([keras.layers.UpSampling3D(name="up5", size=(2, 2, 2))(conv5), conv2], axis=concat_axis)
    else:
        up5 = keras.layers.concatenate([keras.layers.Conv3DTranspose(name="transConv5", filters=256, data_format=data_format,
                                        kernel_size=(2, 2, 2), strides=(2, 2, 2), padding="same")(conv5), conv2], axis=concat_axis)
    conv6 = keras.layers.Conv3D(name="conv6a", filters=128, **params)(up5)
    conv6 = keras.layers.BatchNormalization(axis =-1)(conv6)
    conv6 = keras.layers.Activation('relu')(conv6)
    conv6 = keras.layers.Conv3D(name="conv6b", filters=128, **params)(conv6)
    conv6 = keras.layers.BatchNormalization(axis =-1)(conv6)
    conv6 = keras.layers.Activation('relu')(conv6)
    # Decoder level 3: merge with the conv1 skip.
    if use_upsampling:
        up6 = keras.layers.concatenate([keras.layers.UpSampling3D(name="up6", size=(2, 2, 2))(conv6), conv1], axis=concat_axis)
    else:
        up6 = keras.layers.concatenate([keras.layers.Conv3DTranspose(name="transConv6", filters=128, data_format=data_format,
                                        kernel_size=(2, 2, 2), strides=(2, 2, 2), padding="same")(conv6), conv1], axis=concat_axis)
    conv7 = keras.layers.Conv3D(name="conv7a", filters=128, **params)(up6)
    conv7 = keras.layers.BatchNormalization(axis =-1)(conv7)
    conv7 = keras.layers.Activation('relu')(conv7)
    conv7 = keras.layers.Conv3D(name="conv7b", filters=128, **params)(conv7)
    conv7 = keras.layers.BatchNormalization(axis =-1)(conv7)
    conv7 = keras.layers.Activation('relu')(conv7)
    # 1x1x1 conv with sigmoid produces the per-voxel probability mask.
    pred_msk = keras.layers.Conv3D(name="PredictionMask", filters=n_out, kernel_size=(1, 1, 1),
                                   data_format=data_format, activation="sigmoid")(conv7)
    # Branch is created from conv7 which are feature maps
    # But global avg pooling on feature maps is not helping and hence changing back to pred_msk
    class_pred = keras.layers.GlobalAveragePooling3D(name='PredictionClass')(pred_msk)
    model = keras.models.Model(inputs=[inputs], outputs=[pred_msk,class_pred])
    if print_summary:
        model.summary()
    return model
def unet3D_ModelB_exp1(input_img, use_upsampling=False, n_out=1, dropout=0.2,
                       print_summary = False):
    """
    3D U-Net model (Model-B, experiment 1).

    Like create_UNET3D (three pooling levels, batch normalization on every
    convolution) but the final decoder stage narrows to 64 -> 32 filters and
    only the segmentation mask is returned (the classification branch is
    commented out below).

    Args:
        input_img: input volume shape tuple, channels-last
            (presumably (D, H, W, C) -- TODO confirm against callers).
        use_upsampling: True -> UpSampling3D decoder; False -> Conv3DTranspose.
        n_out: number of channels of the sigmoid prediction mask.
        dropout: dropout rate used in encoder levels 3 and 4.
        print_summary: if True, print the Keras model summary.

    Returns:
        keras.models.Model with a single output [pred_msk].
    """
    concat_axis = -1  # skip connections are concatenated on the channel axis
    data_format = "channels_last"
    print("3D U-Net Segmentation")
    # Set keras learning phase to train
    keras.backend.set_learning_phase(True)
    # Don"t initialize variables on the fly
    keras.backend.manual_variable_initialization(False)
    inputs = keras.layers.Input(shape=input_img, name="Input_Image")
    # Shared Conv3D settings; activation=None so BatchNormalization and ReLU
    # can be applied as separate layers after each convolution.
    params = dict(kernel_size=(3, 3, 3), activation=None,
                  padding="same", data_format=data_format,
                  kernel_initializer="he_uniform")
    # params = dict(kernel_size=(3, 3, 3), activation="relu",
    #               padding="same", data_format=data_format,
    #               kernel_initializer="he_uniform")
    # Encoder level 1: 32 -> 64 filters.
    conv1 = keras.layers.Conv3D(name="conv1a", filters=32, **params)(inputs)
    conv1 = keras.layers.BatchNormalization(axis =-1)(conv1)
    conv1 = keras.layers.Activation('relu')(conv1)
    conv1 = keras.layers.Conv3D(name="conv1b", filters=64, **params)(conv1)
    conv1 = keras.layers.BatchNormalization(axis =-1)(conv1)
    conv1 = keras.layers.Activation('relu')(conv1)
    pool1 = keras.layers.MaxPooling3D(name="pool1", pool_size=(2, 2, 2))(conv1)
    # Encoder level 2: 64 -> 128 filters.
    conv2 = keras.layers.Conv3D(name="conv2a", filters=64, **params)(pool1)
    conv2 = keras.layers.BatchNormalization(axis =-1)(conv2)
    conv2 = keras.layers.Activation('relu')(conv2)
    conv2 = keras.layers.Conv3D(name="conv2b", filters=128, **params)(conv2)
    conv2 = keras.layers.BatchNormalization(axis =-1)(conv2)
    conv2 = keras.layers.Activation('relu')(conv2)
    pool2 = keras.layers.MaxPooling3D(name="pool2", pool_size=(2, 2, 2))(conv2)
    # Encoder level 3: 128 -> 256 filters, with dropout between the convs.
    conv3 = keras.layers.Conv3D(name="conv3a", filters=128, **params)(pool2)
    conv3 = keras.layers.BatchNormalization(axis =-1)(conv3)
    conv3 = keras.layers.Activation('relu')(conv3)
    conv3 = keras.layers.Dropout(dropout)(conv3)  ### Trying dropout layers earlier on, as indicated in the paper
    conv3 = keras.layers.Conv3D(name="conv3b", filters=256, **params)(conv3)
    conv3 = keras.layers.BatchNormalization(axis =-1)(conv3)
    conv3 = keras.layers.Activation('relu')(conv3)
    pool3 = keras.layers.MaxPooling3D(name="pool3", pool_size=(2, 2, 2))(conv3)
    # Bottleneck: 256 -> 512 filters, with dropout between the convs.
    conv4 = keras.layers.Conv3D(name="conv4a", filters=256, **params)(pool3)
    conv4 = keras.layers.BatchNormalization(axis =-1)(conv4)
    conv4 = keras.layers.Activation('relu')(conv4)
    conv4 = keras.layers.Dropout(dropout)(conv4)  ### Trying dropout layers earlier on, as indicated in the paper
    conv4 = keras.layers.Conv3D(name="conv4b", filters=512, **params)(conv4)
    conv4 = keras.layers.BatchNormalization(axis =-1)(conv4)
    conv4 = keras.layers.Activation('relu')(conv4)
    # Decoder level 1: upsample (or transpose-conv) and merge with the conv3 skip.
    if use_upsampling:
        up4 = keras.layers.concatenate([keras.layers.UpSampling3D(name="up4", size=(2, 2, 2))(conv4), conv3], axis=concat_axis)
    else:
        up4 = keras.layers.concatenate([keras.layers.Conv3DTranspose(name="transConv4", filters=512, data_format=data_format,
                                        kernel_size=(2, 2, 2), strides=(2, 2, 2), padding="same")(conv4), conv3], axis=concat_axis)
    conv5 = keras.layers.Conv3D(name="conv5a", filters=256, **params)(up4)
    conv5 = keras.layers.BatchNormalization(axis =-1)(conv5)
    conv5 = keras.layers.Activation('relu')(conv5)
    conv5 = keras.layers.Conv3D(name="conv5b", filters=256, **params)(conv5)
    conv5 = keras.layers.BatchNormalization(axis =-1)(conv5)
    conv5 = keras.layers.Activation('relu')(conv5)
    # Decoder level 2: merge with the conv2 skip.
    if use_upsampling:
        up5 = keras.layers.concatenate([keras.layers.UpSampling3D(name="up5", size=(2, 2, 2))(conv5), conv2], axis=concat_axis)
    else:
        up5 = keras.layers.concatenate([keras.layers.Conv3DTranspose(name="transConv5", filters=256, data_format=data_format,
                                        kernel_size=(2, 2, 2), strides=(2, 2, 2), padding="same")(conv5), conv2], axis=concat_axis)
    conv6 = keras.layers.Conv3D(name="conv6a", filters=128, **params)(up5)
    conv6 = keras.layers.BatchNormalization(axis =-1)(conv6)
    conv6 = keras.layers.Activation('relu')(conv6)
    conv6 = keras.layers.Conv3D(name="conv6b", filters=128, **params)(conv6)
    conv6 = keras.layers.BatchNormalization(axis =-1)(conv6)
    conv6 = keras.layers.Activation('relu')(conv6)
    # Decoder level 3: merge with the conv1 skip; narrows to 64 -> 32 filters.
    if use_upsampling:
        up6 = keras.layers.concatenate([keras.layers.UpSampling3D(name="up6", size=(2, 2, 2))(conv6), conv1], axis=concat_axis)
    else:
        up6 = keras.layers.concatenate([keras.layers.Conv3DTranspose(name="transConv6", filters=128, data_format=data_format,
                                        kernel_size=(2, 2, 2), strides=(2, 2, 2), padding="same")(conv6), conv1], axis=concat_axis)
    conv7 = keras.layers.Conv3D(name="conv7a", filters=64, **params)(up6)
    conv7 = keras.layers.BatchNormalization(axis =-1)(conv7)
    conv7 = keras.layers.Activation('relu')(conv7)
    conv7 = keras.layers.Conv3D(name="conv7b", filters=32, **params)(conv7)
    conv7 = keras.layers.BatchNormalization(axis =-1)(conv7)
    conv7 = keras.layers.Activation('relu')(conv7)
    # 1x1x1 conv with sigmoid produces the per-voxel probability mask.
    pred_msk = keras.layers.Conv3D(name="PredictionMask", filters=n_out, kernel_size=(1, 1, 1),
                                   data_format=data_format, activation="sigmoid")(conv7)
    # Branch is created from conv7 which are feature maps
    # But global avg pooling on feature maps is not helping and hence changing back to pred_msk
    # class_pred = keras.layers.GlobalAveragePooling3D(name='PredictionClass')(pred_msk)
    model = keras.models.Model(inputs=[inputs], outputs=[pred_msk])
    # model = keras.models.Model(inputs=[inputs], outputs=[pred_msk,class_pred])
    if print_summary:
        # model = keras.models.Model(inputs=[inputs], outputs=[class_pred])
        model.summary()
    return model
| 44.426357
| 218
| 0.720119
| 2,406
| 17,193
| 5.057772
| 0.111388
| 0.147342
| 0.057277
| 0.070754
| 0.877886
| 0.859643
| 0.853234
| 0.83573
| 0.83384
| 0.825787
| 0
| 0.051282
| 0.122143
| 17,193
| 386
| 219
| 44.541451
| 0.754986
| 0.133136
| 0
| 0.726592
| 0
| 0
| 0.052322
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029963
| false
| 0
| 0.048689
| 0
| 0.108614
| 0.026217
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e2e83308d80252954ede642c21e4f817e1f4c438
| 23,608
|
py
|
Python
|
coop_cms/apps/newsletters/tests/test_sending.py
|
ljean/coop_cms
|
531f65ceb9ad82c113597d15b764dbcf51264794
|
[
"BSD-3-Clause"
] | 3
|
2016-01-29T10:55:09.000Z
|
2022-03-08T16:02:12.000Z
|
coop_cms/apps/newsletters/tests/test_sending.py
|
ljean/coop_cms
|
531f65ceb9ad82c113597d15b764dbcf51264794
|
[
"BSD-3-Clause"
] | 11
|
2015-03-07T17:30:24.000Z
|
2016-07-13T09:40:43.000Z
|
coop_cms/apps/newsletters/tests/test_sending.py
|
ljean/coop_cms
|
531f65ceb9ad82c113597d15b764dbcf51264794
|
[
"BSD-3-Clause"
] | 5
|
2018-08-30T09:03:22.000Z
|
2019-09-10T13:01:56.000Z
|
# -*- coding: utf-8 -*-
"""test email sending"""
from datetime import datetime
from unittest import skipIf
from django.conf import settings
from django.contrib.sites.models import Site
from django.core import management
from django.core import mail
from django.test.utils import override_settings
from django.urls import reverse
from django.utils.translation import activate
from coop_cms.models import Newsletter
from coop_cms.tests import BaseTestCase
from coop_cms.utils import get_url_in_language
from model_mommy import mommy
from .. import models
class SendEmailingTest(BaseTestCase):
def setUp(self):
    """Activate the default language and configure sending settings/site."""
    default_language = settings.LANGUAGES[0][0]
    activate(default_language)
    super(SendEmailingTest, self).setUp()
    settings.COOP_CMS_FROM_EMAIL = 'toto@toto.fr'
    settings.COOP_CMS_REPLY_TO = 'titi@toto.fr'
    current_site = Site.objects.get_current()
    current_site.domain = settings.COOP_CMS_SITE_PREFIX
    current_site.save()
def tearDown(self):
    """Restore the default language after each test."""
    default_language = settings.LANGUAGES[0][0]
    activate(default_language)
@override_settings(COOP_CMS_REPLY_TO="")
def test_send_newsletter(self):
    """A scheduled emailing sends one personalized email per contact.

    Checks: status transitions to SENT, the send/sent queues are updated,
    and each message contains the contact name, view-online/unsubscribe
    links, List-Unsubscribe header and untouched mailto/anchor links.
    """
    names = ['alpha', 'beta', 'gamma']
    contacts = [
        mommy.make(models.Contact, email=name + '@toto.fr')
        for name in names
    ]
    # #!-fullname-!# is a placeholder substituted with the contact fullname.
    content = '<h2>Hello #!-fullname-!#!</h2><p>Visit <a href="http://toto.fr">us</a>'
    content += '<a href="mailto:me@me.fr">mailme</a><a href="#art1">internal link</a></p>'
    newsletter_data = {
        'subject': 'This is the subject',
        'content': content,
        'template': 'test/newsletter_contact.html'
    }
    newsletter = mommy.make(Newsletter, **newsletter_data)
    site = Site.objects.get_current()
    site.domain = "toto.fr"
    site.save()
    # Scheduled in the past and never sent: eligible for the send command.
    emailing = mommy.make(
        models.Emailing,
        newsletter=newsletter,
        status=models.Emailing.STATUS_SCHEDULED,
        scheduling_dt=datetime.now(),
        sending_dt=None,
        subscription_type=mommy.make(models.SubscriptionType, site=site)
    )
    for contact in contacts:
        emailing.send_to.add(contact)
    emailing.save()
    management.call_command('send_newsletters', verbosity=0)
    emailing = models.Emailing.objects.get(id=emailing.id)
    # Check emailing status
    self.assertEqual(emailing.status, models.Emailing.STATUS_SENT)
    self.assertNotEqual(emailing.sending_dt, None)
    self.assertEqual(emailing.send_to.count(), 0)
    self.assertEqual(emailing.sent_to.count(), len(contacts))
    self.assertEqual(len(mail.outbox), len(contacts))
    # Sort both sides by address so emails and contacts line up pairwise.
    outbox = list(mail.outbox)
    outbox.sort(key=lambda _elt: _elt.to)
    contacts.sort(key=lambda _contact: _contact.email)
    for email, contact in zip(outbox, contacts):
        viewonline_url = emailing.get_domain_url_prefix() + reverse(
            'newsletters:view_online', args=[emailing.id, contact.uuid]
        )
        unsubscribe_url = emailing.get_domain_url_prefix() + reverse(
            'newsletters:unregister', args=[emailing.id, contact.uuid]
        )
        self.assertEqual(email.to, [contact.email])
        self.assertEqual(email.from_email, settings.COOP_CMS_FROM_EMAIL)
        self.assertEqual(email.subject, newsletter_data['subject'])
        self.assertTrue(email.body.find(contact.fullname) >= 0)
        # COOP_CMS_REPLY_TO is overridden to "": no Reply-To header expected.
        self.assertEqual(email.extra_headers.get('Reply-To', ''), '')
        self.assertEqual(
            email.extra_headers['List-Unsubscribe'],
            '<{0}>, <mailto:{1}?subject=unsubscribe>'.format(unsubscribe_url, email.from_email)
        )
        self.assertTrue(email.body.find(contact.fullname) >= 0)
        self.assertTrue(email.alternatives[0][1], "text/html")
        self.assertTrue(email.alternatives[0][0].find(contact.fullname) >= 0)
        self.assertTrue(email.alternatives[0][0].find(viewonline_url) >= 0)
        self.assertTrue(email.alternatives[0][0].find(unsubscribe_url) >= 0)
        # Check mailto links are not magic
        self.assertTrue(email.alternatives[0][0].find("mailto:me@me.fr") > 0)
        # Check internal anchor links are not magic
        self.assertTrue(email.alternatives[0][0].find("#art1") > 0)
    # Check magic (tracking) links were generated for the http link.
    self.assertTrue(models.MagicLink.objects.count() > 0)
@skipIf(len(settings.LANGUAGES) < 2, "LANGUAGES less than 2")
@override_settings(COOP_CMS_REPLY_TO="")
def test_send_newsletter_language(self):
    """Emails are sent in the subscription type's language.

    The subscription type is created with lang=trans_lang, so every
    contact should receive the translated subject/content and a link to
    view the newsletter online in the other language.
    """
    names = ['alpha', 'beta', 'gamma']
    contacts = [
        mommy.make(
            models.Contact,
            email=name+'@toto.fr',
            last_name=name.capitalize(),
            first_name=name,
        ) for name in names
    ]
    origin_lang = settings.LANGUAGES[0][0]
    trans_lang = settings.LANGUAGES[1][0]
    # The {0} placeholder embeds the language code in subject/content so
    # the assertions below can tell which translation was sent.
    content = '<h2>Hello #!-fullname-!#!</h2><p>{0}Visit <a href="http://toto.{0}">{0}</a>'
    content += '<a href="mailto:me@me.{0}">mailme</a><a href="#art1">internal link</a></p>'
    newsletter_data = {
        'subject_' + origin_lang: 'This is the {0} subject'.format(origin_lang),
        'subject_' + trans_lang: 'This is the {0} subject'.format(trans_lang),
        'content_' + origin_lang: content.format(origin_lang, ),
        'content_' + trans_lang: content.format(trans_lang),
        'template': 'test/newsletter_contact_lang.html'
    }
    newsletter = mommy.make(Newsletter, **newsletter_data)
    site = Site.objects.get_current()
    site.domain = "toto.fr"
    site.save()
    # Subscription type pinned to the translated language.
    emailing = mommy.make(
        models.Emailing,
        newsletter=newsletter,
        status=models.Emailing.STATUS_SCHEDULED,
        scheduling_dt=datetime.now(),
        sending_dt=None,
        subscription_type=mommy.make(models.SubscriptionType, site=site, lang=trans_lang),
    )
    for contact in contacts:
        emailing.send_to.add(contact)
    emailing.save()
    management.call_command('send_newsletters', verbosity=0)
    emailing = models.Emailing.objects.get(id=emailing.id)
    # Check emailing status
    self.assertEqual(emailing.status, models.Emailing.STATUS_SENT)
    self.assertNotEqual(emailing.sending_dt, None)
    self.assertEqual(emailing.send_to.count(), 0)
    self.assertEqual(emailing.sent_to.count(), len(contacts))
    self.assertEqual(len(mail.outbox), len(contacts))
    # Sort both sides by address so emails and contacts line up pairwise.
    outbox = list(mail.outbox)
    outbox.sort(key=lambda _elt: _elt.to)
    contacts.sort(key=lambda _contact: _contact.email)
    # Reverse URLs in the translated language to match what was sent.
    activate(trans_lang)
    for email, contact in zip(outbox, contacts):
        viewonline_url = emailing.get_domain_url_prefix() + reverse(
            'newsletters:view_online', args=[emailing.id, contact.uuid]
        )
        unsubscribe_url = emailing.get_domain_url_prefix() + reverse(
            'newsletters:unregister', args=[emailing.id, contact.uuid]
        )
        view_en_url = reverse("newsletters:view_online_lang", args=[emailing.id, contact.uuid, 'en'])
        self.assertEqual(email.to, [contact.email])
        self.assertEqual(email.from_email, settings.COOP_CMS_FROM_EMAIL)
        self.assertEqual(email.subject, newsletter_data['subject_' + trans_lang])
        # COOP_CMS_REPLY_TO is overridden to "": no Reply-To header expected.
        self.assertEqual(email.extra_headers.get('Reply-To', ''), '')
        self.assertEqual(
            email.extra_headers['List-Unsubscribe'],
            '<{0}>, <mailto:{1}?subject=unsubscribe>'.format(unsubscribe_url, email.from_email)
        )
        self.assertTrue(email.body.find(contact.fullname) >= 0)
        self.assertTrue(email.alternatives[0][1], "text/html")
        self.assertTrue(email.alternatives[0][0].find(contact.fullname) >= 0)
        # Check links are not magic
        self.assertTrue(email.alternatives[0][0].find(viewonline_url) >= 0)
        self.assertTrue(email.alternatives[0][0].find(unsubscribe_url) >= 0)
        self.assertTrue(email.alternatives[0][0].find(view_en_url) >= 0)
        # Check mailto links are not magic
        self.assertTrue(email.alternatives[0][0].find("mailto:me@me.{0}".format(trans_lang)) > 0)
        # Check internal links are not magic
        self.assertTrue(email.alternatives[0][0].find("#art1") > 0)
    # Check magic (tracking) links were generated for the http link.
    self.assertTrue(models.MagicLink.objects.count() > 0)
@skipIf(len(settings.LANGUAGES) < 2, "LANGUAGES less than 2")
@override_settings(COOP_CMS_REPLY_TO="")
def test_send_newsletter_contact_language(self):
    """test that we use the favorite language of the contact when sending him a newsletter

    The subscription type has no language (lang=''), so each contact
    falls back to their own favorite_language (or the default origin
    language when unset); subject, URLs and mailto links are expected
    in that per-contact language.
    """
    origin_lang = settings.LANGUAGES[0][0]
    trans_lang = settings.LANGUAGES[1][0]
    names = ['alpha', 'beta', 'gamma']
    # One contact with no preference, one per language.
    langs = ['', origin_lang, trans_lang]
    contacts = [
        mommy.make(
            models.Contact,
            email=name+'@toto.fr',
            last_name=name.capitalize(),
            first_name=name,
            favorite_language=lang
        ) for (name, lang) in zip(names, langs)
    ]
    # The {0} placeholder embeds the language code so assertions can tell
    # which translation was sent.
    content = '<h2>Hello #!-fullname-!#!</h2><p>{0}Visit <a href="http://toto.{0}">{0}</a>'
    content += '<a href="mailto:me@me.{0}">mailme</a><a href="#art1">internal link</a></p>'
    newsletter_data = {
        'subject_' + origin_lang: 'This is the {0} subject'.format(origin_lang),
        'subject_' + trans_lang: 'This is the {0} subject'.format(trans_lang),
        'content_' + origin_lang: content.format(origin_lang),
        'content_' + trans_lang: content.format(trans_lang),
        'template': 'test/newsletter_contact.html'
    }
    newsletter = mommy.make(Newsletter, **newsletter_data)
    site = Site.objects.get_current()
    site.domain = "toto.fr"
    site.save()
    # Subscription type without a language: per-contact fallback applies.
    emailing = mommy.make(
        models.Emailing,
        newsletter=newsletter,
        status=models.Emailing.STATUS_SCHEDULED,
        scheduling_dt=datetime.now(),
        sending_dt=None,
        subscription_type=mommy.make(models.SubscriptionType, site=site, lang=''),
    )
    for contact in contacts:
        emailing.send_to.add(contact)
    emailing.save()
    management.call_command('send_newsletters', verbosity=0)
    emailing = models.Emailing.objects.get(id=emailing.id)
    activate(origin_lang)
    # Check emailing status
    self.assertEqual(emailing.status, models.Emailing.STATUS_SENT)
    self.assertNotEqual(emailing.sending_dt, None)
    self.assertEqual(emailing.send_to.count(), 0)
    self.assertEqual(emailing.sent_to.count(), len(contacts))
    self.assertEqual(len(mail.outbox), len(contacts))
    # Sort both sides by address so emails and contacts line up pairwise.
    # NOTE(review): contacts are sorted but langs keeps its creation order;
    # this relies on 'alpha'/'beta'/'gamma' already being email-sorted.
    outbox = list(mail.outbox)
    outbox.sort(key=lambda _elt: _elt.to)
    contacts.sort(key=lambda _contact: _contact.email)
    activate(trans_lang)
    for email, contact, lang in zip(outbox, contacts, langs):
        viewonline_url = reverse(
            'newsletters:view_online', args=[emailing.id, contact.uuid]
        )
        unsubscribe_url = reverse(
            'newsletters:unregister', args=[emailing.id, contact.uuid]
        )
        # Empty favorite_language falls back to the default language.
        contact_lang = lang or origin_lang
        viewonline_url = get_url_in_language(viewonline_url, contact_lang)
        unsubscribe_url = get_url_in_language(unsubscribe_url, contact_lang)
        viewonline_url = emailing.get_domain_url_prefix() + viewonline_url
        unsubscribe_url = emailing.get_domain_url_prefix() + unsubscribe_url
        self.assertEqual(email.to, [contact.email])
        self.assertEqual(email.from_email, settings.COOP_CMS_FROM_EMAIL)
        self.assertEqual(email.subject, newsletter_data['subject_' + contact_lang])
        # COOP_CMS_REPLY_TO is overridden to "": no Reply-To header expected.
        self.assertEqual(email.extra_headers.get('Reply-To', ''), '')
        self.assertEqual(
            email.extra_headers['List-Unsubscribe'],
            '<{0}>, <mailto:{1}?subject=unsubscribe>'.format(unsubscribe_url, email.from_email)
        )
        self.assertTrue(email.body.find(contact.fullname) >= 0)
        self.assertTrue(email.alternatives[0][1], "text/html")
        self.assertTrue(email.alternatives[0][0].find(contact.fullname) >= 0)
        self.assertTrue(email.alternatives[0][0].find(viewonline_url) >= 0)
        self.assertTrue(email.alternatives[0][0].find(unsubscribe_url) >= 0)
        # Check mailto links are not magic
        self.assertTrue(email.alternatives[0][0].find("mailto:me@me.{0}".format(contact_lang)) > 0)
        # Check internal anchor links are not magic
        self.assertTrue(email.alternatives[0][0].find("#art1") > 0)
    # Check magic (tracking) links were generated for the http link.
    self.assertTrue(models.MagicLink.objects.count() > 0)
@override_settings(COOP_CMS_REPLY_TO="reply_to@toto.fr")
def test_send_newsletter_reply_to(self):
    """When COOP_CMS_REPLY_TO is set, it appears as the Reply-To header
    and as the mailto target of the List-Unsubscribe header."""
    names = ['alpha', 'beta', 'gamma']
    contacts = [
        mommy.make(models.Contact, email=name+'@toto.fr', last_name=name.capitalize())
        for name in names
    ]
    # #!-fullname-!# is a placeholder substituted with the contact fullname.
    content = '<h2>Hello #!-fullname-!#!</h2><p>Visit <a href="http://toto.fr">us</a>'
    content += '<a href="mailto:me@me.fr">mailme</a><a href="#art1">internal link</a></p>'
    newsletter_data = {
        'subject': 'This is the subject',
        'content': content,
        'template': 'test/newsletter_contact.html'
    }
    newsletter = mommy.make(Newsletter, **newsletter_data)
    site = Site.objects.get_current()
    site.domain = "toto.fr"
    site.save()
    # Scheduled in the past and never sent: eligible for the send command.
    emailing = mommy.make(
        models.Emailing,
        newsletter=newsletter,
        status=models.Emailing.STATUS_SCHEDULED,
        scheduling_dt=datetime.now(),
        sending_dt=None,
        subscription_type=mommy.make(models.SubscriptionType, site=site)
    )
    for contact in contacts:
        emailing.send_to.add(contact)
    emailing.save()
    management.call_command('send_newsletters', verbosity=0)
    emailing = models.Emailing.objects.get(id=emailing.id)
    # Check emailing status
    self.assertEqual(emailing.status, models.Emailing.STATUS_SENT)
    self.assertNotEqual(emailing.sending_dt, None)
    self.assertEqual(emailing.send_to.count(), 0)
    self.assertEqual(emailing.sent_to.count(), len(contacts))
    self.assertEqual(len(mail.outbox), len(contacts))
    # Sort both sides by address so emails and contacts line up pairwise.
    outbox = list(mail.outbox)
    outbox.sort(key=lambda _elt: _elt.to)
    contacts.sort(key=lambda _contact: _contact.email)
    for email, contact in zip(outbox, contacts):
        viewonline_url = reverse(
            'newsletters:view_online', args=[emailing.id, contact.uuid]
        )
        unsubscribe_url = reverse(
            'newsletters:unregister', args=[emailing.id, contact.uuid]
        )
        viewonline_url = emailing.get_domain_url_prefix() + viewonline_url
        unsubscribe_url = emailing.get_domain_url_prefix() + unsubscribe_url
        self.assertEqual(email.to, [contact.email])
        self.assertEqual(email.from_email, settings.COOP_CMS_FROM_EMAIL)
        self.assertEqual(email.subject, newsletter_data['subject'])
        # Reply-To and the unsubscribe mailto both use COOP_CMS_REPLY_TO.
        self.assertEqual(email.extra_headers['Reply-To'], settings.COOP_CMS_REPLY_TO)
        self.assertEqual(
            email.extra_headers['List-Unsubscribe'],
            '<{0}>, <mailto:{1}?subject=unsubscribe>'.format(unsubscribe_url, settings.COOP_CMS_REPLY_TO)
        )
        self.assertTrue(email.body.find(contact.fullname) >= 0)
        self.assertTrue(email.alternatives[0][1], "text/html")
        self.assertTrue(email.alternatives[0][0].find(contact.fullname) >= 0)
        self.assertTrue(email.alternatives[0][0].find(viewonline_url) >= 0)
        self.assertTrue(email.alternatives[0][0].find(unsubscribe_url) >= 0)
        # Check mailto links are not magic
        self.assertTrue(email.alternatives[0][0].find("mailto:me@me.fr") > 0)
        # Check internal anchor links are not magic
        self.assertTrue(email.alternatives[0][0].find("#art1") > 0)
    # Check magic (tracking) links were generated for the http link.
    self.assertTrue(models.MagicLink.objects.count() > 0)
@override_settings(COOP_CMS_REPLY_TO="")
def test_send_newsletter_from_email(self):
    """When the subscription type defines from_email, emails are sent from
    that address and it is used in the List-Unsubscribe mailto."""
    names = ['alpha', 'beta', 'gamma']
    contacts = [
        mommy.make(models.Contact, email=name+'@toto.fr', last_name=name.capitalize())
        for name in names
    ]
    # #!-fullname-!# is a placeholder substituted with the contact fullname.
    content = '<h2>Hello #!-fullname-!#!</h2><p>Visit <a href="http://toto.fr">us</a>'
    content += '<a href="mailto:me@me.fr">mailme</a><a href="#art1">internal link</a></p>'
    newsletter_data = {
        'subject': 'This is the subject',
        'content': content,
        'template': 'test/newsletter_contact.html'
    }
    newsletter = mommy.make(Newsletter, **newsletter_data)
    site = Site.objects.get_current()
    site.domain = "toto.fr"
    site.save()
    # Subscription type carries its own sender address.
    emailing = mommy.make(
        models.Emailing,
        newsletter=newsletter,
        status=models.Emailing.STATUS_SCHEDULED,
        scheduling_dt=datetime.now(),
        sending_dt=None,
        subscription_type=mommy.make(models.SubscriptionType, site=site, from_email="abcd@defg.fr"),
    )
    for contact in contacts:
        emailing.send_to.add(contact)
    emailing.save()
    management.call_command('send_newsletters', verbosity=0)
    emailing = models.Emailing.objects.get(id=emailing.id)
    # Check emailing status
    self.assertEqual(emailing.status, models.Emailing.STATUS_SENT)
    self.assertNotEqual(emailing.sending_dt, None)
    self.assertEqual(emailing.send_to.count(), 0)
    self.assertEqual(emailing.sent_to.count(), len(contacts))
    self.assertEqual(len(mail.outbox), len(contacts))
    # Sort both sides by address so emails and contacts line up pairwise.
    outbox = list(mail.outbox)
    outbox.sort(key=lambda _elt: _elt.to)
    contacts.sort(key=lambda _contact: _contact.email)
    for email, contact in zip(outbox, contacts):
        viewonline_url = emailing.get_domain_url_prefix() + reverse(
            'newsletters:view_online', args=[emailing.id, contact.uuid]
        )
        unsubscribe_url = emailing.get_domain_url_prefix() + reverse(
            'newsletters:unregister', args=[emailing.id, contact.uuid]
        )
        self.assertEqual(email.to, [contact.email])
        # Sender comes from the emailing (subscription type), not settings.
        self.assertEqual(email.from_email, emailing.from_email)
        self.assertEqual(email.subject, newsletter_data['subject'])
        self.assertEqual(
            email.extra_headers['List-Unsubscribe'],
            '<{0}>, <mailto:{1}?subject=unsubscribe>'.format(unsubscribe_url, emailing.from_email)
        )
        self.assertTrue(email.body.find(contact.fullname) >= 0)
        self.assertTrue(email.alternatives[0][1], "text/html")
        self.assertTrue(email.alternatives[0][0].find(contact.fullname) >= 0)
        self.assertTrue(email.alternatives[0][0].find(viewonline_url) >= 0)
        self.assertTrue(email.alternatives[0][0].find(unsubscribe_url) >= 0)
        # Check mailto links are not magic
        self.assertTrue(email.alternatives[0][0].find("mailto:me@me.fr") > 0)
        # Check internal anchor links are not magic
        self.assertTrue(email.alternatives[0][0].find("#art1") > 0)
    # Check magic (tracking) links were generated for the http link.
    self.assertTrue(models.MagicLink.objects.count() > 0)
@override_settings(COOP_CMS_REPLY_TO="")
def test_send_newsletter_check_unregister_name(self):
names = ['alpha', 'beta', 'gamma']
contacts = [
mommy.make(models.Contact, email=name + '@toto.fr', last_name=name.capitalize())
for name in names
]
content = '<h2>Hello #!-fullname-!#!</h2><p>Visit <a href="http://toto.fr">us</a>'
content += '<a href="mailto:me@me.fr">mailme</a><a href="#art1">internal link</a></p>'
newsletter_data = {
'subject': 'This is the subject',
'content': content,
'template': 'test/newsletter_contact.html'
}
newsletter = mommy.make(Newsletter, **newsletter_data)
site = Site.objects.get_current()
site.domain = "toto.fr"
site.save()
subscription_1 = mommy.make(models.SubscriptionType, site=site, name="MY_COMPANY_#1")
subscription_2 = mommy.make(models.SubscriptionType, site=site, name="MY_COMPANY_#2", from_email="abcd@defg.fr")
emailing = mommy.make(
models.Emailing,
newsletter=newsletter,
status=models.Emailing.STATUS_SCHEDULED,
scheduling_dt=datetime.now(),
sending_dt=None,
subscription_type=subscription_2,
)
for contact in contacts:
emailing.send_to.add(contact)
emailing.save()
management.call_command('send_newsletters', verbosity=0)
emailing = models.Emailing.objects.get(id=emailing.id)
# Check emailing status
self.assertEqual(emailing.status, models.Emailing.STATUS_SENT)
self.assertNotEqual(emailing.sending_dt, None)
self.assertEqual(emailing.send_to.count(), 0)
self.assertEqual(emailing.sent_to.count(), len(contacts))
self.assertEqual(len(mail.outbox), len(contacts))
outbox = list(mail.outbox)
outbox.sort(key=lambda _elt: _elt.to)
contacts.sort(key=lambda _contact: _contact.email)
for email, contact in zip(outbox, contacts):
viewonline_url = emailing.get_domain_url_prefix() + reverse(
'newsletters:view_online', args=[emailing.id, contact.uuid]
)
unsubscribe_url = emailing.get_domain_url_prefix() + reverse(
'newsletters:unregister', args=[emailing.id, contact.uuid]
)
self.assertFalse(email.body.find(subscription_1.name) >= 0)
self.assertTrue(email.body.find(subscription_2.name) >= 0)
self.assertEqual(email.to, [contact.email])
self.assertEqual(email.from_email, emailing.from_email)
self.assertEqual(email.subject, newsletter_data['subject'])
self.assertEqual(
email.extra_headers['List-Unsubscribe'],
'<{0}>, <mailto:{1}?subject=unsubscribe>'.format(unsubscribe_url, emailing.from_email)
)
self.assertTrue(email.body.find(contact.fullname) >= 0)
self.assertTrue(email.alternatives[0][1], "text/html")
self.assertTrue(email.alternatives[0][0].find(contact.fullname) >= 0)
self.assertTrue(email.alternatives[0][0].find(viewonline_url) >= 0)
self.assertTrue(email.alternatives[0][0].find(unsubscribe_url) >= 0)
# Check mailto links are not magic
self.assertTrue(email.alternatives[0][0].find("mailto:me@me.fr") > 0)
# Check mailto links are not magic
self.assertTrue(email.alternatives[0][0].find("#art1") > 0)
# Check magic links
self.assertTrue(models.MagicLink.objects.count() > 0)
| 49.596639
| 120
| 0.61979
| 2,694
| 23,608
| 5.288048
| 0.060876
| 0.054752
| 0.060017
| 0.080514
| 0.917029
| 0.902639
| 0.89576
| 0.893303
| 0.893303
| 0.880668
| 0
| 0.011656
| 0.251398
| 23,608
| 475
| 121
| 49.701053
| 0.794432
| 0.033421
| 0
| 0.784722
| 0
| 0.027778
| 0.116885
| 0.051574
| 0
| 0
| 0
| 0
| 0.25463
| 1
| 0.018519
| false
| 0
| 0.032407
| 0
| 0.053241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
390d6679c99f83ec1e72c2a8055a612a4a382fd2
| 2,062
|
py
|
Python
|
demos/quote_front_test.py
|
sea1812/ofx
|
573d8efd8128c9b9a9b9e7c2adb7162aea2760ca
|
[
"Apache-2.0"
] | 3
|
2018-07-21T03:07:31.000Z
|
2020-01-03T14:52:52.000Z
|
demos/quote_front_test.py
|
sea1812/ofx
|
573d8efd8128c9b9a9b9e7c2adb7162aea2760ca
|
[
"Apache-2.0"
] | null | null | null |
demos/quote_front_test.py
|
sea1812/ofx
|
573d8efd8128c9b9a9b9e7c2adb7162aea2760ca
|
[
"Apache-2.0"
] | null | null | null |
# encoding: UTF-8
# Smoke-test client for the quote front server: connects over TCP, sends
# encrypted JSON commands and prints the decrypted replies (Python 2 script).
from mods import octkeypass
from socket import *
import json
import config
import time

host = "192.168.0.64"  # quote-front server address — adjust for your network
port = 1025  # quote-front listening port
buffersize=1  # NOTE(review): unused below — presumably a leftover; confirm
addr = (host,port)
# Module-level connection shared by every test_* helper below; connecting at
# import time means the script blocks here until the server is reachable.
client = socket()
client.connect(addr)
print 'connected'
def test_100001():
cmd = dict()
cmd['cmd']=100001
p = dict()
p['code']='399006'
p['market']='SZ'
cmd['params']=p
cstr = json.dumps(cmd)
cstr = octkeypass.encrypt(config.superkey, cstr)
print cstr
client.sendall("%s\r\n" % cstr)
c= client.makefile().readline()
print octkeypass.extract(c)
def test_100002():
cmd = dict()
cmd['cmd']=100002
p = dict()
p['code']='399006'
p['market']='SZ'
cmd['params']=p
cstr = json.dumps(cmd)
cstr = octkeypass.encrypt(config.superkey, cstr)
print cstr
client.sendall("%s\r\n" % cstr)
c = client.makefile().readline()
print octkeypass.extract(c)
def test_100003():
cmd = dict()
cmd['cmd']=100003
p = []
r = dict()
r['code']='399006'
r['market']='SZ'
p.append(r)
r = dict()
r['code']='399001'
r['market']='SZ'
p.append(r)
r = dict()
r['code']='300359'
r['market']='SZ'
p.append(r)
cmd['params']=p
cstr = json.dumps(cmd)
cstr = octkeypass.encrypt(config.superkey, cstr)
#print cstr
client.sendall("%s\r\n" % cstr)
c = client.makefile().readline()
print octkeypass.extract(c)
def test_110001():
cmd = dict()
cmd['cmd']=110001
p = []
r = dict()
r['code']='399006'
r['market']='SZ'
p.append(r)
r = dict()
r['code']='399001'
r['market']='SZ'
p.append(r)
r = dict()
r['code']='300359'
r['market']='SZ'
p.append(r)
cmd['params']=p
cstr = json.dumps(cmd)
cstr = octkeypass.encrypt(config.superkey, cstr)
print cstr
client.sendall("%s\r\n" % cstr)
c = client.makefile().readline()
print octkeypass.extract(c)
#test_100001()
#test_100002()
# Poll the batch-quote endpoint once per second, forever (stop with Ctrl-C).
while True:
    test_100003()
    time.sleep(1)
#test_compress()
client.close()  # NOTE(review): unreachable — the loop above never exits
| 18.745455
| 52
| 0.576625
| 282
| 2,062
| 4.187943
| 0.20922
| 0.054191
| 0.030483
| 0.050804
| 0.703641
| 0.703641
| 0.703641
| 0.703641
| 0.703641
| 0.703641
| 0
| 0.082697
| 0.237633
| 2,062
| 109
| 53
| 18.917431
| 0.668575
| 0.032008
| 0
| 0.724138
| 0
| 0
| 0.113065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.103448
| 0.057471
| null | null | 0.091954
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
1a7f76167d9167b8e9a8ca446d1047aacb0c6a85
| 415
|
py
|
Python
|
tests/data/python38.py
|
StarryInternet/black
|
f90f50a7436ca13517933c290ef007e7cb2e7258
|
[
"MIT"
] | 28
|
2018-09-23T16:01:51.000Z
|
2022-03-27T18:20:08.000Z
|
tests/data/python38.py
|
StarryInternet/black
|
f90f50a7436ca13517933c290ef007e7cb2e7258
|
[
"MIT"
] | 22
|
2018-10-10T11:49:07.000Z
|
2022-03-28T02:16:07.000Z
|
tests/data/python38.py
|
StarryInternet/black
|
f90f50a7436ca13517933c290ef007e7cb2e7258
|
[
"MIT"
] | 10
|
2020-09-29T21:52:36.000Z
|
2021-09-29T01:02:11.000Z
|
#!/usr/bin/env python3.8
def starred_return():
    """Return a tuple that splices a list after a leading value (PEP 448)."""
    values = ["value2", "value3"]
    return ("value1", *values)
def starred_yield():
    """Yield a single tuple splicing a list after a leading value (PEP 448)."""
    values = ["value2", "value3"]
    yield ("value1", *values)
# output
#!/usr/bin/env python3.8
def starred_return():
    # Expected formatter output: starred expression in `return` (Python 3.8 grammar).
    my_list = ["value2", "value3"]
    return "value1", *my_list
def starred_yield():
    # Expected formatter output: starred expression in `yield` (Python 3.8 grammar).
    my_list = ["value2", "value3"]
    yield "value1", *my_list
| 14.821429
| 34
| 0.616867
| 55
| 415
| 4.436364
| 0.272727
| 0.196721
| 0.196721
| 0.295082
| 0.97541
| 0.97541
| 0.97541
| 0.97541
| 0.97541
| 0.97541
| 0
| 0.048632
| 0.207229
| 415
| 27
| 35
| 15.37037
| 0.693009
| 0.127711
| 0
| 1
| 0
| 0
| 0.200557
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
1ab13c4aeb71682271a18cc171683eca29ee94e6
| 132
|
py
|
Python
|
pratice/godot/modules/gridmap/config.py
|
wish-wish/skyheroes
|
6411a3834f0ef152371d732ef76321793e5a3955
|
[
"MIT"
] | null | null | null |
pratice/godot/modules/gridmap/config.py
|
wish-wish/skyheroes
|
6411a3834f0ef152371d732ef76321793e5a3955
|
[
"MIT"
] | null | null | null |
pratice/godot/modules/gridmap/config.py
|
wish-wish/skyheroes
|
6411a3834f0ef152371d732ef76321793e5a3955
|
[
"MIT"
] | 1
|
2019-01-13T00:44:17.000Z
|
2019-01-13T00:44:17.000Z
|
def can_build(platform):
    """Report whether this module can be built for *platform*.

    FIXME: disabled temporarily until the gles3 implementation lands, so this
    always answers no regardless of the platform.
    """
    return False
def configure(env):
    """No build-environment configuration is required for this module."""
| 13.2
| 56
| 0.712121
| 16
| 132
| 5.8125
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009804
| 0.227273
| 132
| 9
| 57
| 14.666667
| 0.901961
| 0.378788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 1
| 0.5
| false
| 0.25
| 0
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
46d2220ceb19d361594062e1747f79008e4e0bf1
| 14,800
|
py
|
Python
|
sdk/python/pulumi_sakuracloud/ssh_key_gen.py
|
sacloud/pulumi-sakuracloud
|
3eff14c6ec8ef4ad6422e0cdf15585df67eb4d6e
|
[
"ECL-2.0",
"Apache-2.0"
] | 6
|
2019-12-07T07:46:05.000Z
|
2020-12-19T02:41:42.000Z
|
sdk/python/pulumi_sakuracloud/ssh_key_gen.py
|
sacloud/pulumi-sakuracloud
|
3eff14c6ec8ef4ad6422e0cdf15585df67eb4d6e
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2019-09-11T04:41:06.000Z
|
2021-10-19T07:50:34.000Z
|
sdk/python/pulumi_sakuracloud/ssh_key_gen.py
|
sacloud/pulumi-sakuracloud
|
3eff14c6ec8ef4ad6422e0cdf15585df67eb4d6e
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2019-09-08T05:38:16.000Z
|
2021-06-24T01:32:47.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['SSHKeyGenArgs', 'SSHKeyGen']
@pulumi.input_type
class SSHKeyGenArgs:
    # tfgen-generated input-argument bag for the SSHKeyGen resource.
    # Do not hand-edit beyond comments; regenerate with the bridge tool.
    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 pass_phrase: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a SSHKeyGen resource.
        :param pulumi.Input[str] description: The description of the SSHKey. The length of this value must be in the range [`1`-`512`]. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: The name of the SSHKey. The length of this value must be in the range [`1`-`64`]. Changing this forces a new resource to be created.
        :param pulumi.Input[str] pass_phrase: The pass phrase of the private key. The length of this value must be in the range [`8`-`64`]. Changing this forces a new resource to be created.
        """
        # Only record arguments the caller explicitly supplied; None means "unset".
        if description is not None:
            pulumi.set(__self__, "description", description)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if pass_phrase is not None:
            pulumi.set(__self__, "pass_phrase", pass_phrase)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the SSHKey. The length of this value must be in the range [`1`-`512`]. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the SSHKey. The length of this value must be in the range [`1`-`64`]. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="passPhrase")  # camelCase name used on the wire
    def pass_phrase(self) -> Optional[pulumi.Input[str]]:
        """
        The pass phrase of the private key. The length of this value must be in the range [`8`-`64`]. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "pass_phrase")

    @pass_phrase.setter
    def pass_phrase(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pass_phrase", value)
@pulumi.input_type
class _SSHKeyGenState:
    # tfgen-generated state bag used by SSHKeyGen.get() to look up / filter
    # existing resources; includes output-only fields as well as inputs.
    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 fingerprint: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 pass_phrase: Optional[pulumi.Input[str]] = None,
                 private_key: Optional[pulumi.Input[str]] = None,
                 public_key: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering SSHKeyGen resources.
        :param pulumi.Input[str] description: The description of the SSHKey. The length of this value must be in the range [`1`-`512`]. Changing this forces a new resource to be created.
        :param pulumi.Input[str] fingerprint: The fingerprint of the public key.
        :param pulumi.Input[str] name: The name of the SSHKey. The length of this value must be in the range [`1`-`64`]. Changing this forces a new resource to be created.
        :param pulumi.Input[str] pass_phrase: The pass phrase of the private key. The length of this value must be in the range [`8`-`64`]. Changing this forces a new resource to be created.
        :param pulumi.Input[str] private_key: The body of the private key.
        :param pulumi.Input[str] public_key: The body of the public key.
        """
        # Only record values the caller supplied; None means "unset".
        if description is not None:
            pulumi.set(__self__, "description", description)
        if fingerprint is not None:
            pulumi.set(__self__, "fingerprint", fingerprint)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if pass_phrase is not None:
            pulumi.set(__self__, "pass_phrase", pass_phrase)
        if private_key is not None:
            pulumi.set(__self__, "private_key", private_key)
        if public_key is not None:
            pulumi.set(__self__, "public_key", public_key)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the SSHKey. The length of this value must be in the range [`1`-`512`]. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def fingerprint(self) -> Optional[pulumi.Input[str]]:
        """
        The fingerprint of the public key.
        """
        return pulumi.get(self, "fingerprint")

    @fingerprint.setter
    def fingerprint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "fingerprint", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the SSHKey. The length of this value must be in the range [`1`-`64`]. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="passPhrase")  # camelCase name used on the wire
    def pass_phrase(self) -> Optional[pulumi.Input[str]]:
        """
        The pass phrase of the private key. The length of this value must be in the range [`8`-`64`]. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "pass_phrase")

    @pass_phrase.setter
    def pass_phrase(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pass_phrase", value)

    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> Optional[pulumi.Input[str]]:
        """
        The body of the private key.
        """
        return pulumi.get(self, "private_key")

    @private_key.setter
    def private_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "private_key", value)

    @property
    @pulumi.getter(name="publicKey")
    def public_key(self) -> Optional[pulumi.Input[str]]:
        """
        The body of the public key.
        """
        return pulumi.get(self, "public_key")

    @public_key.setter
    def public_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "public_key", value)
class SSHKeyGen(pulumi.CustomResource):
    # tfgen-generated resource class for a SakuraCloud generated SSH key.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 pass_phrase: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a SakuraCloud SSH Key Gen.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_sakuracloud as sakuracloud

        foobar = sakuracloud.SSHKeyGen("foobar", description="description")
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: The description of the SSHKey. The length of this value must be in the range [`1`-`512`]. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: The name of the SSHKey. The length of this value must be in the range [`1`-`64`]. Changing this forces a new resource to be created.
        :param pulumi.Input[str] pass_phrase: The pass phrase of the private key. The length of this value must be in the range [`8`-`64`]. Changing this forces a new resource to be created.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: Optional[SSHKeyGenArgs] = None,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a SakuraCloud SSH Key Gen.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_sakuracloud as sakuracloud

        foobar = sakuracloud.SSHKeyGen("foobar", description="description")
        ```

        :param str resource_name: The name of the resource.
        :param SSHKeyGenArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: an args-object call is
        # expanded to keyword arguments before _internal_init does the work.
        resource_args, opts = _utilities.get_resource_args_opts(SSHKeyGenArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       pass_phrase: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Normalize and validate the resource options before registration.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (no existing id): build the property bag.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = SSHKeyGenArgs.__new__(SSHKeyGenArgs)

            __props__.__dict__["description"] = description
            __props__.__dict__["name"] = name
            __props__.__dict__["pass_phrase"] = pass_phrase
            # Output-only attributes start unset; the provider fills them in.
            __props__.__dict__["fingerprint"] = None
            __props__.__dict__["private_key"] = None
            __props__.__dict__["public_key"] = None
        super(SSHKeyGen, __self__).__init__(
            'sakuracloud:index/sSHKeyGen:SSHKeyGen',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            description: Optional[pulumi.Input[str]] = None,
            fingerprint: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            pass_phrase: Optional[pulumi.Input[str]] = None,
            private_key: Optional[pulumi.Input[str]] = None,
            public_key: Optional[pulumi.Input[str]] = None) -> 'SSHKeyGen':
        """
        Get an existing SSHKeyGen resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: The description of the SSHKey. The length of this value must be in the range [`1`-`512`]. Changing this forces a new resource to be created.
        :param pulumi.Input[str] fingerprint: The fingerprint of the public key.
        :param pulumi.Input[str] name: The name of the SSHKey. The length of this value must be in the range [`1`-`64`]. Changing this forces a new resource to be created.
        :param pulumi.Input[str] pass_phrase: The pass phrase of the private key. The length of this value must be in the range [`8`-`64`]. Changing this forces a new resource to be created.
        :param pulumi.Input[str] private_key: The body of the private key.
        :param pulumi.Input[str] public_key: The body of the public key.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _SSHKeyGenState.__new__(_SSHKeyGenState)

        __props__.__dict__["description"] = description
        __props__.__dict__["fingerprint"] = fingerprint
        __props__.__dict__["name"] = name
        __props__.__dict__["pass_phrase"] = pass_phrase
        __props__.__dict__["private_key"] = private_key
        __props__.__dict__["public_key"] = public_key
        return SSHKeyGen(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        The description of the SSHKey. The length of this value must be in the range [`1`-`512`]. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def fingerprint(self) -> pulumi.Output[str]:
        """
        The fingerprint of the public key.
        """
        return pulumi.get(self, "fingerprint")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the SSHKey. The length of this value must be in the range [`1`-`64`]. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="passPhrase")
    def pass_phrase(self) -> pulumi.Output[Optional[str]]:
        """
        The pass phrase of the private key. The length of this value must be in the range [`8`-`64`]. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "pass_phrase")

    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> pulumi.Output[str]:
        """
        The body of the private key.
        """
        return pulumi.get(self, "private_key")

    @property
    @pulumi.getter(name="publicKey")
    def public_key(self) -> pulumi.Output[str]:
        """
        The body of the public key.
        """
        return pulumi.get(self, "public_key")
| 43.023256
| 190
| 0.636014
| 1,861
| 14,800
| 4.873186
| 0.080602
| 0.073988
| 0.09108
| 0.094608
| 0.804499
| 0.777594
| 0.747381
| 0.733157
| 0.727533
| 0.691476
| 0
| 0.006484
| 0.260135
| 14,800
| 343
| 191
| 43.148688
| 0.821735
| 0.358378
| 0
| 0.630769
| 1
| 0
| 0.082454
| 0.004243
| 0
| 0
| 0
| 0
| 0
| 1
| 0.158974
| false
| 0.138462
| 0.025641
| 0
| 0.282051
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
46fd1a45b692fe3172d7e89fcd86e0cc8e0b851d
| 91
|
py
|
Python
|
small_probs/__init__.py
|
wlad111/small_probs
|
423faf556075c4561154e11a8a064c70e0518112
|
[
"Apache-2.0"
] | null | null | null |
small_probs/__init__.py
|
wlad111/small_probs
|
423faf556075c4561154e11a8a064c70e0518112
|
[
"Apache-2.0"
] | null | null | null |
small_probs/__init__.py
|
wlad111/small_probs
|
423faf556075c4561154e11a8a064c70e0518112
|
[
"Apache-2.0"
] | null | null | null |
import small_probs
from small_probs import estimates
from small_probs.probability import *
| 22.75
| 37
| 0.868132
| 13
| 91
| 5.846154
| 0.461538
| 0.394737
| 0.368421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10989
| 91
| 4
| 37
| 22.75
| 0.938272
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2002b6b707e7695c140a9e1c71545f30a09c7b37
| 175
|
py
|
Python
|
core/src/zeit/content/video/tests/test_doctest.py
|
rickdg/vivi
|
16134ac954bf8425646d4ad47bdd1f372e089355
|
[
"BSD-3-Clause"
] | 5
|
2019-05-16T09:51:29.000Z
|
2021-05-31T09:30:03.000Z
|
core/src/zeit/content/video/tests/test_doctest.py
|
rickdg/vivi
|
16134ac954bf8425646d4ad47bdd1f372e089355
|
[
"BSD-3-Clause"
] | 107
|
2019-05-24T12:19:02.000Z
|
2022-03-23T15:05:56.000Z
|
core/src/zeit/content/video/tests/test_doctest.py
|
rickdg/vivi
|
16134ac954bf8425646d4ad47bdd1f372e089355
|
[
"BSD-3-Clause"
] | 3
|
2020-08-14T11:01:17.000Z
|
2022-01-08T17:32:19.000Z
|
import zeit.content.video.testing
def test_suite():
    """Collect the functional doctest suite for the video content type."""
    package = 'zeit.content.video'
    return zeit.content.video.testing.FunctionalDocFileSuite(
        'video.txt', package=package)
| 21.875
| 61
| 0.708571
| 20
| 175
| 6.15
| 0.6
| 0.268293
| 0.390244
| 0.373984
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171429
| 175
| 7
| 62
| 25
| 0.848276
| 0
| 0
| 0
| 0
| 0
| 0.154286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
2035a47ebf174cfb795164237e03a356ef200c90
| 1,297
|
py
|
Python
|
hackerrank/Data Structures/Compare two linked lists/test.py
|
ATrain951/01.python-com_Qproject
|
c164dd093954d006538020bdf2e59e716b24d67c
|
[
"MIT"
] | 4
|
2020-07-24T01:59:50.000Z
|
2021-07-24T15:14:08.000Z
|
hackerrank/Data Structures/Compare two linked lists/test.py
|
ATrain951/01.python-com_Qproject
|
c164dd093954d006538020bdf2e59e716b24d67c
|
[
"MIT"
] | null | null | null |
hackerrank/Data Structures/Compare two linked lists/test.py
|
ATrain951/01.python-com_Qproject
|
c164dd093954d006538020bdf2e59e716b24d67c
|
[
"MIT"
] | null | null | null |
import unittest
import solution
class TestQ(unittest.TestCase):
    """Tests for solution.compare_lists on small singly linked lists.

    The original test cases duplicated the list-building/asserting boilerplate
    four times; it is factored into two private helpers below.
    """

    @staticmethod
    def _make_list(values):
        """Build a solution.SinglyLinkedList holding *values* in order."""
        linked = solution.SinglyLinkedList()
        for value in values:
            linked.insert_node(value)
        return linked

    def _assert_compare(self, values1, values2, expected):
        """Assert compare_lists() on lists built from the two value sequences."""
        list1 = self._make_list(values1)
        list2 = self._make_list(values2)
        self.assertEqual(
            solution.compare_lists(list1.head, list2.head), expected)

    def test_case_0(self):
        # Unequal vs. equal two-element lists.
        self._assert_compare([1, 2], [1, 1], 0)
        self._assert_compare([1, 2], [1, 2], 1)

    def test_case_1(self):
        # Equal three-element lists; same elements in different order differ.
        self._assert_compare([3, 2, 2], [3, 2, 2], 1)
        self._assert_compare([2, 1], [1, 2], 0)
if __name__ == '__main__':
    # Run the test suite when the file is executed directly.
    unittest.main()
| 30.880952
| 75
| 0.596762
| 161
| 1,297
| 4.658385
| 0.180124
| 0.256
| 0.288
| 0.298667
| 0.862667
| 0.862667
| 0.816
| 0.798667
| 0.753333
| 0.753333
| 0
| 0.052061
| 0.289129
| 1,297
| 41
| 76
| 31.634146
| 0.761388
| 0
| 0
| 0.742857
| 0
| 0
| 0.006168
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 1
| 0.057143
| false
| 0
| 0.057143
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
203b49a659ff1631e33c28dc083a0d246474a58e
| 9,879
|
py
|
Python
|
tests/pytests/unit/beacons/test_btmp.py
|
waynegemmell/salt
|
88056db3589cccab8956c2ae4f9b733acce89461
|
[
"Apache-2.0"
] | 3
|
2015-08-30T04:23:47.000Z
|
2018-07-15T00:35:23.000Z
|
tests/pytests/unit/beacons/test_btmp.py
|
waynegemmell/salt
|
88056db3589cccab8956c2ae4f9b733acce89461
|
[
"Apache-2.0"
] | 4
|
2016-05-10T22:05:34.000Z
|
2016-05-20T18:10:13.000Z
|
tests/pytests/unit/beacons/test_btmp.py
|
waynegemmell/salt
|
88056db3589cccab8956c2ae4f9b733acce89461
|
[
"Apache-2.0"
] | 1
|
2019-12-17T13:37:16.000Z
|
2019-12-17T13:37:16.000Z
|
# Python libs
import datetime
import logging
import pytest
# Salt libs
import salt.beacons.btmp as btmp
from tests.support.mock import MagicMock, mock_open, patch
# pylint: disable=import-error
# Feature flag: the time_range tests require dateutil for timestamp parsing;
# they are skipped (see @pytest.mark.skipif below) when it is unavailable.
try:
    import dateutil.parser as dateutil_parser  # pylint: disable=unused-import

    _TIME_SUPPORTED = True
except ImportError:
    _TIME_SUPPORTED = False
raw = b"\x06\x00\x00\x00Nt\x00\x00ssh:notty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00garet\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00::1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdd\xc7\xc2Y\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
pack = (
6,
29774,
b"ssh:notty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
b"\x00\x00\x00\x00",
b"garet\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
b"::1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
0,
0,
0,
1505937373,
0,
0,
0,
0,
16777216,
)
log = logging.getLogger(__name__)
@pytest.fixture
def configure_loader_modules():
    """Inject the loader dunders the btmp beacon module expects."""
    module_globals = {"__context__": {"btmp.loc": 2}, "__salt__": {}}
    return {btmp: module_globals}
def test_non_list_config():
    """A non-list beacon configuration must be rejected by validate()."""
    ret = btmp.validate({})
    assert ret == (False, "Configuration for btmp beacon must be a list.")
def test_empty_config():
    """A list containing one empty dict is accepted as valid configuration."""
    ret = btmp.validate([{}])
    assert ret == (True, "Valid beacon configuration")
def test_no_match():
    """With no matching btmp entries the beacon opens BTMP and returns []."""
    config = [
        {
            "users": {
                "gareth": {
                    "time_range": {
                        "end": "09-22-2017 5pm",
                        "start": "09-22-2017 3pm",
                    }
                }
            }
        }
    ]

    assert btmp.validate(config) == (True, "Valid beacon configuration")

    with patch("salt.utils.files.fopen", mock_open(b"")) as m_open:
        ret = btmp.beacon(config)
        # The beacon must have opened the btmp file in binary mode.
        handle = next(iter(m_open.filehandles.values()))[0]
        call_args = handle.call.args
        assert call_args == (btmp.BTMP, "rb"), call_args
        assert ret == [], ret
def test_invalid_users():
    """A list-valued `users` option is rejected: it must be a dictionary."""
    ret = btmp.validate([{"users": ["gareth"]}])
    assert ret == (False, "User configuration for btmp beacon must be a dictionary.")
def test_invalid_groups():
    """A list-valued `groups` option is rejected: it must be a dictionary."""
    ret = btmp.validate([{"groups": ["docker"]}])
    assert ret == (False, "Group configuration for btmp beacon must be a dictionary.")
def test_default_invalid_time_range():
    """A defaults time_range missing its `end` option is rejected."""
    config = [{"defaults": {"time_range": {"start": "3pm"}}}]
    expected = (
        False,
        "The time_range parameter for btmp beacon must contain start & end options.",
    )
    assert btmp.validate(config) == expected
def test_users_invalid_time_range():
config = [{"users": {"gareth": {"time_range": {"start": "3pm"}}}}]
ret = btmp.validate(config)
assert ret == (
False,
"The time_range parameter for btmp beacon must contain start & end options.",
)
def test_groups_invalid_time_range():
config = [{"groups": {"docker": {"time_range": {"start": "3pm"}}}}]
ret = btmp.validate(config)
assert ret == (
False,
"The time_range parameter for btmp beacon must contain start & end options.",
)
def test_match():
with patch("salt.utils.files.fopen", mock_open(read_data=raw)):
with patch("struct.unpack", MagicMock(return_value=pack)):
config = [{"users": {"garet": {}}}]
ret = btmp.validate(config)
assert ret == (True, "Valid beacon configuration")
_expected = [
{
"addr": 1505937373,
"exit_status": 0,
"inittab": "",
"hostname": "::1",
"PID": 29774,
"session": 0,
"user": "garet",
"time": 0,
"line": "ssh:notty",
"type": 6,
}
]
ret = btmp.beacon(config)
assert ret == _expected
@pytest.mark.skipif(_TIME_SUPPORTED is False, reason="dateutil.parser is missing.")
def test_match_time():
with patch("salt.utils.files.fopen", mock_open(read_data=raw)):
mock_now = datetime.datetime(2017, 9, 22, 16, 0, 0, 0)
with patch("datetime.datetime", MagicMock()), patch(
"datetime.datetime.now", MagicMock(return_value=mock_now)
):
with patch("struct.unpack", MagicMock(return_value=pack)):
config = [
{
"users": {
"garet": {
"time_range": {
"end": "09-22-2017 5pm",
"start": "09-22-2017 3pm",
}
}
}
}
]
ret = btmp.validate(config)
assert ret == (True, "Valid beacon configuration")
_expected = [
{
"addr": 1505937373,
"exit_status": 0,
"inittab": "",
"hostname": "::1",
"PID": 29774,
"session": 0,
"user": "garet",
"time": 0,
"line": "ssh:notty",
"type": 6,
}
]
ret = btmp.beacon(config)
assert ret == _expected
def test_match_group():
for groupadd in (
"salt.modules.aix_group",
"salt.modules.mac_group",
"salt.modules.pw_group",
"salt.modules.solaris_group",
"salt.modules.win_groupadd",
):
mock_group_info = {
"passwd": "x",
"gid": 100,
"name": "users",
"members": ["garet"],
}
with patch("salt.utils.files.fopen", mock_open(read_data=raw)):
with patch("time.time", MagicMock(return_value=1506121200)):
with patch("struct.unpack", MagicMock(return_value=pack)):
with patch(
"{}.info".format(groupadd),
new=MagicMock(return_value=mock_group_info),
):
config = [
{
"group": {
"users": {
"time_range": {"end": "5pm", "start": "3pm"}
}
}
}
]
ret = btmp.validate(config)
assert ret == (True, "Valid beacon configuration")
_expected = [
{
"addr": 1505937373,
"exit_status": 0,
"inittab": "",
"hostname": "::1",
"PID": 29774,
"session": 0,
"user": "garet",
"time": 0,
"line": "ssh:notty",
"type": 6,
}
]
ret = btmp.beacon(config)
assert ret == _expected
| 38.142857
| 1,485
| 0.52394
| 1,307
| 9,879
| 3.885998
| 0.117062
| 0.77023
| 1.137626
| 1.49557
| 0.742863
| 0.733609
| 0.733609
| 0.712345
| 0.68478
| 0.68478
| 0
| 0.222503
| 0.318048
| 9,879
| 258
| 1,486
| 38.290698
| 0.531394
| 0.008098
| 0
| 0.472081
| 0
| 0.020305
| 0.418522
| 0.299163
| 0
| 1
| 0
| 0
| 0.081218
| 1
| 0.060914
| false
| 0.005076
| 0.035533
| 0.005076
| 0.101523
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
64511ac290f51e25b0d2db8f8bbc791a25b52550
| 391
|
py
|
Python
|
tests/internal/ena_support/test_ena_support_unsupported_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/ena_support/test_ena_support_unsupported_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/ena_support/test_ena_support_unsupported_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | 1
|
2021-12-15T11:58:22.000Z
|
2021-12-15T11:58:22.000Z
|
# Testing module ena_support.unsupported
import pytest
import ec2_compare.internal.ena_support.unsupported
def test_get_internal_data_ena_support_unsupported_get_instances_list():
assert len(ec2_compare.internal.ena_support.unsupported.get_instances_list()) > 0
def test_get_internal_data_ena_support_unsupported_get():
assert len(ec2_compare.internal.ena_support.unsupported.get) > 0
| 39.1
| 83
| 0.86445
| 56
| 391
| 5.589286
| 0.339286
| 0.191693
| 0.402556
| 0.306709
| 0.827476
| 0.827476
| 0.619808
| 0.619808
| 0.619808
| 0
| 0
| 0.013699
| 0.066496
| 391
| 9
| 84
| 43.444444
| 0.843836
| 0.097187
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
64552bc4794da567f913b2b4b1edb9a1c5b98aa5
| 258
|
py
|
Python
|
tests/optionstrader/context.py
|
Zaitsev11/Optionstrader
|
ed2dbef802ad08f14a0e5280e91746f1bf1fa3f3
|
[
"MIT"
] | 6
|
2018-04-26T03:02:04.000Z
|
2022-02-26T04:58:53.000Z
|
tests/optionstrader/context.py
|
webclinic017/Optionstrader
|
ed2dbef802ad08f14a0e5280e91746f1bf1fa3f3
|
[
"MIT"
] | null | null | null |
tests/optionstrader/context.py
|
webclinic017/Optionstrader
|
ed2dbef802ad08f14a0e5280e91746f1bf1fa3f3
|
[
"MIT"
] | 5
|
2019-12-01T08:09:08.000Z
|
2021-11-28T03:43:24.000Z
|
import os
import sys
#sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../')))
path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../'))
print(path)
import optionstrader
| 32.25
| 86
| 0.689922
| 40
| 258
| 4.25
| 0.3
| 0.247059
| 0.229412
| 0.164706
| 0.711765
| 0.711765
| 0.711765
| 0.470588
| 0.470588
| 0.470588
| 0
| 0.008264
| 0.062016
| 258
| 7
| 87
| 36.857143
| 0.694215
| 0.158915
| 0
| 0
| 0
| 0
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0.166667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
647054e54a46d29f80374928dfb89d035fab5c79
| 3,855
|
py
|
Python
|
concertina_tesseract_details/projection_tetrahedral.py
|
watchduck/concertina_hypercubes
|
4d51d4ebcb2ea13a237356bb238b066f6f3d9feb
|
[
"MIT"
] | 1
|
2018-10-28T08:58:54.000Z
|
2018-10-28T08:58:54.000Z
|
concertina_tesseract_details/projection_tetrahedral.py
|
watchduck/concertina_hypercubes
|
4d51d4ebcb2ea13a237356bb238b066f6f3d9feb
|
[
"MIT"
] | null | null | null |
concertina_tesseract_details/projection_tetrahedral.py
|
watchduck/concertina_hypercubes
|
4d51d4ebcb2ea13a237356bb238b066f6f3d9feb
|
[
"MIT"
] | 1
|
2021-06-19T21:35:11.000Z
|
2021-06-19T21:35:11.000Z
|
from functions import project_from_4d_to_3d
import numpy as np
point_fr = np.array([-1, -1, -1, -1]) # from
# the 149 vertices except the lowest one used as view point
vertices = [[-1, -1, -1, -1/5], [-1, -1, -3/5, 1/5], [-1, -1, -1/5, -1], [-1, -1, 1/5, -3/5], [-1, -1, 1/5, 1/5], [-1, -3/5, -1, 1/5], [-1, -3/5, -3/5, 3/5], [-1, -3/5, 1/5, -1], [-1, -3/5, 1/5, 3/5], [-1, -3/5, 3/5, -3/5], [-1, -3/5, 3/5, 1/5], [-1, -1/5, -1, -1], [-1, -1/5, 3/5, 3/5], [-1, 1/5, -1, -3/5], [-1, 1/5, -1, 1/5], [-1, 1/5, -3/5, -1], [-1, 1/5, -3/5, 3/5], [-1, 1/5, 1/5, -1], [-1, 1/5, 3/5, -3/5], [-1, 3/5, -3/5, -3/5], [-1, 3/5, -3/5, 1/5], [-1, 3/5, -1/5, 3/5], [-1, 3/5, 1/5, -3/5], [-1, 3/5, 3/5, -1/5], [-1, 3/5, 3/5, 3/5], [-3/5, -1, -1, 1/5], [-3/5, -1, -3/5, 3/5], [-3/5, -1, 1/5, -1], [-3/5, -1, 1/5, 3/5], [-3/5, -1, 3/5, -3/5], [-3/5, -1, 3/5, 1/5], [-3/5, -3/5, -1, 3/5], [-3/5, -3/5, -3/5, 1], [-3/5, -3/5, 1/5, 1], [-3/5, -3/5, 3/5, -1], [-3/5, -3/5, 1, -3/5], [-3/5, -3/5, 1, 1/5], [-3/5, -1/5, 3/5, 1], [-3/5, -1/5, 1, 3/5], [-3/5, 1/5, -1, -1], [-3/5, 1/5, -1, 3/5], [-3/5, 1/5, -3/5, 1], [-3/5, 1/5, 3/5, -1], [-3/5, 1/5, 1, -3/5], [-3/5, 3/5, -1, -3/5], [-3/5, 3/5, -1, 1/5], [-3/5, 3/5, -3/5, -1], [-3/5, 3/5, -1/5, 1], [-3/5, 3/5, 1/5, -1], [-3/5, 3/5, 3/5, 1], [-3/5, 3/5, 1, -1/5], [-3/5, 3/5, 1, 3/5], [-3/5, 1, -3/5, -3/5], [-3/5, 1, -3/5, 1/5], [-3/5, 1, -1/5, 3/5], [-3/5, 1, 1/5, -3/5], [-3/5, 1, 3/5, -1/5], [-3/5, 1, 3/5, 3/5], [-1/5, -1, -1, -1], [-1/5, -1, 3/5, 3/5], [-1/5, -3/5, 3/5, 1], [-1/5, -3/5, 1, 3/5], [-1/5, -1/5, 1, 1], [-1/5, 3/5, -1, 3/5], [-1/5, 3/5, -3/5, 1], [-1/5, 3/5, 3/5, -1], [-1/5, 3/5, 1, -3/5], [-1/5, 3/5, 1, 1], [-1/5, 1, -3/5, 3/5], [-1/5, 1, -1/5, 1], [-1/5, 1, 3/5, -3/5], [-1/5, 1, 3/5, 1], [-1/5, 1, 1, -1/5], [-1/5, 1, 1, 3/5], [1/5, -1, -1, -3/5], [1/5, -1, -1, 1/5], [1/5, -1, -3/5, -1], [1/5, -1, -3/5, 3/5], [1/5, -1, 1/5, -1], [1/5, -1, 3/5, -3/5], [1/5, -3/5, -1, -1], [1/5, -3/5, -1, 3/5], [1/5, -3/5, -3/5, 1], [1/5, -3/5, 3/5, -1], [1/5, -3/5, 1, -3/5], [1/5, 1/5, -1, -1], [1/5, 3/5, -1, -3/5], [1/5, 3/5, -3/5, -1], [1/5, 1, -3/5, -3/5], [1/5, 1, 1, 1], [3/5, -1, -3/5, -3/5], [3/5, -1, -3/5, 1/5], [3/5, 
-1, -1/5, 3/5], [3/5, -1, 1/5, -3/5], [3/5, -1, 3/5, -1/5], [3/5, -1, 3/5, 3/5], [3/5, -3/5, -1, -3/5], [3/5, -3/5, -1, 1/5], [3/5, -3/5, -3/5, -1], [3/5, -3/5, -1/5, 1], [3/5, -3/5, 1/5, -1], [3/5, -3/5, 3/5, 1], [3/5, -3/5, 1, -1/5], [3/5, -3/5, 1, 3/5], [3/5, -1/5, -1, 3/5], [3/5, -1/5, -3/5, 1], [3/5, -1/5, 3/5, -1], [3/5, -1/5, 1, -3/5], [3/5, -1/5, 1, 1], [3/5, 1/5, -1, -3/5], [3/5, 1/5, -3/5, -1], [3/5, 3/5, -1, -1/5], [3/5, 3/5, -1, 3/5], [3/5, 3/5, -3/5, 1], [3/5, 3/5, -1/5, -1], [3/5, 3/5, 3/5, -1], [3/5, 3/5, 1, -3/5], [3/5, 1, -3/5, -1/5], [3/5, 1, -3/5, 3/5], [3/5, 1, -1/5, -3/5], [3/5, 1, -1/5, 1], [3/5, 1, 3/5, -3/5], [3/5, 1, 1, -1/5], [1, -3/5, -3/5, -3/5], [1, -3/5, -3/5, 1/5], [1, -3/5, -1/5, 3/5], [1, -3/5, 1/5, -3/5], [1, -3/5, 3/5, -1/5], [1, -3/5, 3/5, 3/5], [1, -1/5, -3/5, 3/5], [1, -1/5, -1/5, 1], [1, -1/5, 3/5, -3/5], [1, -1/5, 3/5, 1], [1, -1/5, 1, -1/5], [1, -1/5, 1, 3/5], [1, 1/5, -3/5, -3/5], [1, 1/5, 1, 1], [1, 3/5, -3/5, -1/5], [1, 3/5, -3/5, 3/5], [1, 3/5, -1/5, -3/5], [1, 3/5, -1/5, 1], [1, 3/5, 3/5, -3/5], [1, 3/5, 1, -1/5], [1, 1, -1/5, -1/5], [1, 1, -1/5, 3/5], [1, 1, 1/5, 1], [1, 1, 3/5, -1/5], [1, 1, 1, 1/5], [1, 1, 1, 1]]
projected_vertices = []
vertex_distances = []
for vertex in vertices:
projected_vertices.append(
project_from_4d_to_3d(vertex, point_fr)
)
vertex_distances.append(
np.linalg.norm(vertex - point_fr)
)
povray_vertex_strings = ''
for vertex in projected_vertices:
povray_point = ''
for c in vertex:
povray_point += str(c) + ', '
povray_point = povray_point[:-2]
povray_vertex_strings += '<{p}>, '.format(p=povray_point)
print(povray_vertex_strings)
| 132.931034
| 3,177
| 0.379507
| 1,084
| 3,855
| 1.324723
| 0.0369
| 0.367688
| 0.348886
| 0.311978
| 0.711699
| 0.688022
| 0.688022
| 0.685933
| 0.683148
| 0.682451
| 0
| 0.320616
| 0.190921
| 3,855
| 28
| 3,178
| 137.678571
| 0.139788
| 0.016083
| 0
| 0
| 0
| 0
| 0.002375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.095238
| 0
| 0.095238
| 0.047619
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b39af89b1376d4710ef2624fab89ade20bda66eb
| 18,059
|
py
|
Python
|
coba/tests/test_environments_filters.py
|
anrath/coba
|
635fd21306f52b27e7f5d78ee05148e6934e3d38
|
[
"BSD-3-Clause"
] | null | null | null |
coba/tests/test_environments_filters.py
|
anrath/coba
|
635fd21306f52b27e7f5d78ee05148e6934e3d38
|
[
"BSD-3-Clause"
] | null | null | null |
coba/tests/test_environments_filters.py
|
anrath/coba
|
635fd21306f52b27e7f5d78ee05148e6934e3d38
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
from math import isnan
from coba.config import CobaConfig, NullLogger
from coba.environments import SimulatedInteraction, Sort, Scale, Cycle, Impute
CobaConfig.logger = NullLogger()
class Sort_tests(unittest.TestCase):
def test_sort1(self) -> None:
interactions = [
SimulatedInteraction((7,2), [1], rewards=[1]),
SimulatedInteraction((1,9), [1], rewards=[1]),
SimulatedInteraction((8,3), [1], rewards=[1])
]
mem_interactions = interactions
srt_interactions = list(Sort([0]).filter(mem_interactions))
self.assertEqual((7,2), mem_interactions[0].context)
self.assertEqual((1,9), mem_interactions[1].context)
self.assertEqual((8,3), mem_interactions[2].context)
self.assertEqual((1,9), srt_interactions[0].context)
self.assertEqual((7,2), srt_interactions[1].context)
self.assertEqual((8,3), srt_interactions[2].context)
def test_sort2(self) -> None:
interactions = [
SimulatedInteraction((1,2), [1], rewards=[1]),
SimulatedInteraction((1,9), [1], rewards=[1]),
SimulatedInteraction((1,3), [1], rewards=[1])
]
mem_interactions = interactions
srt_interactions = list(Sort([0,1]).filter(mem_interactions))
self.assertEqual((1,2), mem_interactions[0].context)
self.assertEqual((1,9), mem_interactions[1].context)
self.assertEqual((1,3), mem_interactions[2].context)
self.assertEqual((1,2), srt_interactions[0].context)
self.assertEqual((1,3), srt_interactions[1].context)
self.assertEqual((1,9), srt_interactions[2].context)
def test_sort3(self) -> None:
interactions = [
SimulatedInteraction((1,2), [1], rewards=[1]),
SimulatedInteraction((1,9), [1], rewards=[1]),
SimulatedInteraction((1,3), [1], rewards=[1])
]
mem_interactions = interactions
srt_interactions = list(Sort(*[0,1]).filter(mem_interactions))
self.assertEqual((1,2), mem_interactions[0].context)
self.assertEqual((1,9), mem_interactions[1].context)
self.assertEqual((1,3), mem_interactions[2].context)
self.assertEqual((1,2), srt_interactions[0].context)
self.assertEqual((1,3), srt_interactions[1].context)
self.assertEqual((1,9), srt_interactions[2].context)
def test_params(self):
self.assertEqual({'sort':[0]}, Sort([0]).params)
self.assertEqual({'sort':[1,2]}, Sort([1,2]).params)
class Scale_tests(unittest.TestCase):
def test_scale_min_and_minmax_using_all(self) -> None:
interactions = [
SimulatedInteraction((7,2), [1], rewards=[1]),
SimulatedInteraction((1,9), [1], rewards=[1]),
SimulatedInteraction((8,3), [1], rewards=[1])
]
mem_interactions = interactions
scl_interactions = list(Scale().filter(interactions))
self.assertEqual((7,2), mem_interactions[0].context)
self.assertEqual((1,9), mem_interactions[1].context)
self.assertEqual((8,3), mem_interactions[2].context)
self.assertEqual(3, len(scl_interactions))
self.assertEqual((6/7,0 ), scl_interactions[0].context)
self.assertEqual((0 ,1 ), scl_interactions[1].context)
self.assertEqual((1 ,1/7), scl_interactions[2].context)
def test_scale_min_and_minmax_using_2(self) -> None:
interactions = [
SimulatedInteraction((7,2), [1], rewards=[1]),
SimulatedInteraction((1,9), [1], rewards=[1]),
SimulatedInteraction((8,3), [1], rewards=[1])
]
mem_interactions = interactions
scl_interactions = list(Scale(using=2).filter(interactions))
self.assertEqual((7,2), mem_interactions[0].context)
self.assertEqual((1,9), mem_interactions[1].context)
self.assertEqual((8,3), mem_interactions[2].context)
self.assertEqual(3, len(scl_interactions))
self.assertEqual((1 ,0 ), scl_interactions[0].context)
self.assertEqual((0 ,1 ), scl_interactions[1].context)
self.assertAlmostEqual(7/6, scl_interactions[2].context[0])
self.assertAlmostEqual(1/7, scl_interactions[2].context[1])
def test_scale_0_and_2(self) -> None:
interactions = [
SimulatedInteraction((8,2), [1], rewards=[1]),
SimulatedInteraction((4,4), [1], rewards=[1]),
SimulatedInteraction((2,6), [1], rewards=[1])
]
mem_interactions = interactions
scl_interactions = list(Scale(shift=0,scale=1/2,using=2).filter(interactions))
self.assertEqual((8,2), mem_interactions[0].context)
self.assertEqual((4,4), mem_interactions[1].context)
self.assertEqual((2,6), mem_interactions[2].context)
self.assertEqual(3, len(scl_interactions))
self.assertEqual((4,1), scl_interactions[0].context)
self.assertEqual((2,2), scl_interactions[1].context)
self.assertEqual((1,3), scl_interactions[2].context)
def test_scale_mean_and_std(self) -> None:
interactions = [
SimulatedInteraction((8,2), [1], rewards=[1]),
SimulatedInteraction((4,4), [1], rewards=[1]),
SimulatedInteraction((0,6), [1], rewards=[1])
]
mem_interactions = interactions
scl_interactions = list(Scale(shift="mean",scale="std").filter(interactions))
self.assertEqual((8,2), mem_interactions[0].context)
self.assertEqual((4,4), mem_interactions[1].context)
self.assertEqual((0,6), mem_interactions[2].context)
self.assertEqual(3, len(scl_interactions))
self.assertEqual(( 4/4,-2/2), scl_interactions[0].context)
self.assertEqual(( 0/4, 0/2), scl_interactions[1].context)
self.assertEqual((-4/4, 2/2), scl_interactions[2].context)
def test_scale_med_and_iqr(self) -> None:
interactions = [
SimulatedInteraction((8,2), [1], rewards=[1]),
SimulatedInteraction((4,4), [1], rewards=[1]),
SimulatedInteraction((0,6), [1], rewards=[1])
]
mem_interactions = interactions
scl_interactions = list(Scale(shift="med",scale="iqr").filter(interactions))
self.assertEqual((8,2), mem_interactions[0].context)
self.assertEqual((4,4), mem_interactions[1].context)
self.assertEqual((0,6), mem_interactions[2].context)
self.assertEqual(3, len(scl_interactions))
self.assertEqual(( 4/8,-2/4), scl_interactions[0].context)
self.assertEqual(( 0/8, 0/4), scl_interactions[1].context)
self.assertEqual((-4/8, 2/4), scl_interactions[2].context)
def test_scale_med_and_iqr_0(self) -> None:
interactions = [
SimulatedInteraction((8,2), [1], rewards=[1]),
SimulatedInteraction((4,2), [1], rewards=[1]),
SimulatedInteraction((0,2), [1], rewards=[1])
]
mem_interactions = interactions
scl_interactions = list(Scale(shift="med",scale="iqr").filter(interactions))
self.assertEqual((8,2), mem_interactions[0].context)
self.assertEqual((4,2), mem_interactions[1].context)
self.assertEqual((0,2), mem_interactions[2].context)
self.assertEqual(3, len(scl_interactions))
self.assertEqual(( 4/8, 0), scl_interactions[0].context)
self.assertEqual(( 0/8, 0), scl_interactions[1].context)
self.assertEqual((-4/8, 0), scl_interactions[2].context)
def test_scale_min_and_minmax_with_str(self) -> None:
interactions = [
SimulatedInteraction((7,2,"A"), [1], rewards=[1]),
SimulatedInteraction((1,9,"B"), [1], rewards=[1]),
SimulatedInteraction((8,3,"C"), [1], rewards=[1])
]
mem_interactions = interactions
scl_interactions = list(Scale().filter(interactions))
self.assertEqual((7,2,"A"), mem_interactions[0].context)
self.assertEqual((1,9,"B"), mem_interactions[1].context)
self.assertEqual((8,3,"C"), mem_interactions[2].context)
self.assertEqual(3, len(scl_interactions))
self.assertEqual((6/7,0 ,"A"), scl_interactions[0].context)
self.assertEqual((0 ,1 ,"B"), scl_interactions[1].context)
self.assertEqual((1 ,1/7,"C"), scl_interactions[2].context)
def test_scale_min_and_minmax_with_nan(self) -> None:
interactions = [
SimulatedInteraction((float('nan'), 2 ), [1], rewards=[1]),
SimulatedInteraction((1 , 9 ), [1], rewards=[1]),
SimulatedInteraction((8 , float('nan')), [1], rewards=[1])
]
scl_interactions = list(Scale().filter(interactions))
self.assertEqual(3, len(scl_interactions))
self.assertTrue(isnan(scl_interactions[0].context[0]))
self.assertEqual(0, scl_interactions[0].context[1])
self.assertEqual((0, 1), scl_interactions[1].context)
self.assertEqual(1 , scl_interactions[2].context[0])
self.assertTrue(isnan(scl_interactions[2].context[1]))
def test_scale_min_and_minmax_with_nan(self) -> None:
interactions = [
SimulatedInteraction(("A", 2 ), [1], rewards=[1]),
SimulatedInteraction((1 , 9 ), [1], rewards=[1]),
SimulatedInteraction((8 , "B"), [1], rewards=[1])
]
scl_interactions = list(Scale().filter(interactions))
self.assertEqual(3, len(scl_interactions))
self.assertEqual("A", scl_interactions[0].context[0])
self.assertEqual(0 , scl_interactions[0].context[1])
self.assertEqual((0, 1), scl_interactions[1].context)
self.assertEqual(1 , scl_interactions[2].context[0])
self.assertTrue("B" , scl_interactions[2].context[1])
def test_params(self):
self.assertEqual({"scale_shift":"mean","scale_scale":"std","scale_using":None}, Scale(shift="mean",scale="std").params)
self.assertEqual({"scale_shift":2,"scale_scale":1/2,"scale_using":None}, Scale(shift=2,scale=1/2).params)
self.assertEqual({"scale_shift":2,"scale_scale":1/2,"scale_using":10}, Scale(shift=2,scale=1/2,using=10).params)
class Cycle_tests(unittest.TestCase):
def test_after_0(self) -> None:
interactions = [
SimulatedInteraction((7,2), [1,2], rewards=[1,3]),
SimulatedInteraction((1,9), [1,2], rewards=[1,4]),
SimulatedInteraction((8,3), [1,2], rewards=[1,5])
]
mem_interactions = interactions
cyc_interactions = list(Cycle().filter(mem_interactions))
self.assertEqual([1,3], mem_interactions[0].kwargs["rewards"])
self.assertEqual([1,4], mem_interactions[1].kwargs["rewards"])
self.assertEqual([1,5], mem_interactions[2].kwargs["rewards"])
self.assertEqual([1,3], mem_interactions[0].kwargs["rewards"])
self.assertEqual([1,4], mem_interactions[1].kwargs["rewards"])
self.assertEqual([1,5], mem_interactions[2].kwargs["rewards"])
self.assertEqual(3, len(cyc_interactions))
self.assertEqual([3,1], cyc_interactions[0].kwargs["rewards"])
self.assertEqual([4,1], cyc_interactions[1].kwargs["rewards"])
self.assertEqual([5,1], cyc_interactions[2].kwargs["rewards"])
self.assertEqual([3,1], cyc_interactions[0].kwargs["rewards"])
self.assertEqual([4,1], cyc_interactions[1].kwargs["rewards"])
self.assertEqual([5,1], cyc_interactions[2].kwargs["rewards"])
def test_after_1(self) -> None:
interactions = [
SimulatedInteraction((7,2), [1,2], rewards=[1,3]),
SimulatedInteraction((1,9), [1,2], rewards=[1,4]),
SimulatedInteraction((8,3), [1,2], rewards=[1,5])
]
mem_interactions = interactions
cyc_interactions = list(Cycle(after=1).filter(mem_interactions))
self.assertEqual([1,3], mem_interactions[0].kwargs["rewards"])
self.assertEqual([1,4], mem_interactions[1].kwargs["rewards"])
self.assertEqual([1,5], mem_interactions[2].kwargs["rewards"])
self.assertEqual(3, len(cyc_interactions))
self.assertEqual([1,3], cyc_interactions[0].kwargs["rewards"])
self.assertEqual([4,1], cyc_interactions[1].kwargs["rewards"])
self.assertEqual([5,1], cyc_interactions[2].kwargs["rewards"])
def test_after_2(self) -> None:
interactions = [
SimulatedInteraction((7,2), [1,2], rewards=[1,3]),
SimulatedInteraction((1,9), [1,2], rewards=[1,4]),
SimulatedInteraction((8,3), [1,2], rewards=[1,5])
]
mem_interactions = interactions
cyc_interactions = list(Cycle(after=2).filter(mem_interactions))
self.assertEqual([1,3], mem_interactions[0].kwargs["rewards"])
self.assertEqual([1,4], mem_interactions[1].kwargs["rewards"])
self.assertEqual([1,5], mem_interactions[2].kwargs["rewards"])
self.assertEqual(3, len(cyc_interactions))
self.assertEqual([1,3], cyc_interactions[0].kwargs["rewards"])
self.assertEqual([1,4], cyc_interactions[1].kwargs["rewards"])
self.assertEqual([5,1], cyc_interactions[2].kwargs["rewards"])
def test_after_10(self) -> None:
interactions = [
SimulatedInteraction((7,2), [1,2], rewards=[1,3]),
SimulatedInteraction((1,9), [1,2], rewards=[1,4]),
SimulatedInteraction((8,3), [1,2], rewards=[1,5])
]
mem_interactions = interactions
cyc_interactions = list(Cycle(after=10).filter(mem_interactions))
self.assertEqual([1,3], mem_interactions[0].kwargs["rewards"])
self.assertEqual([1,4], mem_interactions[1].kwargs["rewards"])
self.assertEqual([1,5], mem_interactions[2].kwargs["rewards"])
self.assertEqual(3, len(cyc_interactions))
self.assertEqual([1,3], cyc_interactions[0].kwargs["rewards"])
self.assertEqual([1,4], cyc_interactions[1].kwargs["rewards"])
self.assertEqual([1,5], cyc_interactions[2].kwargs["rewards"])
def test_params(self):
self.assertEqual({"cycle_after":0 }, Cycle().params)
self.assertEqual({"cycle_after":2 }, Cycle(2).params)
class Impute_tests(unittest.TestCase):
def test_impute_nothing(self) -> None:
interactions = [
SimulatedInteraction((7,2), [1], rewards=[1]),
SimulatedInteraction((1,9), [1], rewards=[1]),
SimulatedInteraction((8,3), [1], rewards=[1])
]
mem_interactions = interactions
imp_interactions = list(Impute().filter(interactions))
self.assertEqual((7,2), mem_interactions[0].context)
self.assertEqual((1,9), mem_interactions[1].context)
self.assertEqual((8,3), mem_interactions[2].context)
self.assertEqual(3, len(imp_interactions))
self.assertEqual((7,2), imp_interactions[0].context)
self.assertEqual((1,9), imp_interactions[1].context)
self.assertEqual((8,3), imp_interactions[2].context)
def test_impute_mean(self) -> None:
interactions = [
SimulatedInteraction((7 , 2 ), [1], rewards=[1]),
SimulatedInteraction((float('nan'), float('nan')), [1], rewards=[1]),
SimulatedInteraction((8 , 3 ), [1], rewards=[1])
]
mem_interactions = interactions
imp_interactions = list(Impute().filter(interactions))
self.assertEqual((7,2), mem_interactions[0].context)
#self.assertEqual((1,9), mem_interactions[1].context)
self.assertEqual((8,3), mem_interactions[2].context)
self.assertEqual(3, len(imp_interactions))
self.assertEqual((7 , 2), imp_interactions[0].context)
self.assertEqual((7.5,2.5), imp_interactions[1].context)
self.assertEqual((8 ,3 ), imp_interactions[2].context)
def test_impute_med(self) -> None:
interactions = [
SimulatedInteraction((7 , 2 ), [1], rewards=[1]),
SimulatedInteraction((7 , 2 ), [1], rewards=[1]),
SimulatedInteraction((float('nan'), float('nan')), [1], rewards=[1]),
SimulatedInteraction((8 , 3 ), [1], rewards=[1])
]
imp_interactions = list(Impute("median").filter(interactions))
self.assertEqual(4, len(imp_interactions))
self.assertEqual((7, 2), imp_interactions[0].context)
self.assertEqual((7, 2), imp_interactions[1].context)
self.assertEqual((7, 2), imp_interactions[2].context)
self.assertEqual((8, 3), imp_interactions[3].context)
def test_impute_med_with_str(self) -> None:
interactions = [
SimulatedInteraction((7 , 2 , "A"), [1], rewards=[1]),
SimulatedInteraction((7 , 2 , "A"), [1], rewards=[1]),
SimulatedInteraction((float('nan'), float('nan'), "A"), [1], rewards=[1]),
SimulatedInteraction((8 , 3 , "A"), [1], rewards=[1])
]
imp_interactions = list(Impute("median").filter(interactions))
self.assertEqual(4, len(imp_interactions))
self.assertEqual((7, 2, "A"), imp_interactions[0].context)
self.assertEqual((7, 2, "A"), imp_interactions[1].context)
self.assertEqual((7, 2, "A"), imp_interactions[2].context)
self.assertEqual((8, 3, "A"), imp_interactions[3].context)
def test_params(self):
self.assertEqual({"scale_shift":"mean","scale_scale":"std","scale_using":None}, Scale(shift="mean",scale="std").params)
self.assertEqual({"scale_shift":2,"scale_scale":1/2,"scale_using":None}, Scale(shift=2,scale=1/2).params)
self.assertEqual({"scale_shift":2,"scale_scale":1/2,"scale_using":10}, Scale(shift=2,scale=1/2,using=10).params)
if __name__ == '__main__':
unittest.main()
| 40.042129
| 127
| 0.61731
| 2,131
| 18,059
| 5.097607
| 0.035664
| 0.197459
| 0.13569
| 0.090767
| 0.956458
| 0.93777
| 0.916229
| 0.880604
| 0.828224
| 0.816717
| 0
| 0.051438
| 0.216291
| 18,059
| 451
| 128
| 40.042129
| 0.716103
| 0.002879
| 0
| 0.634375
| 0
| 0
| 0.030544
| 0
| 0
| 0
| 0
| 0
| 0.459375
| 1
| 0.075
| false
| 0
| 0.0125
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b3aba6a1f7cbf8222e4f9314a3da64497342b943
| 11,425
|
py
|
Python
|
fixedcolors.py
|
BullGator024/zandro_rcon
|
4cc584eb4990306e96b77b1aa9021b7d5fccae62
|
[
"MIT"
] | 1
|
2022-01-02T16:00:42.000Z
|
2022-01-02T16:00:42.000Z
|
fixedcolors.py
|
BullGator024/zandro_rcon
|
4cc584eb4990306e96b77b1aa9021b7d5fccae62
|
[
"MIT"
] | null | null | null |
fixedcolors.py
|
BullGator024/zandro_rcon
|
4cc584eb4990306e96b77b1aa9021b7d5fccae62
|
[
"MIT"
] | null | null | null |
clist = [
"\\c[uh0]", "\\c[uh1]", "\\c[uh2]", "\\c[uh3]", "\\c[uh4]", "\\c[uh5]", "\\c[uh6]", "\\c[uh7]", "\\c[uh8]",
"\\c[uh9]", "\\c[uh10]", "\\c[uh11]", "\\c[uh12]", "\\c[uh13]", "\\c[uh14]", "\\c[uh15]", "\\c[uh16]", "\\c[uh17]",
"\\c[uh18]", "\\c[uh19]", "\\c[uh20]", "\\c[uh21]", "\\c[uh22]", "\\c[uh23]", "\\c[uh24]", "\\c[uh25]", "\\c[uh26]",
"\\c[uh27]", "\\c[uh28]", "\\c[uh29]", "\\c[uh30]", "\\c[uh31]", "\\c[uh32]", "\\c[uh33]", "\\c[uh34]", "\\c[uh35]",
"\\c[uh36]", "\\c[uh37]", "\\c[uh38]", "\\c[uh39]", "\\c[uh40]", "\\c[uh41]", "\\c[uh42]", "\\c[uh43]", "\\c[uh44]",
"\\c[uh45]", "\\c[uh46]", "\\c[uh47]", "\\c[uh48]", "\\c[uh49]", "\\c[uh50]", "\\c[uh51]", "\\c[uh52]", "\\c[uh53]",
"\\c[uh54]", "\\c[uh55]", "\\c[uh56]", "\\c[uh57]", "\\c[uh58]", "\\c[uh59]", "\\c[uh60]", "\\c[uh61]", "\\c[uh62]",
"\\c[uh63]", "\\c[uh64]", "\\c[uh65]", "\\c[uh66]", "\\c[uh67]", "\\c[uh68]", "\\c[uh69]", "\\c[uh70]", "\\c[uh71]",
"\\c[uh72]", "\\c[uh73]", "\\c[uh74]", "\\c[uh75]", "\\c[uh76]", "\\c[uh77]", "\\c[uh78]", "\\c[uh79]",
"\\\\c[a1]", "\\\\c[a2]", "\\\\c[a3]", "\\\\c[a4]", "\\\\c[a5]", "\\\\c[a6]", "\\\\c[a7]", "\\\\c[a8]", "\\\\c[a9]", "\\\\c[a0]",
"\\\\c[b1]", "\\\\c[b2]", "\\\\c[b3]", "\\\\c[b4]", "\\\\c[b5]", "\\\\c[b6]", "\\\\c[b7]", "\\\\c[b8]", "\\\\c[b9]", "\\\\c[b0]",
"\\\\c[c1]", "\\\\c[c2]", "\\\\c[c3]", "\\\\c[c4]", "\\\\c[c5]", "\\\\c[c6]", "\\\\c[c7]", "\\\\c[c8]", "\\\\c[c9]", "\\\\c[c0]",
"\\\\c[d1]", "\\\\c[d2]", "\\\\c[d3]", "\\\\c[d4]", "\\\\c[d5]", "\\\\c[d6]", "\\\\c[d7]", "\\\\c[d8]", "\\\\c[d9]", "\\\\c[d0]",
"\\\\c[e1]", "\\\\c[e2]", "\\\\c[e3]", "\\\\c[e4]", "\\\\c[e5]", "\\\\c[e6]", "\\\\c[e7]", "\\\\c[e8]", "\\\\c[e9]", "\\\\c[e0]",
"\\\\c[f1]", "\\\\c[f2]", "\\\\c[f3]", "\\\\c[f4]", "\\\\c[f5]", "\\\\c[f6]", "\\\\c[f7]", "\\\\c[f8]", "\\\\c[f9]", "\\\\c[f0]",
"\\\\c[g1]", "\\\\c[g2]", "\\\\c[g3]", "\\\\c[g4]", "\\\\c[g5]", "\\\\c[g6]", "\\\\c[g7]", "\\\\c[g8]", "\\\\c[g9]", "\\\\c[g0]",
"\\\\c[h1]", "\\\\c[h2]", "\\\\c[h3]", "\\\\c[h4]", "\\\\c[h5]", "\\\\c[h6]", "\\\\c[h7]", "\\\\c[h8]", "\\\\c[h9]", "\\\\c[h0]",
"\\\\c[i1]", "\\\\c[i2]", "\\\\c[i3]", "\\\\c[i4]", "\\\\c[i5]", "\\\\c[i6]", "\\\\c[i7]", "\\\\c[i8]", "\\\\c[i9]", "\\\\c[i0]",
"\\\\c[j1]", "\\\\c[j2]", "\\\\c[j3]", "\\\\c[j4]", "\\\\c[j5]", "\\\\c[j6]", "\\\\c[j7]", "\\\\c[j8]", "\\\\c[j9]", "\\\\c[j0]",
"\\\\c[k1]", "\\\\c[k2]", "\\\\c[k3]", "\\\\c[k4]", "\\\\c[k5]", "\\\\c[k6]", "\\\\c[k7]", "\\\\c[k8]", "\\\\c[k9]", "\\\\c[k0]",
"\\\\c[l1]", "\\\\c[l2]", "\\\\c[l3]", "\\\\c[l4]", "\\\\c[l5]", "\\\\c[l6]", "\\\\c[l7]", "\\\\c[l8]", "\\\\c[l9]", "\\\\c[l0]",
"\\\\c[m1]", "\\\\c[m2]", "\\\\c[m3]", "\\\\c[m4]", "\\\\c[m5]", "\\\\c[m6]", "\\\\c[m7]", "\\\\c[m8]", "\\\\c[m9]", "\\\\c[m0]",
"\\\\c[n1]", "\\\\c[n2]", "\\\\c[n3]", "\\\\c[n4]", "\\\\c[n5]", "\\\\c[n6]", "\\\\c[n7]", "\\\\c[n8]", "\\\\c[n9]", "\\\\c[n0]",
"\\\\c[o1]", "\\\\c[o2]", "\\\\c[o3]", "\\\\c[o4]", "\\\\c[o5]", "\\\\c[o6]", "\\\\c[o7]", "\\\\c[o8]", "\\\\c[o9]", "\\\\c[o0]",
"\\\\c[p1]", "\\\\c[p2]", "\\\\c[p3]", "\\\\c[p4]", "\\\\c[p5]", "\\\\c[p6]", "\\\\c[p7]", "\\\\c[p8]", "\\\\c[p9]", "\\\\c[p0]",
"\\\\c[q1]", "\\\\c[q2]", "\\\\c[q3]", "\\\\c[q4]", "\\\\c[q5]", "\\\\c[q6]", "\\\\c[q7]", "\\\\c[q8]", "\\\\c[q9]", "\\\\c[q0]",
"\\\\c[r1]", "\\\\c[r2]", "\\\\c[r3]", "\\\\c[r4]", "\\\\c[r5]", "\\\\c[r6]", "\\\\c[r7]", "\\\\c[r8]", "\\\\c[r9]", "\\\\c[r0]",
"\\\\c[s1]", "\\\\c[s2]", "\\\\c[s3]", "\\\\c[s4]", "\\\\c[s5]", "\\\\c[s6]", "\\\\c[s7]", "\\\\c[s8]", "\\\\c[s9]", "\\\\c[s0]",
"\\\\c[t1]", "\\\\c[t2]", "\\\\c[t3]", "\\\\c[t4]", "\\\\c[t5]", "\\\\c[t6]", "\\\\c[t7]", "\\\\c[t8]", "\\\\c[t9]", "\\\\c[t0]",
"\\\\c[u1]", "\\\\c[u2]", "\\\\c[u3]", "\\\\c[u4]", "\\\\c[u5]", "\\\\c[u6]", "\\\\c[u7]", "\\\\c[u8]", "\\\\c[u9]", "\\\\c[u0]",
"\\\\c[v1]", "\\\\c[v2]", "\\\\c[v3]", "\\\\c[v4]", "\\\\c[v5]", "\\\\c[v6]", "\\\\c[v7]", "\\\\c[v8]", "\\\\c[v9]", "\\\\c[v0]",
"\\\\c[w1]", "\\\\c[w2]", "\\\\c[w3]", "\\\\c[w4]", "\\\\c[w5]", "\\\\c[w6]", "\\\\c[w7]", "\\\\c[w8]", "\\\\c[w9]", "\\\\c[w0]",
"\\\\c[x1]", "\\\\c[x2]", "\\\\c[x3]", "\\\\c[x4]", "\\\\c[x5]", "\\\\c[x6]", "\\\\c[x7]", "\\\\c[x8]", "\\\\c[x9]", "\\\\c[x0]",
"\\\\c[y1]", "\\\\c[y2]", "\\\\c[y3]", "\\\\c[y4]", "\\\\c[y5]", "\\\\c[y6]", "\\\\c[y7]", "\\\\c[y8]", "\\\\c[y9]", "\\\\c[y0]",
"\\\\c[z1]", "\\\\c[z2]", "\\\\c[z3]", "\\\\c[z4]", "\\\\c[z5]", "\\\\c[z6]", "\\\\c[z7]", "\\\\c[z8]", "\\\\c[z9]", "\\\\c[z0]",
"\\\\c[-1]", "\\\\c[-2]", "\\\\c[-3]", "\\\\c[-4]", "\\\\c[-5]", "\\\\c[-6]", "\\\\c[-7]", "\\\\c[-8]", "\\\\c[-9]", "\\\\c[-0]",
"\\c[a1]", "\\c[a2]", "\\c[a3]", "\\c[a4]", "\\c[a5]", "\\c[a6]", "\\c[a7]", "\\c[a8]", "\\c[a9]", "\\c[a0]",
"\\c[b1]", "\\c[b2]", "\\c[b3]", "\\c[b4]", "\\c[b5]", "\\c[b6]", "\\c[b7]", "\\c[b8]", "\\c[b9]", "\\c[b0]",
"\\c[c1]", "\\c[c2]", "\\c[c3]", "\\c[c4]", "\\c[c5]", "\\c[c6]", "\\c[c7]", "\\c[c8]", "\\c[c9]", "\\c[c0]",
"\\c[d1]", "\\c[d2]", "\\c[d3]", "\\c[d4]", "\\c[d5]", "\\c[d6]", "\\c[d7]", "\\c[d8]", "\\c[d9]", "\\c[d0]",
"\\c[e1]", "\\c[e2]", "\\c[e3]", "\\c[e4]", "\\c[e5]", "\\c[e6]", "\\c[e7]", "\\c[e8]", "\\c[e9]", "\\c[e0]",
"\\c[f1]", "\\c[f2]", "\\c[f3]", "\\c[f4]", "\\c[f5]", "\\c[f6]", "\\c[f7]", "\\c[f8]", "\\c[f9]", "\\c[f0]",
"\\c[g1]", "\\c[g2]", "\\c[g3]", "\\c[g4]", "\\c[g5]", "\\c[g6]", "\\c[g7]", "\\c[g8]", "\\c[g9]", "\\c[g0]",
"\\c[h1]", "\\c[h2]", "\\c[h3]", "\\c[h4]", "\\c[h5]", "\\c[h6]", "\\c[h7]", "\\c[h8]", "\\c[h9]", "\\c[h0]",
"\\c[i1]", "\\c[i2]", "\\c[i3]", "\\c[i4]", "\\c[i5]", "\\c[i6]", "\\c[i7]", "\\c[i8]", "\\c[i9]", "\\c[i0]",
"\\c[j1]", "\\c[j2]", "\\c[j3]", "\\c[j4]", "\\c[j5]", "\\c[j6]", "\\c[j7]", "\\c[j8]", "\\c[j9]", "\\c[j0]",
"\\c[k1]", "\\c[k2]", "\\c[k3]", "\\c[k4]", "\\c[k5]", "\\c[k6]", "\\c[k7]", "\\c[k8]", "\\c[k9]", "\\c[k0]",
"\\c[l1]", "\\c[l2]", "\\c[l3]", "\\c[l4]", "\\c[l5]", "\\c[l6]", "\\c[l7]", "\\c[l8]", "\\c[l9]", "\\c[l0]",
"\\c[m1]", "\\c[m2]", "\\c[m3]", "\\c[m4]", "\\c[m5]", "\\c[m6]", "\\c[m7]", "\\c[m8]", "\\c[m9]", "\\c[m0]",
"\\c[n1]", "\\c[n2]", "\\c[n3]", "\\c[n4]", "\\c[n5]", "\\c[n6]", "\\c[n7]", "\\c[n8]", "\\c[n9]", "\\c[n0]",
"\\c[o1]", "\\c[o2]", "\\c[o3]", "\\c[o4]", "\\c[o5]", "\\c[o6]", "\\c[o7]", "\\c[o8]", "\\c[o9]", "\\c[o0]",
"\\c[p1]", "\\c[p2]", "\\c[p3]", "\\c[p4]", "\\c[p5]", "\\c[p6]", "\\c[p7]", "\\c[p8]", "\\c[p9]", "\\c[p0]",
"\\c[q1]", "\\c[q2]", "\\c[q3]", "\\c[q4]", "\\c[q5]", "\\c[q6]", "\\c[q7]", "\\c[q8]", "\\c[q9]", "\\c[q0]",
"\\c[r1]", "\\c[r2]", "\\c[r3]", "\\c[r4]", "\\c[r5]", "\\c[r6]", "\\c[r7]", "\\c[r8]", "\\c[r9]", "\\c[r0]",
"\\c[s1]", "\\c[s2]", "\\c[s3]", "\\c[s4]", "\\c[s5]", "\\c[s6]", "\\c[s7]", "\\c[s8]", "\\c[s9]", "\\c[s0]",
"\\c[t1]", "\\c[t2]", "\\c[t3]", "\\c[t4]", "\\c[t5]", "\\c[t6]", "\\c[t7]", "\\c[t8]", "\\c[t9]", "\\c[t0]",
"\\c[u1]", "\\c[u2]", "\\c[u3]", "\\c[u4]", "\\c[u5]", "\\c[u6]", "\\c[u7]", "\\c[u8]", "\\c[u9]", "\\c[u0]",
"\\c[v1]", "\\c[v2]", "\\c[v3]", "\\c[v4]", "\\c[v5]", "\\c[v6]", "\\c[v7]", "\\c[v8]", "\\c[v9]", "\\c[v0]",
"\\c[w1]", "\\c[w2]", "\\c[w3]", "\\c[w4]", "\\c[w5]", "\\c[w6]", "\\c[w7]", "\\c[w8]", "\\c[w9]", "\\c[w0]",
"\\c[x1]", "\\c[x2]", "\\c[x3]", "\\c[x4]", "\\c[x5]", "\\c[x6]", "\\c[x7]", "\\c[x8]", "\\c[x9]", "\\c[x0]",
"\\c[y1]", "\\c[y2]", "\\c[y3]", "\\c[y4]", "\\c[y5]", "\\c[y6]", "\\c[y7]", "\\c[y8]", "\\c[y9]", "\\c[y0]",
"\\c[z1]", "\\c[z2]", "\\c[z3]", "\\c[z4]", "\\c[z5]", "\\c[z6]", "\\c[z7]", "\\c[z8]", "\\c[z9]", "\\c[z0]",
"\\c[-1]", "\\c[-2]", "\\c[-3]", "\\c[-4]", "\\c[-5]", "\\c[-6]", "\\c[-7]", "\\c[-8]", "\\c[-9]", "\\c[-0]",
"\\c[A1]", "\\c[A2]", "\\c[A3]", "\\c[A4]", "\\c[A5]", "\\c[A6]", "\\c[A7]", "\\c[A8]", "\\c[A9]", "\\c[A0]",
"\\c[B1]", "\\c[B2]", "\\c[B3]", "\\c[B4]", "\\c[B5]", "\\c[B6]", "\\c[B7]", "\\c[B8]", "\\c[B9]", "\\c[B0]",
"\\c[C1]", "\\c[C2]", "\\c[C3]", "\\c[C4]", "\\c[C5]", "\\c[C6]", "\\c[C7]", "\\c[C8]", "\\c[C9]", "\\c[C0]",
"\\c[D1]", "\\c[D2]", "\\c[D3]", "\\c[D4]", "\\c[D5]", "\\c[D6]", "\\c[D7]", "\\c[D8]", "\\c[D9]", "\\c[D0]",
"\\c[E1]", "\\c[E2]", "\\c[E3]", "\\c[E4]", "\\c[E5]", "\\c[E6]", "\\c[E7]", "\\c[E8]", "\\c[E9]", "\\c[E0]",
"\\c[F1]", "\\c[F2]", "\\c[F3]", "\\c[F4]", "\\c[F5]", "\\c[F6]", "\\c[F7]", "\\c[F8]", "\\c[F9]", "\\c[F0]",
"\\c[G1]", "\\c[G2]", "\\c[G3]", "\\c[G4]", "\\c[G5]", "\\c[G6]", "\\c[G7]", "\\c[G8]", "\\c[G9]", "\\c[G0]",
"\\c[H1]", "\\c[H2]", "\\c[H3]", "\\c[H4]", "\\c[H5]", "\\c[H6]", "\\c[H7]", "\\c[H8]", "\\c[H9]", "\\c[H0]",
"\\c[I1]", "\\c[I2]", "\\c[I3]", "\\c[I4]", "\\c[I5]", "\\c[I6]", "\\c[I7]", "\\c[I8]", "\\c[I9]", "\\c[I0]",
"\\c[J1]", "\\c[J2]", "\\c[J3]", "\\c[J4]", "\\c[J5]", "\\c[J6]", "\\c[J7]", "\\c[J8]", "\\c[J9]", "\\c[J0]",
"\\c[K1]", "\\c[K2]", "\\c[K3]", "\\c[K4]", "\\c[K5]", "\\c[K6]", "\\c[K7]", "\\c[K8]", "\\c[K9]", "\\c[K0]",
"\\c[L1]", "\\c[L2]", "\\c[L3]", "\\c[L4]", "\\c[L5]", "\\c[L6]", "\\c[L7]", "\\c[L8]", "\\c[L9]", "\\c[L0]",
"\\c[M1]", "\\c[M2]", "\\c[M3]", "\\c[M4]", "\\c[M5]", "\\c[M6]", "\\c[M7]", "\\c[M8]", "\\c[M9]", "\\c[M0]",
"\\c[N1]", "\\c[N2]", "\\c[N3]", "\\c[N4]", "\\c[N5]", "\\c[N6]", "\\c[N7]", "\\c[N8]", "\\c[N9]", "\\c[N0]",
"\\c[O1]", "\\c[O2]", "\\c[O3]", "\\c[O4]", "\\c[O5]", "\\c[O6]", "\\c[O7]", "\\c[O8]", "\\c[O9]", "\\c[O0]",
"\\c[P1]", "\\c[P2]", "\\c[P3]", "\\c[P4]", "\\c[P5]", "\\c[P6]", "\\c[P7]", "\\c[P8]", "\\c[P9]", "\\c[P0]",
"\\c[Q1]", "\\c[Q2]", "\\c[Q3]", "\\c[Q4]", "\\c[Q5]", "\\c[Q6]", "\\c[Q7]", "\\c[Q8]", "\\c[Q9]", "\\c[Q0]",
"\\c[R1]", "\\c[R2]", "\\c[R3]", "\\c[R4]", "\\c[R5]", "\\c[R6]", "\\c[R7]", "\\c[R8]", "\\c[R9]", "\\c[R0]",
"\\c[S1]", "\\c[S2]", "\\c[S3]", "\\c[S4]", "\\c[S5]", "\\c[S6]", "\\c[S7]", "\\c[S8]", "\\c[S9]", "\\c[S0]",
"\\c[T1]", "\\c[T2]", "\\c[T3]", "\\c[T4]", "\\c[T5]", "\\c[T6]", "\\c[T7]", "\\c[T8]", "\\c[T9]", "\\c[T0]",
"\\c[U1]", "\\c[U2]", "\\c[U3]", "\\c[U4]", "\\c[U5]", "\\c[U6]", "\\c[U7]", "\\c[U8]", "\\c[U9]", "\\c[U0]",
"\\c[V1]", "\\c[V2]", "\\c[V3]", "\\c[V4]", "\\c[V5]", "\\c[V6]", "\\c[V7]", "\\c[V8]", "\\c[V9]", "\\c[V0]",
"\\c[W1]", "\\c[W2]", "\\c[W3]", "\\c[W4]", "\\c[W5]", "\\c[W6]", "\\c[W7]", "\\c[W8]", "\\c[W9]", "\\c[W0]",
"\\c[X1]", "\\c[X2]", "\\c[X3]", "\\c[X4]", "\\c[X5]", "\\c[X6]", "\\c[X7]", "\\c[X8]", "\\c[X9]", "\\c[X0]",
"\\c[Y1]", "\\c[Y2]", "\\c[Y3]", "\\c[Y4]", "\\c[Y5]", "\\c[Y6]", "\\c[Y7]", "\\c[Y8]", "\\c[Y9]", "\\c[Y0]",
"\\c[Z1]", "\\c[Z2]", "\\c[Z3]", "\\c[Z4]", "\\c[Z5]", "\\c[Z6]", "\\c[Z7]", "\\c[Z8]", "\\c[Z9]", "\\c[Z0]",
"\\c[-1]", "\\c[-2]", "\\c[-3]", "\\c[-4]", "\\c[-5]", "\\c[-6]", "\\c[-7]", "\\c[-8]", "\\c[-9]", "\\c[-0]",
"\\ca", "\\cb", "\\cc", "\\cd", "\\ce", "\\cf", "\\cg", "\\ch", "\\ci", "\\cj", "\\ck", "\\cl", "\\cm", "\\cn", "\\co", "\\cp", "\\cq", "\\cr", "\\cs", "\\ct", "\\cu", "\\cv", "\\cw", "\\cx", "\\cy", "\\cz", "\\c-",
"\\cA", "\\cB", "\\cC", "\\cD", "\\cE", "\\cF", "\\cG", "\\cH", "\\cI", "\\cJ", "\\cK", "\\cL", "\\cM", "\\cN", "\\cO", "\\cP", "\\cQ", "\\cR", "\\cS", "\\cT", "\\cU", "\\cV", "\\cW", "\\cX", "\\cY", "\\cZ", "\\c-",
"\\c",
]
def get_color_less(the_string: str) -> str:
    '''Strip every known color-code token from *the_string*.

    Each entry of the module-level ``clist`` is removed with
    ``str.replace``. The double-escaped variants appear first in
    ``clist``, so longer tokens are stripped before their shorter forms.
    '''
    for i in clist:
        the_string = the_string.replace(i, '')
    return the_string
| 102.927928
| 218
| 0.265558
| 1,866
| 11,425
| 1.622722
| 0.21329
| 0.011889
| 0.003963
| 0.005945
| 0.827609
| 0.827609
| 0.827609
| 0.827609
| 0.827609
| 0.827609
| 0
| 0.094721
| 0.11291
| 11,425
| 110
| 219
| 103.863636
| 0.204045
| 0.005252
| 0
| 0.020202
| 0
| 0
| 0.634916
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010101
| false
| 0
| 0
| 0
| 0.020202
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b3ba998fe58557747208404d7d1e3835a1e6b41b
| 1,240
|
py
|
Python
|
tests/test_Bankroll.py
|
wdm0006/keeks
|
ca10d1579932799160bc53bedb19a76330986da0
|
[
"MIT"
] | 1
|
2021-05-17T21:06:04.000Z
|
2021-05-17T21:06:04.000Z
|
tests/test_Bankroll.py
|
helton-tech/keeks
|
ca10d1579932799160bc53bedb19a76330986da0
|
[
"MIT"
] | null | null | null |
tests/test_Bankroll.py
|
helton-tech/keeks
|
ca10d1579932799160bc53bedb19a76330986da0
|
[
"MIT"
] | null | null | null |
import unittest
from keeks.bankroll import BankRoll
from keeks.utils import RuinException
class TestBankroll(unittest.TestCase):
    """Exercises deposits, withdrawals and the drawdown limit of BankRoll."""

    def _check_funds(self, bankroll, bettable, total):
        # Assert both views of the bankroll's money in one place.
        self.assertEqual(bankroll.bettable_funds, bettable)
        self.assertEqual(bankroll.total_funds, total)

    def test_Transactions(self):
        # With percent_bettable=1 the bettable and total funds track together.
        bankroll = BankRoll(initial_funds=1000, percent_bettable=1, max_draw_down=1)
        self._check_funds(bankroll, 1000, 1000)
        bankroll.deposit(1000)
        self._check_funds(bankroll, 2000, 2000)
        bankroll.withdraw(500)
        self._check_funds(bankroll, 1500, 1500)

    def test_PercentBettable(self):
        # Only half of the total should ever be exposed as bettable.
        bankroll = BankRoll(initial_funds=1000, percent_bettable=0.5, max_draw_down=1)
        self._check_funds(bankroll, 500, 1000)
        bankroll.deposit(1000)
        self._check_funds(bankroll, 1000, 2000)
        bankroll.withdraw(500)
        self._check_funds(bankroll, 750, 1500)

    def test_Drawdown_Limit(self):
        # Withdrawing past the configured max draw down must raise.
        bankroll = BankRoll(initial_funds=1000, percent_bettable=0.5, max_draw_down=0.3)
        with self.assertRaises(RuinException):
            bankroll.withdraw(500)
| 33.513514
| 82
| 0.695968
| 159
| 1,240
| 5.251572
| 0.251572
| 0.215569
| 0.244311
| 0.179641
| 0.735329
| 0.735329
| 0.735329
| 0.735329
| 0.590419
| 0.538922
| 0
| 0.085366
| 0.206452
| 1,240
| 36
| 83
| 34.444444
| 0.763211
| 0
| 0
| 0.464286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.464286
| 1
| 0.107143
| false
| 0
| 0.107143
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b3dac9ce4ab7cab7d6ad7f20a13145532c2f3bf1
| 141,491
|
py
|
Python
|
azure/mgmt/sql/operations/databases_operations.py
|
EnjoyLifeFund/py36pkgs
|
0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 2
|
2020-07-29T14:22:17.000Z
|
2020-11-06T18:47:40.000Z
|
azure/mgmt/sql/operations/databases_operations.py
|
EnjoyLifeFund/py36pkgs
|
0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 1
|
2016-08-01T07:37:04.000Z
|
2016-08-01T07:37:04.000Z
|
azure/mgmt/sql/operations/databases_operations.py
|
EnjoyLifeFund/py36pkgs
|
0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 1
|
2020-12-12T21:04:41.000Z
|
2020-12-12T21:04:41.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
import uuid
from .. import models
class DatabasesOperations(object):
"""DatabasesOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar data_masking_policy_name: The name of the database for which the data masking rule applies. Constant value: "Default".
:ivar geo_backup_policy_name: The name of the geo backup policy. Constant value: "Default".
:ivar extension_name: The name of the operation to perform. Constant value: "import".
:ivar security_alert_policy_name: The name of the security alert policy. Constant value: "default".
:ivar backup_long_term_retention_policy_name: The name of the backup long term retention policy. Constant value: "Default".
:ivar blob_auditing_policy_name: The name of the blob auditing policy. Constant value: "default".
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.data_masking_policy_name = "Default"
self.geo_backup_policy_name = "Default"
self.extension_name = "import"
self.security_alert_policy_name = "default"
self.backup_long_term_retention_policy_name = "Default"
self.blob_auditing_policy_name = "default"
self.config = config
def list_restore_points(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Returns a list of database restore points.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database from which to retrieve
     available restore points.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`RestorePointPaged
     <azure.mgmt.sql.models.RestorePointPaged>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Fixed API version baked in by the code generator.
    api_version = "2014-04-01"

    def internal_paging(next_link=None, raw=False):
        # Fetches one page; invoked lazily by the paged collection below.
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/restorePoints'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'serverName': self._serialize.url("server_name", server_name, 'str'),
                'databaseName': self._serialize.url("database_name", database_name, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        else:
            # Follow-up pages: the service returns a complete next link.
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        # Any status other than 200 is surfaced as a CloudError.
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response
    deserialized = models.RestorePointPaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.RestorePointPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def create_or_update_data_masking_policy(
        self, resource_group_name, server_name, database_name, data_masking_state, exempt_principals=None, custom_headers=None, raw=False, **operation_config):
    """Creates or updates a database data masking policy.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database.
    :type database_name: str
    :param data_masking_state: The state of the data masking policy.
     Possible values include: 'Disabled', 'Enabled'
    :type data_masking_state: str or :class:`DataMaskingState
     <azure.mgmt.sql.models.DataMaskingState>`
    :param exempt_principals: The list of the exempt principals. Specifies
     the semicolon-separated list of database users for which the data
     masking policy does not apply. The specified users receive data
     results without masking for all of the database queries.
    :type exempt_principals: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`DataMaskingPolicy
     <azure.mgmt.sql.models.DataMaskingPolicy>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Wrap the two scalar arguments into the request body model.
    parameters = models.DataMaskingPolicy(data_masking_state=data_masking_state, exempt_principals=exempt_principals)

    api_version = "2014-04-01"

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/dataMaskingPolicies/{dataMaskingPolicyName}'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serverName': self._serialize.url("server_name", server_name, 'str'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'dataMaskingPolicyName': self._serialize.url("self.data_masking_policy_name", self.data_masking_policy_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'DataMaskingPolicy')

    # Construct and send request
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    # Any status other than 200 is surfaced as a CloudError.
    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None

    if response.status_code == 200:
        deserialized = self._deserialize('DataMaskingPolicy', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def get_data_masking_policy(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Gets a database data masking policy.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`DataMaskingPolicy
     <azure.mgmt.sql.models.DataMaskingPolicy>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Fixed API version baked in by the code generator.
    api_version = "2014-04-01"

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/dataMaskingPolicies/{dataMaskingPolicyName}'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serverName': self._serialize.url("server_name", server_name, 'str'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'dataMaskingPolicyName': self._serialize.url("self.data_masking_policy_name", self.data_masking_policy_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    # Any status other than 200 is surfaced as a CloudError.
    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None

    if response.status_code == 200:
        deserialized = self._deserialize('DataMaskingPolicy', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def create_or_update_data_masking_rule(
        self, resource_group_name, server_name, database_name, data_masking_rule_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Creates or updates a database data masking rule.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database.
    :type database_name: str
    :param data_masking_rule_name: The name of the data masking rule.
    :type data_masking_rule_name: str
    :param parameters: The required parameters for creating or updating a
     data masking rule.
    :type parameters: :class:`DataMaskingRule
     <azure.mgmt.sql.models.DataMaskingRule>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`DataMaskingRule
     <azure.mgmt.sql.models.DataMaskingRule>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Fixed API version baked in by the code generator.
    api_version = "2014-04-01"

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/dataMaskingPolicies/{dataMaskingPolicyName}/rules/{dataMaskingRuleName}'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serverName': self._serialize.url("server_name", server_name, 'str'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'dataMaskingPolicyName': self._serialize.url("self.data_masking_policy_name", self.data_masking_policy_name, 'str'),
        'dataMaskingRuleName': self._serialize.url("data_masking_rule_name", data_masking_rule_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'DataMaskingRule')

    # Construct and send request
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    # 200 = updated, 201 = created; anything else is a CloudError.
    if response.status_code not in [200, 201]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None

    if response.status_code == 200:
        deserialized = self._deserialize('DataMaskingRule', response)
    if response.status_code == 201:
        deserialized = self._deserialize('DataMaskingRule', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def list_data_masking_rules(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Gets a list of database data masking rules.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`DataMaskingRulePaged
     <azure.mgmt.sql.models.DataMaskingRulePaged>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Fixed API version baked in by the code generator.
    api_version = "2014-04-01"

    def internal_paging(next_link=None, raw=False):
        # Fetches one page; invoked lazily by the paged collection below.
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/dataMaskingPolicies/{dataMaskingPolicyName}/rules'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'serverName': self._serialize.url("server_name", server_name, 'str'),
                'databaseName': self._serialize.url("database_name", database_name, 'str'),
                'dataMaskingPolicyName': self._serialize.url("self.data_masking_policy_name", self.data_masking_policy_name, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        else:
            # Follow-up pages: the service returns a complete next link.
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        # Any status other than 200 is surfaced as a CloudError.
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response
    deserialized = models.DataMaskingRulePaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.DataMaskingRulePaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def create_or_update_geo_backup_policy(
        self, resource_group_name, server_name, database_name, state, custom_headers=None, raw=False, **operation_config):
    """Updates a database geo backup policy.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database.
    :type database_name: str
    :param state: The state of the geo backup policy. Possible values
     include: 'Disabled', 'Enabled'
    :type state: str or :class:`GeoBackupPolicyState
     <azure.mgmt.sql.models.GeoBackupPolicyState>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`GeoBackupPolicy
     <azure.mgmt.sql.models.GeoBackupPolicy>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Wrap the single scalar argument into the request body model.
    parameters = models.GeoBackupPolicy(state=state)

    api_version = "2014-04-01"

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/geoBackupPolicies/{geoBackupPolicyName}'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serverName': self._serialize.url("server_name", server_name, 'str'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'geoBackupPolicyName': self._serialize.url("self.geo_backup_policy_name", self.geo_backup_policy_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'GeoBackupPolicy')

    # Construct and send request
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    # 201 = created, 200 = updated; anything else is a CloudError.
    if response.status_code not in [201, 200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None

    if response.status_code == 201:
        deserialized = self._deserialize('GeoBackupPolicy', response)
    if response.status_code == 200:
        deserialized = self._deserialize('GeoBackupPolicy', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def get_geo_backup_policy(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Gets a geo backup policy.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`GeoBackupPolicy
     <azure.mgmt.sql.models.GeoBackupPolicy>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the URL template; geoBackupPolicyName is supplied by an
    # attribute held on the operations class (self.geo_backup_policy_name).
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/geoBackupPolicies/{geoBackupPolicyName}',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'),
        geoBackupPolicyName=self._serialize.url("self.geo_backup_policy_name", self.geo_backup_policy_name, 'str'))

    # Query string and request headers.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET and validate the status code.
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('GeoBackupPolicy', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def list_geo_backup_policies(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Returns a list of geo backup policies.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`GeoBackupPolicyPaged
     <azure.mgmt.sql.models.GeoBackupPolicyPaged>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    def internal_paging(next_link=None, raw=False):
        # Page fetcher invoked lazily by the Paged iterator: the first
        # call (next_link is None) builds the full URL; subsequent calls
        # reuse the continuation link returned by the service verbatim.
        # NOTE(review): this inner `raw` shadows the outer `raw` and is
        # never read in this body -- presumably part of the generated
        # paging callback signature; confirm against msrest's Paged class.
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/geoBackupPolicies'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'serverName': self._serialize.url("server_name", server_name, 'str'),
                'databaseName': self._serialize.url("database_name", database_name, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        else:
            # Continuation link already embeds the query string.
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: the Paged object pulls pages on demand via
    # internal_paging; no request is sent until iteration starts.
    deserialized = models.GeoBackupPolicyPaged(internal_paging, self._deserialize.dependencies)

    if raw:
        # Raw mode still returns a Paged object, with an (empty) header
        # dict attached -- no header deserializers are generated here.
        header_dict = {}
        client_raw_response = models.GeoBackupPolicyPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def import_method(
        self, resource_group_name, server_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Imports a bacpac into a new database. .

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param parameters: The required parameters for importing a Bacpac into
     a database.
    :type parameters: :class:`ImportRequest
     <azure.mgmt.sql.models.ImportRequest>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`ImportExportResponse
     <azure.mgmt.sql.models.ImportExportResponse>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/import'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serverName': self._serialize.url("server_name", server_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'ImportRequest')

    # Construct and send request. The three closures below capture url,
    # query_parameters, header_parameters and body_content and are handed
    # to AzureOperationPoller, which drives the long-running operation.
    def long_running_send():
        # Initial POST that starts the import operation.
        request = self._client.post(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the status link the service returned for the operation.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # Validate the terminal response and deserialize the result.
        # 202 (accepted, still running) yields deserialized=None.
        if response.status_code not in [200, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('ImportExportResponse', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    # raw=True short-circuits polling: the initial response is processed
    # directly and no AzureOperationPoller is created.
    if raw:
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def create_import_operation(
        self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Creates an import operation that imports a bacpac into an existing
    database. The existing database must be empty.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database to import into
    :type database_name: str
    :param parameters: The required parameters for importing a Bacpac into
     a database.
    :type parameters: :class:`ImportExtensionRequest
     <azure.mgmt.sql.models.ImportExtensionRequest>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`ImportExportResponse
     <azure.mgmt.sql.models.ImportExportResponse>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Construct URL. extensionName comes from an attribute set on the
    # operations class (self.extension_name) -- presumably a fixed value
    # baked in by the code generator; confirm against the class __init__.
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/extensions/{extensionName}'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serverName': self._serialize.url("server_name", server_name, 'str'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'extensionName': self._serialize.url("self.extension_name", self.extension_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'ImportExtensionRequest')

    # Construct and send request. The closures capture the request pieces
    # built above and are handed to AzureOperationPoller.
    def long_running_send():
        # Initial PUT that creates the extension / starts the import.
        request = self._client.put(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the operation-status link returned by the service.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 201 carries the result payload; 202 means still in progress and
        # yields deserialized=None.
        if response.status_code not in [201, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 201:
            deserialized = self._deserialize('ImportExportResponse', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    # raw=True short-circuits polling and processes the first response.
    if raw:
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def export(
        self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Exports a database to a bacpac.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database to be exported.
    :type database_name: str
    :param parameters: The required parameters for exporting a database.
    :type parameters: :class:`ExportRequest
     <azure.mgmt.sql.models.ExportRequest>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`ImportExportResponse
     <azure.mgmt.sql.models.ImportExportResponse>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/export'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serverName': self._serialize.url("server_name", server_name, 'str'),
        'databaseName': self._serialize.url("database_name", database_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'ExportRequest')

    # Construct and send request. The closures capture the request pieces
    # and are handed to AzureOperationPoller below.
    def long_running_send():
        # Initial POST that starts the export operation.
        request = self._client.post(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the operation-status link returned by the service.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 200 carries the result payload; 202 (still running) yields
        # deserialized=None.
        if response.status_code not in [200, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('ImportExportResponse', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    # raw=True short-circuits polling and processes the first response.
    if raw:
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def list_metrics(
        self, resource_group_name, server_name, database_name, filter, custom_headers=None, raw=False, **operation_config):
    """Returns database metrics.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database.
    :type database_name: str
    :param filter: An OData filter expression that describes a subset of
     metrics to return.
    :type filter: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`MetricPaged <azure.mgmt.sql.models.MetricPaged>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    def internal_paging(next_link=None, raw=False):
        # Page fetcher invoked lazily by the Paged iterator. NOTE: the
        # `filter` parameter shadows the builtin of the same name -- this
        # is the generated API's public parameter name, so it is kept.
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/metrics'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'serverName': self._serialize.url("server_name", server_name, 'str'),
                'databaseName': self._serialize.url("database_name", database_name, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters: $filter narrows which metrics return.
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')

        else:
            # Continuation link already embeds the query string.
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: pages are pulled on demand during iteration.
    deserialized = models.MetricPaged(internal_paging, self._deserialize.dependencies)

    if raw:
        # Raw mode attaches an (empty) header dict to the Paged object.
        header_dict = {}
        client_raw_response = models.MetricPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def list_metric_definitions(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Returns database metric definitions.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`MetricDefinitionPaged
     <azure.mgmt.sql.models.MetricDefinitionPaged>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    def internal_paging(next_link=None, raw=False):
        # Page fetcher invoked lazily by the Paged iterator: first call
        # builds the full URL, later calls reuse the continuation link.
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/metricDefinitions'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'serverName': self._serialize.url("server_name", server_name, 'str'),
                'databaseName': self._serialize.url("database_name", database_name, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        else:
            # Continuation link already embeds the query string.
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: pages are pulled on demand during iteration.
    deserialized = models.MetricDefinitionPaged(internal_paging, self._deserialize.dependencies)

    if raw:
        # Raw mode attaches an (empty) header dict to the Paged object.
        header_dict = {}
        client_raw_response = models.MetricDefinitionPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def delete_replication_link(
        self, resource_group_name, server_name, database_name, link_id, custom_headers=None, raw=False, **operation_config):
    """Deletes a database replication link. Cannot be done during failover.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database that has the
     replication link to be dropped.
    :type database_name: str
    :param link_id: The ID of the replication link to be deleted.
    :type link_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the URL template for the target replication link.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/replicationLinks/{linkId}',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'),
        linkId=self._serialize.url("link_id", link_id, 'str'))

    # Query string and request headers.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the DELETE; 200 and 204 are both success (204: already gone).
    request = self._client.delete(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200, 204]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    # No payload to deserialize; return the raw wrapper only on request.
    if raw:
        return ClientRawResponse(None, response)
def get_replication_link(
        self, resource_group_name, server_name, database_name, link_id, custom_headers=None, raw=False, **operation_config):
    """Gets a database replication link.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database to get the link for.
    :type database_name: str
    :param link_id: The replication link ID to be retrieved.
    :type link_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`ReplicationLink
     <azure.mgmt.sql.models.ReplicationLink>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the URL template for the target replication link.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/replicationLinks/{linkId}',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'),
        linkId=self._serialize.url("link_id", link_id, 'str'))

    # Query string and request headers.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET and validate the status code.
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ReplicationLink', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def failover_replication_link(
        self, resource_group_name, server_name, database_name, link_id, custom_headers=None, raw=False, **operation_config):
    """Sets which replica database is primary by failing over from the current
    primary replica database.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database that has the
     replication link to be failed over.
    :type database_name: str
    :param link_id: The ID of the replication link to be failed over.
    :type link_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/replicationLinks/{linkId}/failover'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serverName': self._serialize.url("server_name", server_name, 'str'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'linkId': self._serialize.url("link_id", link_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request. The closures capture the request pieces
    # and are handed to AzureOperationPoller; no request body is sent.
    def long_running_send():
        # Initial POST that triggers the failover.
        request = self._client.post(url, query_parameters)
        return self._client.send(request, header_parameters, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the operation-status link returned by the service.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 204 (done) and 202 (in progress) are success; there is no
        # response payload, so only the raw wrapper is ever returned.
        if response.status_code not in [204, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response

    # raw=True short-circuits polling and processes the first response.
    if raw:
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def failover_replication_link_allow_data_loss(
        self, resource_group_name, server_name, database_name, link_id, custom_headers=None, raw=False, **operation_config):
    """Sets which replica database is primary by failing over from the
    current primary replica database. This operation might result in data
    loss.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database that has the
     replication link to be failed over.
    :type database_name: str
    :param link_id: The ID of the replication link to be failed over.
    :type link_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the URL template with the serialized path arguments.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/replicationLinks/{linkId}/forceFailoverAllowDataLoss',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'),
        linkId=self._serialize.url("link_id", link_id, 'str'))

    # Query string.
    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}

    # Request headers: content type, optional request id, caller overrides,
    # then the configured accept-language.
    header_params = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_params['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_params.update(custom_headers)
    if self.config.accept_language is not None:
        header_params['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    def long_running_send():
        # Kick off the failover with a POST.
        return self._client.send(
            self._client.post(url, query_params), header_params, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the operation-status endpoint returned by the service.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_params, **operation_config)

    def get_long_running_output(response):
        # 204/202 are the only accepted outcomes; anything else is an error.
        if response.status_code not in [204, 202]:
            error = CloudError(response)
            error.request_id = response.headers.get('x-ms-request-id')
            raise error
        if raw:
            return ClientRawResponse(None, response)

    if raw:
        # Raw mode skips polling and returns the initial response wrapper.
        return get_long_running_output(long_running_send())

    timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, timeout)
def list_replication_links(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Lists a database's replication links.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database to retrieve links for.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`ReplicationLinkPaged
     <azure.mgmt.sql.models.ReplicationLinkPaged>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    def internal_paging(next_link=None, raw=False):
        # Page fetcher handed to the Paged collection: the first call
        # builds the collection URL; later calls follow the service's
        # nextLink verbatim (which already carries its query string).
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/replicationLinks'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'serverName': self._serialize.url("server_name", server_name, 'str'),
                'databaseName': self._serialize.url("database_name", database_name, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: the Paged wrapper calls internal_paging lazily
    # as the caller iterates.
    deserialized = models.ReplicationLinkPaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.ReplicationLinkPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def pause(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Pauses a data warehouse.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the data warehouse to pause.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the URL template with the serialized path arguments.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/pause',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'))

    # Query string.
    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}

    # Request headers.
    header_params = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_params['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_params.update(custom_headers)
    if self.config.accept_language is not None:
        header_params['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    def long_running_send():
        # Start the pause operation with a POST.
        return self._client.send(
            self._client.post(url, query_params), header_params, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the operation-status endpoint returned by the service.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_params, **operation_config)

    def get_long_running_output(response):
        # 200/202 are the only accepted outcomes; anything else is an error.
        if response.status_code not in [200, 202]:
            error = CloudError(response)
            error.request_id = response.headers.get('x-ms-request-id')
            raise error
        if raw:
            return ClientRawResponse(None, response)

    if raw:
        # Raw mode skips polling and returns the initial response wrapper.
        return get_long_running_output(long_running_send())

    timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, timeout)
def resume(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Resumes a data warehouse.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the data warehouse to resume.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the URL template with the serialized path arguments.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/resume',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'))

    # Query string.
    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}

    # Request headers.
    header_params = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_params['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_params.update(custom_headers)
    if self.config.accept_language is not None:
        header_params['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    def long_running_send():
        # Start the resume operation with a POST.
        return self._client.send(
            self._client.post(url, query_params), header_params, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the operation-status endpoint returned by the service.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_params, **operation_config)

    def get_long_running_output(response):
        # 202/200 are the only accepted outcomes; anything else is an error.
        if response.status_code not in [202, 200]:
            error = CloudError(response)
            error.request_id = response.headers.get('x-ms-request-id')
            raise error
        if raw:
            return ClientRawResponse(None, response)

    if raw:
        # Raw mode skips polling and returns the initial response wrapper.
        return get_long_running_output(long_running_send())

    timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, timeout)
def create_or_update(
        self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Creates a new database or updates an existing database. Location is a
    required property in the request body, and it must be the same as the
    location of the SQL server.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database to be operated on
     (updated or created).
    :type database_name: str
    :param parameters: The required parameters for creating or updating a
     database.
    :type parameters: :class:`Database <azure.mgmt.sql.models.Database>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`Database
     <azure.mgmt.sql.models.Database>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serverName': self._serialize.url("server_name", server_name, 'str'),
        'databaseName': self._serialize.url("database_name", database_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body: serialize the Database model into the PUT payload.
    body_content = self._serialize.body(parameters, 'Database')

    # Construct and send request
    def long_running_send():
        # Initial PUT that starts the create/update operation.
        request = self._client.put(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the operation-status endpoint returned by the service.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 200 (updated) and 201 (created) carry a Database body; 202 means
        # the operation is still in flight and yields no body.
        if response.status_code not in [200, 201, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('Database', response)
        if response.status_code == 201:
            deserialized = self._deserialize('Database', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    if raw:
        # Raw mode skips polling and processes the initial response only.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def delete(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Deletes a database.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database to be deleted.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the URL template with the serialized path arguments.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'))

    # Query string.
    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}

    # Request headers.
    header_params = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_params['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_params.update(custom_headers)
    if self.config.accept_language is not None:
        header_params['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the DELETE and validate the status code.
    response = self._client.send(
        self._client.delete(url, query_params), header_params, **operation_config)

    if response.status_code not in [200, 204]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
def get(
        self, resource_group_name, server_name, database_name, expand=None, custom_headers=None, raw=False, **operation_config):
    """Gets a database.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database to be retrieved.
    :type database_name: str
    :param expand: A comma separated list of child objects to expand in
     the response. Possible properties: serviceTierAdvisors,
     transparentDataEncryption.
    :type expand: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`Database <azure.mgmt.sql.models.Database>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the URL template with the serialized path arguments.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'))

    # Query string; $expand is only sent when the caller asked for it.
    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}
    if expand is not None:
        query_params['$expand'] = self._serialize.query("expand", expand, 'str')

    # Request headers.
    header_params = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_params['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_params.update(custom_headers)
    if self.config.accept_language is not None:
        header_params['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET and validate the status code.
    response = self._client.send(
        self._client.get(url, query_params), header_params, **operation_config)

    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('Database', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def list_by_server(
        self, resource_group_name, server_name, expand=None, filter=None, custom_headers=None, raw=False, **operation_config):
    """Returns a list of databases in a server.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param expand: A comma separated list of child objects to expand in
     the response. Possible properties: serviceTierAdvisors,
     transparentDataEncryption.
    :type expand: str
    :param filter: An OData filter expression that describes a subset of
     databases to return.
    :type filter: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`DatabasePaged <azure.mgmt.sql.models.DatabasePaged>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    def internal_paging(next_link=None, raw=False):
        # Page fetcher handed to the Paged collection: the first call
        # builds the collection URL (with optional $expand/$filter); later
        # calls follow the service's nextLink verbatim.
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'serverName': self._serialize.url("server_name", server_name, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
            if filter is not None:
                query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: the Paged wrapper calls internal_paging lazily
    # as the caller iterates.
    deserialized = models.DatabasePaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.DatabasePaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def get_service_tier_advisor(
        self, resource_group_name, server_name, database_name, service_tier_advisor_name, custom_headers=None, raw=False, **operation_config):
    """Gets a service tier advisor.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of database.
    :type database_name: str
    :param service_tier_advisor_name: The name of service tier advisor.
    :type service_tier_advisor_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`ServiceTierAdvisor
     <azure.mgmt.sql.models.ServiceTierAdvisor>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the URL template with the serialized path arguments.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/serviceTierAdvisors/{serviceTierAdvisorName}',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'),
        serviceTierAdvisorName=self._serialize.url("service_tier_advisor_name", service_tier_advisor_name, 'str'))

    # Query string.
    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}

    # Request headers.
    header_params = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_params['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_params.update(custom_headers)
    if self.config.accept_language is not None:
        header_params['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET and validate the status code.
    response = self._client.send(
        self._client.get(url, query_params), header_params, **operation_config)

    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ServiceTierAdvisor', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def list_service_tier_advisors(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Returns service tier advisors for specified database.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of database.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`ServiceTierAdvisorPaged
     <azure.mgmt.sql.models.ServiceTierAdvisorPaged>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    def internal_paging(next_link=None, raw=False):
        # Page fetcher handed to the Paged collection: the first call
        # builds the collection URL; later calls follow the service's
        # nextLink verbatim (which already carries its query string).
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/serviceTierAdvisors'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'serverName': self._serialize.url("server_name", server_name, 'str'),
                'databaseName': self._serialize.url("database_name", database_name, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: the Paged wrapper calls internal_paging lazily
    # as the caller iterates.
    deserialized = models.ServiceTierAdvisorPaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.ServiceTierAdvisorPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def create_or_update_transparent_data_encryption_configuration(
        self, resource_group_name, server_name, database_name, status=None, custom_headers=None, raw=False, **operation_config):
    """Creates or updates a database's transparent data encryption
    configuration.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database for which setting the
     transparent data encryption applies.
    :type database_name: str
    :param status: The status of the database transparent data encryption.
     Possible values include: 'Enabled', 'Disabled'
    :type status: str or :class:`TransparentDataEncryptionStatus
     <azure.mgmt.sql.models.TransparentDataEncryptionStatus>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`TransparentDataEncryption
     <azure.mgmt.sql.models.TransparentDataEncryption>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Wrap the single writable property in its model for serialization.
    parameters = models.TransparentDataEncryption(status=status)
    api_version = "2014-04-01"

    # Expand the resource identifiers into the request URL.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/transparentDataEncryption/current',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }

    # Standard headers; caller-supplied custom_headers may override the
    # generated ones, while accept-language is applied last when configured.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the body and issue the PUT.
    body_content = self._serialize.body(parameters, 'TransparentDataEncryption')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code not in (200, 201):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    # Both 200 (updated) and 201 (created) carry the resource payload.
    deserialized = None
    if response.status_code in (200, 201):
        deserialized = self._deserialize('TransparentDataEncryption', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def get_transparent_data_encryption_configuration(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Gets a database's transparent data encryption configuration.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database for which the
     transparent data encryption applies.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`TransparentDataEncryption
     <azure.mgmt.sql.models.TransparentDataEncryption>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the resource identifiers into the request URL.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/transparentDataEncryption/current',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }

    # Standard headers; caller-supplied custom_headers may override the
    # generated ones, while accept-language is applied last when configured.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET and surface any non-200 status as CloudError.
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code != 200:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = self._deserialize('TransparentDataEncryption', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def list_transparent_data_encryption_activity(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Returns a database's transparent data encryption operation result.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database for which the
     transparent data encryption applies.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`TransparentDataEncryptionActivityPaged
     <azure.mgmt.sql.models.TransparentDataEncryptionActivityPaged>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    def internal_paging(next_link=None, raw=False):
        # Page fetcher handed to the Paged model below: the first call
        # builds the collection URL; later calls follow the service's
        # next_link verbatim (its query string is already embedded).
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/transparentDataEncryption/current/operationResults'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'serverName': self._serialize.url("server_name", server_name, 'str'),
                'databaseName': self._serialize.url("database_name", database_name, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers; custom_headers may override the generated
        # ones, accept-language is applied last when configured.
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: the Paged model invokes internal_paging lazily
    # as the caller iterates, one HTTP request per page.
    deserialized = models.TransparentDataEncryptionActivityPaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.TransparentDataEncryptionActivityPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def get_threat_detection_policy(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Gets a database's threat detection policy.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database for which database
     Threat Detection policy is defined.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`DatabaseSecurityAlertPolicy
     <azure.mgmt.sql.models.DatabaseSecurityAlertPolicy>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the resource identifiers into the request URL; the policy
    # name segment comes from this operations class, not the caller.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/securityAlertPolicies/{securityAlertPolicyName}',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'),
        securityAlertPolicyName=self._serialize.url("self.security_alert_policy_name", self.security_alert_policy_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }

    # Standard headers; caller-supplied custom_headers may override the
    # generated ones, while accept-language is applied last when configured.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET and surface any non-200 status as CloudError.
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code != 200:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = self._deserialize('DatabaseSecurityAlertPolicy', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def create_or_update_threat_detection_policy(
        self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Creates or updates a database's threat detection policy.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database for which database
     Threat Detection policy is defined.
    :type database_name: str
    :param parameters: The database Threat Detection policy.
    :type parameters: :class:`DatabaseSecurityAlertPolicy
     <azure.mgmt.sql.models.DatabaseSecurityAlertPolicy>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`DatabaseSecurityAlertPolicy
     <azure.mgmt.sql.models.DatabaseSecurityAlertPolicy>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the resource identifiers into the request URL; the policy
    # name segment comes from this operations class, not the caller.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/securityAlertPolicies/{securityAlertPolicyName}',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'),
        securityAlertPolicyName=self._serialize.url("self.security_alert_policy_name", self.security_alert_policy_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }

    # Standard headers; caller-supplied custom_headers may override the
    # generated ones, while accept-language is applied last when configured.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the policy and issue the PUT.
    body_content = self._serialize.body(parameters, 'DatabaseSecurityAlertPolicy')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code not in (200, 201):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    # Both 200 (updated) and 201 (created) carry the resource payload.
    deserialized = None
    if response.status_code in (200, 201):
        deserialized = self._deserialize('DatabaseSecurityAlertPolicy', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def get_backup_long_term_retention_policy(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Returns a database backup long term retention policy.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`BackupLongTermRetentionPolicy
     <azure.mgmt.sql.models.BackupLongTermRetentionPolicy>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    # Expand the resource identifiers into the request URL; the policy
    # name segment comes from this operations class, not the caller.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/backupLongTermRetentionPolicies/{backupLongTermRetentionPolicyName}',
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'),
        backupLongTermRetentionPolicyName=self._serialize.url("self.backup_long_term_retention_policy_name", self.backup_long_term_retention_policy_name, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }

    # Standard headers; caller-supplied custom_headers may override the
    # generated ones, while accept-language is applied last when configured.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET and surface any non-200 status as CloudError.
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code != 200:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = self._deserialize('BackupLongTermRetentionPolicy', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def create_or_update_long_term_retention_policy(
        self, resource_group_name, server_name, database_name, state, recovery_services_backup_policy_resource_id, custom_headers=None, raw=False, **operation_config):
    """Creates or updates a database backup long term retention policy.

    Long-running operation: unless raw=True, returns a poller that drives
    the operation to completion.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database
    :type database_name: str
    :param state: The status of the backup long term retention policy.
     Possible values include: 'Disabled', 'Enabled'
    :type state: str or :class:`BackupLongTermRetentionPolicyState
     <azure.mgmt.sql.models.BackupLongTermRetentionPolicyState>`
    :param recovery_services_backup_policy_resource_id: The azure recovery
     services backup protection policy resource id
    :type recovery_services_backup_policy_resource_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`BackupLongTermRetentionPolicy
     <azure.mgmt.sql.models.BackupLongTermRetentionPolicy>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Fold the two scalar arguments into the request model.
    parameters = models.BackupLongTermRetentionPolicy(state=state, recovery_services_backup_policy_resource_id=recovery_services_backup_policy_resource_id)
    api_version = "2014-04-01"

    # Construct URL; the policy name segment comes from this operations
    # class, not the caller.
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/backupLongTermRetentionPolicies/{backupLongTermRetentionPolicyName}'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serverName': self._serialize.url("server_name", server_name, 'str'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'backupLongTermRetentionPolicyName': self._serialize.url("self.backup_long_term_retention_policy_name", self.backup_long_term_retention_policy_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers; custom_headers may override the generated ones,
    # accept-language is applied last when configured.
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'BackupLongTermRetentionPolicy')

    # Construct and send request. The three closures below capture the
    # url/headers/body built above and are handed to AzureOperationPoller.
    def long_running_send():
        # Initial PUT that starts the long-running operation.
        request = self._client.put(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Polls the status URL the service returned for the operation.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # Final-response handler: raises CloudError on failure, otherwise
        # deserializes the policy (200/201) or returns None (202).
        if response.status_code not in [200, 201, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('BackupLongTermRetentionPolicy', response)
        if response.status_code == 201:
            deserialized = self._deserialize('BackupLongTermRetentionPolicy', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    if raw:
        # raw=True skips polling entirely: send once, process immediately.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def list_usages(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Returns database usages.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`DatabaseUsagePaged
     <azure.mgmt.sql.models.DatabaseUsagePaged>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2014-04-01"

    def internal_paging(next_link=None, raw=False):
        # Page fetcher handed to the Paged model below: the first call
        # builds the collection URL; later calls follow the service's
        # next_link verbatim (its query string is already embedded).
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/usages'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'serverName': self._serialize.url("server_name", server_name, 'str'),
                'databaseName': self._serialize.url("database_name", database_name, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers; custom_headers may override the generated
        # ones, accept-language is applied last when configured.
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: the Paged model invokes internal_paging lazily
    # as the caller iterates, one HTTP request per page.
    deserialized = models.DatabaseUsagePaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.DatabaseUsagePaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def get_blob_auditing_policy(
        self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config):
    """Gets a database's blob auditing policy.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database for which the blob
     audit policy is defined.
    :type database_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`DatabaseBlobAuditingPolicy
     <azure.mgmt.sql.models.DatabaseBlobAuditingPolicy>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Note: this operation uses a newer API version than its siblings.
    api_version = "2015-05-01-preview"

    # Expand the resource identifiers into the request URL; the policy
    # name segment comes from this operations class, not the caller.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/auditingSettings/{blobAuditingPolicyName}',
        resourceGroupName=self._serialize.url("resource_group_name", resource_group_name, 'str'),
        serverName=self._serialize.url("server_name", server_name, 'str'),
        databaseName=self._serialize.url("database_name", database_name, 'str'),
        blobAuditingPolicyName=self._serialize.url("self.blob_auditing_policy_name", self.blob_auditing_policy_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'))

    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }

    # Standard headers; caller-supplied custom_headers may override the
    # generated ones, while accept-language is applied last when configured.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET and surface any non-200 status as CloudError.
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code != 200:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = self._deserialize('DatabaseBlobAuditingPolicy', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def create_or_update_blob_auditing_policy(
        self, resource_group_name, server_name, database_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Creates or updates a database's blob auditing policy.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param database_name: The name of the database for which the blob
     auditing policy will be defined.
    :type database_name: str
    :param parameters: The database blob auditing policy.
    :type parameters: :class:`DatabaseBlobAuditingPolicy
     <azure.mgmt.sql.models.DatabaseBlobAuditingPolicy>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`DatabaseBlobAuditingPolicy
     <azure.mgmt.sql.models.DatabaseBlobAuditingPolicy>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2015-05-01-preview"

    # Build the request URL from the ARM path template.
    template = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/auditingSettings/{blobAuditingPolicyName}'
    path_args = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serverName': self._serialize.url("server_name", server_name, 'str'),
        'databaseName': self._serialize.url("database_name", database_name, 'str'),
        'blobAuditingPolicyName': self._serialize.url("self.blob_auditing_policy_name", self.blob_auditing_policy_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
    }
    url = self._client.format_url(template, **path_args)

    # Query string: only the API version.
    query = {'api-version': self._serialize.query("api_version", api_version, 'str')}

    # Request headers; optional per-call custom headers may override.
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the policy into the request body.
    body = self._serialize.body(parameters, 'DatabaseBlobAuditingPolicy')

    # Issue the PUT and validate the status code.
    request = self._client.put(url, query)
    response = self._client.send(request, headers, body, **operation_config)

    if response.status_code not in (200, 201):
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    if response.status_code in (200, 201):
        deserialized = self._deserialize('DatabaseBlobAuditingPolicy', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
| 48.191757
| 224
| 0.665753
| 15,257
| 141,491
| 5.955365
| 0.023465
| 0.035626
| 0.033678
| 0.016311
| 0.949527
| 0.943958
| 0.939699
| 0.929123
| 0.927185
| 0.923003
| 0
| 0.005646
| 0.24398
| 141,491
| 2,935
| 225
| 48.208177
| 0.84376
| 0.276081
| 0
| 0.890897
| 0
| 0.022758
| 0.19282
| 0.104797
| 0
| 0
| 0
| 0
| 0
| 1
| 0.049531
| false
| 0
| 0.009371
| 0
| 0.134538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b3e1422cffd114dc771833fc9d5b8d41f33ab053
| 3,249
|
py
|
Python
|
examples/cnn_bert/all_models/BERT/embedding/bert.py
|
Learn-Live/activity_recognition
|
76fa7bcecc3e422f1ea59fd1aaf576669e1248fb
|
[
"Apache-2.0"
] | 1
|
2022-01-10T21:02:50.000Z
|
2022-01-10T21:02:50.000Z
|
examples/cnn_bert/all_models/BERT/embedding/bert.py
|
Learn-Live/activity_recognition
|
76fa7bcecc3e422f1ea59fd1aaf576669e1248fb
|
[
"Apache-2.0"
] | null | null | null |
examples/cnn_bert/all_models/BERT/embedding/bert.py
|
Learn-Live/activity_recognition
|
76fa7bcecc3e422f1ea59fd1aaf576669e1248fb
|
[
"Apache-2.0"
] | null | null | null |
import torch.nn as nn
from .token import TokenEmbedding
from .position import PositionalEmbedding, LearnedPositionalEmbedding2, LearnedPositionalEmbedding, \
LearnedPositionalEmbedding3
from .segment import SegmentEmbedding
class BERTEmbedding(nn.Module):
    """Sinusoidal positional embedding layer.

    Adds a fixed sin/cos positional encoding (``PositionalEmbedding``) to
    the input sequence and applies dropout to the sum.
    """

    def __init__(self, input_dim, max_len, dropout=0.1):
        """
        :param input_dim: feature dimension of the input (d_model)
        :param max_len: maximum supported sequence length
        :param dropout: dropout probability applied to the summed embedding
        """
        super().__init__()
        self.position = PositionalEmbedding(d_model=input_dim, max_len=max_len, freq=64)
        self.dropout = nn.Dropout(p=dropout)

    def forward(self, sequence):
        # Sum the input with its positional encoding, then regularize.
        combined = sequence + self.position(sequence)
        return self.dropout(combined)
class BERTEmbedding3(nn.Module):
    """Learned positional embedding layer (variant using
    ``LearnedPositionalEmbedding``).

    Adds a trainable positional embedding to the input sequence and applies
    dropout to the sum.
    """

    def __init__(self, input_dim, max_len, dropout=0.1):
        """
        :param input_dim: feature dimension of the input (d_model)
        :param max_len: maximum supported sequence length
        :param dropout: dropout probability applied to the summed embedding
        """
        super().__init__()
        self.learnedPosition = LearnedPositionalEmbedding(d_model=input_dim, max_len=max_len)
        self.dropout = nn.Dropout(p=dropout)

    def forward(self, sequence):
        # Sum the input with its learned positional encoding, then regularize.
        combined = sequence + self.learnedPosition(sequence)
        return self.dropout(combined)
class BERTEmbedding2(nn.Module):
    """Learned positional embedding layer (variant using
    ``LearnedPositionalEmbedding2``).

    Adds a trainable positional embedding to the input sequence and applies
    dropout to the sum.
    """

    def __init__(self, input_dim, max_len, dropout=0.1):
        """
        :param input_dim: feature dimension of the input (d_model)
        :param max_len: maximum supported sequence length
        :param dropout: dropout probability applied to the summed embedding
        """
        super().__init__()
        self.learnedPosition = LearnedPositionalEmbedding2(d_model=input_dim, max_len=max_len)
        self.dropout = nn.Dropout(p=dropout)

    def forward(self, sequence):
        # Sum the input with its learned positional encoding, then regularize.
        combined = sequence + self.learnedPosition(sequence)
        return self.dropout(combined)
class BERTEmbedding4(nn.Module):
    """Learned positional embedding layer (variant using
    ``LearnedPositionalEmbedding3``).

    Adds a trainable positional embedding to the input sequence and applies
    dropout to the sum.
    """

    def __init__(self, input_dim, max_len, dropout=0.1):
        """
        :param input_dim: feature dimension of the input (d_model)
        :param max_len: maximum supported sequence length
        :param dropout: dropout probability applied to the summed embedding
        """
        super().__init__()
        self.learnedPosition = LearnedPositionalEmbedding3(d_model=input_dim, max_len=max_len)
        self.dropout = nn.Dropout(p=dropout)

    def forward(self, sequence):
        # Sum the input with its learned positional encoding, then regularize.
        combined = sequence + self.learnedPosition(sequence)
        return self.dropout(combined)
| 34.56383
| 101
| 0.676516
| 378
| 3,249
| 5.645503
| 0.177249
| 0.033739
| 0.041237
| 0.052484
| 0.823336
| 0.823336
| 0.823336
| 0.805061
| 0.792877
| 0.792877
| 0
| 0.008537
| 0.242844
| 3,249
| 93
| 102
| 34.935484
| 0.858943
| 0.373961
| 0
| 0.621622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.216216
| false
| 0
| 0.108108
| 0
| 0.540541
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
373dda1f2869945b705e8c78d8244ccbe4e7e8cf
| 18,300
|
py
|
Python
|
env/Vehicle2.py
|
byq-luo/Lane_change_RL
|
3409238db939e6722441219b4c2dc66033611069
|
[
"MIT"
] | 4
|
2021-03-11T03:05:31.000Z
|
2022-03-22T08:45:20.000Z
|
env/Vehicle2.py
|
byq-luo/Lane_change_RL
|
3409238db939e6722441219b4c2dc66033611069
|
[
"MIT"
] | null | null | null |
env/Vehicle2.py
|
byq-luo/Lane_change_RL
|
3409238db939e6722441219b4c2dc66033611069
|
[
"MIT"
] | 5
|
2021-06-18T11:32:34.000Z
|
2021-12-31T08:10:51.000Z
|
import math
import os, sys
# Make SUMO's bundled python tools (including the `traci` module imported
# below) importable; abort with a clear message if SUMO is not configured.
if 'SUMO_HOME' in os.environ:
    tools = os.path.join(os.environ['SUMO_HOME'], 'tools')
    sys.path.append(tools)
    print('success')
else:
    sys.exit("please declare environment variable 'SUMO_HOME'")
import traci
class Vehicle:
    """Wrapper around one SUMO vehicle driven through traci.

    Caches kinematic state (speed, lateral speed, accelerations, positions),
    lane indices, and — for the ego vehicle — the surrounding vehicles
    (leaders/followers on the original and target lanes). Also provides IDM
    longitudinal control and sublane lane-change helpers.
    """
    def __init__(self, veh_id, rd):
        # veh_id: SUMO vehicle id used for every traci query.
        # rd: road descriptor; startJunction, laneWidth, laneLength and
        #     entranceEdgeID are read here and in update_info().
        self.veh_id = veh_id
        self.speed = None
        self.latSpeed = 0
        self.latSpeed_last = 0
        self.latAcce = 0
        self.acce = 0
        self.acce_last = self.acce
        self.delta_acce = 0
        self.pos = rd.startJunction
        self.pos_x = self.pos[0]
        self.pos_y = self.pos[1]
        self.curr_laneIndex = traci.vehicle.getLaneIndex(self.veh_id)
        self.trgt_laneIndex = self.curr_laneIndex
        self.orig_laneIndex = self.curr_laneIndex
        # Lateral position measured from the road's right edge: traci returns
        # the offset from the current lane's centre line, so shift by lane index.
        self.pos_lat = traci.vehicle.getLateralLanePosition(self.veh_id) + (self.curr_laneIndex+0.5)*rd.laneWidth
        self.pos_lat_last = self.pos_lat
        self.lanePos = traci.vehicle.getLanePosition(self.veh_id)
        self.yawAngle = 0
        # todo use Vehicle class directly
        # self.curr_leader = {'id': None, 'obj':None, 'dis':None, 'speed':None}
        # self.orig_leader = {'id': None, 'obj':None, 'dis':None, 'speed':None}
        # self.trgt_leader = {'id': None, 'obj':None, 'dis':None, 'speed':None}
        # self.trgt_follower = {'id': None, 'obj':None, 'dis':None, 'speed':None}
        # self.curr_leader = {'id': None, 'dis': None, 'speed': None}
        # self.orig_leader = {'id': None, 'dis': None, 'speed': None}
        # self.orig_follower = {'id': None, 'dis': None, 'speed': None}
        # self.trgt_leader = {'id': None, 'dis': None, 'speed': None}
        # self.trgt_follower = {'id': None, 'dis': None, 'speed': None}
        # Neighbouring Vehicle objects; populated in update_info() for the ego.
        self.curr_leader = None
        self.orig_leader = None
        self.orig_follower = None
        self.trgt_leader = None
        self.trgt_follower = None
        self.dis2tgtLane = None
        self.dis2entrance = None
        self.lcPos = None
        self.reward = 0
        # not update every step
        self.is_ego = 0
        self.changeTimes = 0
        self.idm_obj = None
        # Disable SUMO's own lane-change decisions so the env controls them.
        traci.vehicle.setLaneChangeMode(veh_id, 256) # 768
    def update_info(self, rd, veh_dict):
        """Refresh kinematics and, for the ego vehicle, neighbour lookups.

        :param rd: road descriptor (laneWidth, laneLength, entranceEdgeID)
        :param veh_dict: mapping of veh_id -> Vehicle for all tracked vehicles
        """
        self.curr_laneIndex = traci.vehicle.getLaneIndex(self.veh_id)
        self.speed = traci.vehicle.getSpeed(self.veh_id)
        self.acce = traci.vehicle.getAcceleration(self.veh_id)
        # Finite differences over the 0.1 s simulation step.
        self.delta_acce = (self.acce - self.acce_last) / 0.1
        self.acce_last = self.acce
        self.pos_lat_last = self.pos_lat
        self.pos_lat = traci.vehicle.getLateralLanePosition(self.veh_id) + (self.curr_laneIndex+0.5)*rd.laneWidth
        self.latSpeed_last = self.latSpeed
        self.latSpeed = (self.pos_lat - self.pos_lat_last) / 0.1 # 0.1 for time step length
        self.latAcce = (self.latSpeed - self.latSpeed_last) / 0.1
        self.pos = traci.vehicle.getPosition(self.veh_id)
        self.pos_x = self.pos[0]
        self.pos_y = self.pos[1]
        self.lanePos = traci.vehicle.getLanePosition(self.veh_id)
        # Epsilon denominator avoids division by zero when stopped.
        self.yawAngle = math.atan(self.latSpeed / max(self.speed, 0.00000001))
        # Neighbour bookkeeping is only maintained for the ego vehicle.
        if self.is_ego == 1:
            self.dis2tgtLane = abs((0.5 + self.trgt_laneIndex) * rd.laneWidth - self.pos_lat)
            self.dis2entrance = rd.laneLength - self.lanePos
            # get current leader information
            leader_tuple = traci.vehicle.getLeader(self.veh_id)
            if leader_tuple is not None:
                if leader_tuple[0] in list(veh_dict.keys()):
                    self.curr_leader = veh_dict[leader_tuple[0]]
                else:
                    self.curr_leader = None
            else:
                self.curr_leader = None
            # the following 2 all in list [(id1, distance1), (id2, distance2), ...], each tuple is a leader on one lane
            # for a single left lane, the list only contains 1 tuple, eg.[(id1, distance1)]
            # get target lane leader&follower information
            if self.curr_laneIndex > self.trgt_laneIndex:
                # Still on the original lane (change not yet completed).
                assert self.curr_laneIndex == self.orig_laneIndex
                # original lane leader
                self.orig_leader = self.curr_leader
                # original lane follower: nearest vehicle behind on this lane.
                follower_id = None
                min_dis = 100000
                for veh_id in traci.lane.getLastStepVehicleIDs(rd.entranceEdgeID+'_'+str(self.curr_laneIndex)):
                    dis_temp = self.lanePos - veh_dict[veh_id].lanePos
                    if dis_temp > 0:
                        if dis_temp < min_dis:
                            follower_id = veh_id
                            min_dis = dis_temp
                if follower_id is not None:
                    self.orig_follower = veh_dict[follower_id]
                else:
                    self.orig_follower = None
                # target lane leader
                # NOTE(review): getNeighbors mode is a traci bitfield — confirm
                # 1+2 selects left-lane leaders here (see traci docs).
                leaders_list = traci.vehicle.getNeighbors(self.veh_id, 1+2+0)
                if len(leaders_list) != 0:
                    if leaders_list[0][0] in list(veh_dict.keys()):
                        self.trgt_leader = veh_dict[leaders_list[0][0]]
                    else:
                        self.trgt_leader = None
                else:
                    self.trgt_leader = None
                # target lane follower
                followers_list = traci.vehicle.getNeighbors(self.veh_id, 1+0+0)
                if len(followers_list) != 0:
                    if followers_list[0][0] in list(veh_dict.keys()):
                        self.trgt_follower = veh_dict[followers_list[0][0]]
                    else:
                        self.trgt_follower = None
                else:
                    self.trgt_follower = None
            else:
                # Already on the target lane.
                assert self.curr_laneIndex == self.trgt_laneIndex
                # target lane leader
                self.trgt_leader = self.curr_leader
                # target lane follower: nearest vehicle behind on this lane.
                follower_id = None
                min_dis = 100000
                for veh_id in traci.lane.getLastStepVehicleIDs(rd.entranceEdgeID + '_' + str(self.curr_laneIndex)):
                    dis_temp = self.lanePos - veh_dict[veh_id].lanePos
                    if dis_temp > 0:
                        if dis_temp < min_dis:
                            follower_id = veh_id
                            min_dis = dis_temp
                if follower_id is not None:
                    self.trgt_follower = veh_dict[follower_id]
                else:
                    self.trgt_follower = None
                # original lane leader
                leaders_list = traci.vehicle.getNeighbors(self.veh_id, 0 + 2 + 0)
                if len(leaders_list) != 0:
                    if leaders_list[0][0] in list(veh_dict.keys()):
                        self.orig_leader = veh_dict[leaders_list[0][0]]
                    else:
                        self.orig_leader = None
                else:
                    self.orig_leader = None
                # original lane follower
                followers_list = traci.vehicle.getNeighbors(self.veh_id, 0 + 0 + 0)
                if len(followers_list) != 0:
                    if followers_list[0][0] in list(veh_dict.keys()):
                        self.orig_follower = veh_dict[followers_list[0][0]]
                    else:
                        self.orig_follower = None
                else:
                    self.orig_follower = None
    def updateLongitudinalSpeedIDM(self, action):
        """Compute the next longitudinal acceleration with the IDM model.

        :param action: 0 -> follow the original-lane leader,
                       1 -> follow the target-lane leader
        :return: acceleration from self.idm_obj.calc_acce
        """
        # cannot acquire vNext, compute longitudinal speed on our own
        # determine leader
        if action == 0:
            leader = self.orig_leader
        else:
            assert action == 1
            leader = self.trgt_leader
        # compute acceNext
        if leader is not None:
            leaderDis = leader.lanePos - self.lanePos
            acceNext = self.idm_obj.calc_acce(self.speed, leaderDis, leader.speed)
        else:
            # No leader: free-driving IDM acceleration.
            acceNext = self.idm_obj.calc_acce(self.speed, None, None)
        return acceNext
    def changeLane(self, cps, tgtlane, rd):
        """Drive a sublane lane change toward `tgtlane`.

        :param cps: True -> compulsory change (lane-change mode 0, ignores
                    other vehicles); False -> cooperative mode 1621
        :param tgtlane: target lane index, or -1 to hold the current
                    lateral position
        :param rd: road descriptor (laneWidth)
        :return: True once within 0.1 m of the target lane centre
        """
        # make compulsory/default lane change, do not respect other vehicles
        '''
        if tgtlane == 0:
            traci.vehicle.setRouteID(self.veh_id, rd.rampExitRouteID)
        else:
            traci.vehicle.setRouteID(self.veh_id, rd.highwayKeepRouteID)
        assert traci.vehicle.isRouteValid(self.veh_id) is True, 'route is not valid'
        '''
        # set lane change mode
        if tgtlane != -1:
            if cps is True:
                traci.vehicle.setLaneChangeMode(self.veh_id, 0)
                # execute lane change with 'changeSublane'
            else:
                traci.vehicle.setLaneChangeMode(self.veh_id, 1621) # 768:no speed adaption
                # traci.vehicle.changeLane(self.veh_id, self.targetLane, 1)
            traci.vehicle.changeSublane(self.veh_id, (0.5 + tgtlane) * rd.laneWidth - self.pos_lat)
        else:
            traci.vehicle.changeSublane(self.veh_id, 0.0)
        if self.dis2tgtLane < 0.1:
            return True
        else:
            return False
class Ego:
    """Ego-vehicle wrapper around SUMO/traci.

    NOTE(review): this class is a byte-for-byte duplicate of ``Vehicle``
    in this module — consider making Ego a subclass of (or alias for)
    Vehicle to remove the duplication.

    Caches kinematic state, lane indices, and neighbouring vehicles, and
    provides IDM longitudinal control and sublane lane-change helpers.
    """
    def __init__(self, veh_id, rd):
        # veh_id: SUMO vehicle id used for every traci query.
        # rd: road descriptor; startJunction, laneWidth, laneLength and
        #     entranceEdgeID are read here and in update_info().
        self.veh_id = veh_id
        self.speed = None
        self.latSpeed = 0
        self.latSpeed_last = 0
        self.latAcce = 0
        self.acce = 0
        self.acce_last = self.acce
        self.delta_acce = 0
        self.pos = rd.startJunction
        self.pos_x = self.pos[0]
        self.pos_y = self.pos[1]
        self.curr_laneIndex = traci.vehicle.getLaneIndex(self.veh_id)
        self.trgt_laneIndex = self.curr_laneIndex
        self.orig_laneIndex = self.curr_laneIndex
        # Lateral position measured from the road's right edge: traci returns
        # the offset from the current lane's centre line, so shift by lane index.
        self.pos_lat = traci.vehicle.getLateralLanePosition(self.veh_id) + (self.curr_laneIndex+0.5)*rd.laneWidth
        self.pos_lat_last = self.pos_lat
        self.lanePos = traci.vehicle.getLanePosition(self.veh_id)
        self.yawAngle = 0
        # todo use Vehicle class directly
        # self.curr_leader = {'id': None, 'obj':None, 'dis':None, 'speed':None}
        # self.orig_leader = {'id': None, 'obj':None, 'dis':None, 'speed':None}
        # self.trgt_leader = {'id': None, 'obj':None, 'dis':None, 'speed':None}
        # self.trgt_follower = {'id': None, 'obj':None, 'dis':None, 'speed':None}
        # self.curr_leader = {'id': None, 'dis': None, 'speed': None}
        # self.orig_leader = {'id': None, 'dis': None, 'speed': None}
        # self.orig_follower = {'id': None, 'dis': None, 'speed': None}
        # self.trgt_leader = {'id': None, 'dis': None, 'speed': None}
        # self.trgt_follower = {'id': None, 'dis': None, 'speed': None}
        # Neighbouring Vehicle objects; populated in update_info().
        self.curr_leader = None
        self.orig_leader = None
        self.orig_follower = None
        self.trgt_leader = None
        self.trgt_follower = None
        self.dis2tgtLane = None
        self.dis2entrance = None
        self.lcPos = None
        self.reward = 0
        # not update every step
        self.is_ego = 0
        self.changeTimes = 0
        self.idm_obj = None
        # Disable SUMO's own lane-change decisions so the env controls them.
        traci.vehicle.setLaneChangeMode(veh_id, 256) # 768
    def update_info(self, rd, veh_dict):
        """Refresh kinematics and, when flagged as ego, neighbour lookups.

        :param rd: road descriptor (laneWidth, laneLength, entranceEdgeID)
        :param veh_dict: mapping of veh_id -> Vehicle for all tracked vehicles
        """
        self.curr_laneIndex = traci.vehicle.getLaneIndex(self.veh_id)
        self.speed = traci.vehicle.getSpeed(self.veh_id)
        self.acce = traci.vehicle.getAcceleration(self.veh_id)
        # Finite differences over the 0.1 s simulation step.
        self.delta_acce = (self.acce - self.acce_last) / 0.1
        self.acce_last = self.acce
        self.pos_lat_last = self.pos_lat
        self.pos_lat = traci.vehicle.getLateralLanePosition(self.veh_id) + (self.curr_laneIndex+0.5)*rd.laneWidth
        self.latSpeed_last = self.latSpeed
        self.latSpeed = (self.pos_lat - self.pos_lat_last) / 0.1 # 0.1 for time step length
        self.latAcce = (self.latSpeed - self.latSpeed_last) / 0.1
        self.pos = traci.vehicle.getPosition(self.veh_id)
        self.pos_x = self.pos[0]
        self.pos_y = self.pos[1]
        self.lanePos = traci.vehicle.getLanePosition(self.veh_id)
        # Epsilon denominator avoids division by zero when stopped.
        self.yawAngle = math.atan(self.latSpeed / max(self.speed, 0.00000001))
        # Neighbour bookkeeping is only maintained when flagged as ego.
        if self.is_ego == 1:
            self.dis2tgtLane = abs((0.5 + self.trgt_laneIndex) * rd.laneWidth - self.pos_lat)
            self.dis2entrance = rd.laneLength - self.lanePos
            # get current leader information
            leader_tuple = traci.vehicle.getLeader(self.veh_id)
            if leader_tuple is not None:
                if leader_tuple[0] in list(veh_dict.keys()):
                    self.curr_leader = veh_dict[leader_tuple[0]]
                else:
                    self.curr_leader = None
            else:
                self.curr_leader = None
            # the following 2 all in list [(id1, distance1), (id2, distance2), ...], each tuple is a leader on one lane
            # for a single left lane, the list only contains 1 tuple, eg.[(id1, distance1)]
            # get target lane leader&follower information
            if self.curr_laneIndex > self.trgt_laneIndex:
                # Still on the original lane (change not yet completed).
                assert self.curr_laneIndex == self.orig_laneIndex
                # original lane leader
                self.orig_leader = self.curr_leader
                # original lane follower: nearest vehicle behind on this lane.
                follower_id = None
                min_dis = 100000
                for veh_id in traci.lane.getLastStepVehicleIDs(rd.entranceEdgeID+'_'+str(self.curr_laneIndex)):
                    dis_temp = self.lanePos - veh_dict[veh_id].lanePos
                    if dis_temp > 0:
                        if dis_temp < min_dis:
                            follower_id = veh_id
                            min_dis = dis_temp
                if follower_id is not None:
                    self.orig_follower = veh_dict[follower_id]
                else:
                    self.orig_follower = None
                # target lane leader
                # NOTE(review): getNeighbors mode is a traci bitfield — confirm
                # 1+2 selects left-lane leaders here (see traci docs).
                leaders_list = traci.vehicle.getNeighbors(self.veh_id, 1+2+0)
                if len(leaders_list) != 0:
                    if leaders_list[0][0] in list(veh_dict.keys()):
                        self.trgt_leader = veh_dict[leaders_list[0][0]]
                    else:
                        self.trgt_leader = None
                else:
                    self.trgt_leader = None
                # target lane follower
                followers_list = traci.vehicle.getNeighbors(self.veh_id, 1+0+0)
                if len(followers_list) != 0:
                    if followers_list[0][0] in list(veh_dict.keys()):
                        self.trgt_follower = veh_dict[followers_list[0][0]]
                    else:
                        self.trgt_follower = None
                else:
                    self.trgt_follower = None
            else:
                # Already on the target lane.
                assert self.curr_laneIndex == self.trgt_laneIndex
                # target lane leader
                self.trgt_leader = self.curr_leader
                # target lane follower: nearest vehicle behind on this lane.
                follower_id = None
                min_dis = 100000
                for veh_id in traci.lane.getLastStepVehicleIDs(rd.entranceEdgeID + '_' + str(self.curr_laneIndex)):
                    dis_temp = self.lanePos - veh_dict[veh_id].lanePos
                    if dis_temp > 0:
                        if dis_temp < min_dis:
                            follower_id = veh_id
                            min_dis = dis_temp
                if follower_id is not None:
                    self.trgt_follower = veh_dict[follower_id]
                else:
                    self.trgt_follower = None
                # original lane leader
                leaders_list = traci.vehicle.getNeighbors(self.veh_id, 0 + 2 + 0)
                if len(leaders_list) != 0:
                    if leaders_list[0][0] in list(veh_dict.keys()):
                        self.orig_leader = veh_dict[leaders_list[0][0]]
                    else:
                        self.orig_leader = None
                else:
                    self.orig_leader = None
                # original lane follower
                followers_list = traci.vehicle.getNeighbors(self.veh_id, 0 + 0 + 0)
                if len(followers_list) != 0:
                    if followers_list[0][0] in list(veh_dict.keys()):
                        self.orig_follower = veh_dict[followers_list[0][0]]
                    else:
                        self.orig_follower = None
                else:
                    self.orig_follower = None
    def updateLongitudinalSpeedIDM(self, action):
        """Compute the next longitudinal acceleration with the IDM model.

        :param action: 0 -> follow the original-lane leader,
                       1 -> follow the target-lane leader
        :return: acceleration from self.idm_obj.calc_acce
        """
        # cannot acquire vNext, compute longitudinal speed on our own
        # determine leader
        if action == 0:
            leader = self.orig_leader
        else:
            assert action == 1
            leader = self.trgt_leader
        # compute acceNext
        if leader is not None:
            leaderDis = leader.lanePos - self.lanePos
            acceNext = self.idm_obj.calc_acce(self.speed, leaderDis, leader.speed)
        else:
            # No leader: free-driving IDM acceleration.
            acceNext = self.idm_obj.calc_acce(self.speed, None, None)
        return acceNext
    def changeLane(self, cps, tgtlane, rd):
        """Drive a sublane lane change toward `tgtlane`.

        :param cps: True -> compulsory change (lane-change mode 0, ignores
                    other vehicles); False -> cooperative mode 1621
        :param tgtlane: target lane index, or -1 to hold the current
                    lateral position
        :param rd: road descriptor (laneWidth)
        :return: True once within 0.1 m of the target lane centre
        """
        # make compulsory/default lane change, do not respect other vehicles
        '''
        if tgtlane == 0:
            traci.vehicle.setRouteID(self.veh_id, rd.rampExitRouteID)
        else:
            traci.vehicle.setRouteID(self.veh_id, rd.highwayKeepRouteID)
        assert traci.vehicle.isRouteValid(self.veh_id) is True, 'route is not valid'
        '''
        # set lane change mode
        if tgtlane != -1:
            if cps is True:
                traci.vehicle.setLaneChangeMode(self.veh_id, 0)
                # execute lane change with 'changeSublane'
            else:
                traci.vehicle.setLaneChangeMode(self.veh_id, 1621) # 768:no speed adaption
                # traci.vehicle.changeLane(self.veh_id, self.targetLane, 1)
            traci.vehicle.changeSublane(self.veh_id, (0.5 + tgtlane) * rd.laneWidth - self.pos_lat)
        else:
            traci.vehicle.changeSublane(self.veh_id, 0.0)
        if self.dis2tgtLane < 0.1:
            return True
        else:
            return False
| 41.685649
| 119
| 0.554863
| 2,171
| 18,300
| 4.506218
| 0.079226
| 0.03271
| 0.044158
| 0.026577
| 0.980476
| 0.980476
| 0.980476
| 0.980476
| 0.980476
| 0.980476
| 0
| 0.020665
| 0.349508
| 18,300
| 439
| 120
| 41.685649
| 0.801159
| 0.182295
| 0
| 0.966997
| 0
| 0
| 0.0055
| 0
| 0
| 0
| 0
| 0.004556
| 0.019802
| 1
| 0.026403
| false
| 0
| 0.009901
| 0
| 0.062706
| 0.0033
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3773940312240caf521e647cb33dde04b7c47d07
| 178
|
py
|
Python
|
summarize/nn/beam_search/coverage_penalizers/__init__.py
|
danieldeutsch/summarize
|
f36a86d58f381ff1f607f356dad3d6ef7b0e0224
|
[
"Apache-2.0"
] | 15
|
2019-11-01T11:49:44.000Z
|
2021-01-19T06:59:32.000Z
|
summarize/nn/beam_search/coverage_penalizers/__init__.py
|
CogComp/summary-cloze
|
b38e3e8c7755903477fd92a4cff27125cbf5553d
|
[
"Apache-2.0"
] | 2
|
2020-03-30T07:54:01.000Z
|
2021-11-15T16:27:42.000Z
|
summarize/nn/beam_search/coverage_penalizers/__init__.py
|
CogComp/summary-cloze
|
b38e3e8c7755903477fd92a4cff27125cbf5553d
|
[
"Apache-2.0"
] | 3
|
2019-12-06T05:57:51.000Z
|
2019-12-11T11:34:21.000Z
|
from summarize.nn.beam_search.coverage_penalizers.coverage_penalizer import CoveragePenalizer
from summarize.nn.beam_search.coverage_penalizers.onmt import ONMTCoveragePenalizer
| 59.333333
| 93
| 0.910112
| 21
| 178
| 7.47619
| 0.571429
| 0.165605
| 0.191083
| 0.242038
| 0.547771
| 0.547771
| 0.547771
| 0
| 0
| 0
| 0
| 0
| 0.044944
| 178
| 2
| 94
| 89
| 0.923529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
37a9d8f6dd1438c4f6f40f575d418201b3e9915c
| 9,914
|
py
|
Python
|
HW2Assignment/code/my_hw2.py
|
rpg711/cs383
|
468aaf5a57d758056fc3b503a3b835e58e8cfbff
|
[
"MIT"
] | null | null | null |
HW2Assignment/code/my_hw2.py
|
rpg711/cs383
|
468aaf5a57d758056fc3b503a3b835e58e8cfbff
|
[
"MIT"
] | null | null | null |
HW2Assignment/code/my_hw2.py
|
rpg711/cs383
|
468aaf5a57d758056fc3b503a3b835e58e8cfbff
|
[
"MIT"
] | null | null | null |
from sudoku import Sudoku
from copy import deepcopy
from collections import deque
from time import time
class CSP_Solver(object):
    """
    This class is used to solve the CSP with backtracking.

    Cells are addressed as (row, col) tuples; 0 marks an empty cell.
    Assignments are written directly into self.sudoku.board and undone on
    backtrack.
    """
    def __init__(self, puzzle_file):
        # Load the puzzle and build per-cell domains: a full list 1..9 for
        # empty cells, or the fixed value for pre-filled cells.
        self.sudoku = Sudoku(puzzle_file)
        self.guesses = 0
        self.domain = deepcopy(self.sudoku.board)
        self.arcs = []  # unused here; placeholder for arc consistency
        self.unassigned = deque()  # coordinates of empty cells
        self.assigned = {}  # (row, col) -> value for the given cells
        for row in range(9):
            for col in range(9):
                val = self.sudoku.board[row][col]
                if val == 0: # if val is empty, domain is every possible number
                    self.domain[row][col] = [x for x in range(1,10)]
                    self.unassigned.append((row,col))
                else:
                    self.domain[row][col] = val
                    self.assigned[(row,col)] = val
    ################################################################
    ### YOU MUST EDIT THIS FUNCTION!!!!!
    ### We will test your code by constructing a csp_solver instance
    ### e.g.,
    ### csp_solver = CSP_Solver('puz-001.txt')
    ### solved_board, num_guesses = csp_solver.solve()
    ### so your `solve' method must return these two items.
    ################################################################
    def solve(self):
        """
        Solves the Sudoku CSP and returns a list of lists representation
        of the solved sudoku puzzle as well as the number of guesses
        (assignments) required to solve the problem.
        YOU MUST EDIT THIS FUNCTION!!!!!
        """
        self.backtracking_search()
        print(self.sudoku.board_str())
        return self.sudoku.board, self.guesses
    def consistent(self, var, value):
        # check if given (x,y) is consistent with current board
        # check row consistency
        for col in [x for x in range(9) if x != var[1]]:
            if self.sudoku.board[var[0]][col] == value:
                return False
        # check col consistency
        for row in [x for x in range(9) if x != var[0]]:
            if self.sudoku.board[row][var[1]] == value:
                return False
        # check box consistency
        # define starting indices of box for var
        start_row = var[0]//3 * 3
        start_col = var[1]//3 * 3
        # Cells sharing var's row or column inside the box are skipped here,
        # but those are already covered by the row/column checks above.
        for row in [x for x in range(start_row, start_row + 3) if x != var[0]]:
            for col in [x for x in range(start_col, start_col + 3) if x != var[1]]:
                if self.sudoku.board[row][col] == value:
                    return False
        return True
    def select_unassigned_var(self, assignment):
        # Returns the first empty cell in row-major order (no heuristic);
        # returns None only when the board has no empty cells.
        for row in range(9):
            for col in range(9):
                val = self.sudoku.board[row][col]
                if val == 0: # if val is empty, domain is every possible number
                    return (row,col)
    def order_domain_values(self, var, assignment):
        # Static domain order as built in __init__ (not pruned during search).
        return self.domain[var[0]][var[1]]
    def backtracking_search(self):
        return self.recursive_backtracking(self.assigned)
    def recursive_backtracking(self, assignment):
        # Depth-first backtracking; mutates self.sudoku.board in place and
        # undoes the write when a branch fails. guesses counts every value
        # tried, including inconsistent ones.
        if self.sudoku.complete():
            return assignment
        var = self.select_unassigned_var(assignment)
        for value in self.order_domain_values(var, assignment):
            self.guesses += 1
            if self.consistent(var, value):
                assignment[var] = value
                self.sudoku.board[var[0]][var[1]] = value
                result = self.recursive_backtracking(assignment)
                if result is not None:
                    return result
                assignment.pop(var) # get rid of assignment
                self.sudoku.board[var[0]][var[1]] = 0
        return None
class CSP_Solver_MRV(object):
    """
    Solves the Sudoku CSP with backtracking, selecting the next variable by
    the Minimum Remaining Values (MRV) heuristic.

    Cells are addressed as (row, col) tuples; 0 marks an empty cell.
    Assignments are written directly into self.sudoku.board and undone on
    backtrack.
    """
    def __init__(self, puzzle_file):
        # Load the puzzle and build per-cell domains: a full list 1..9 for
        # empty cells, or the fixed value for pre-filled cells.
        self.sudoku = Sudoku(puzzle_file)
        self.guesses = 0
        self.domain = deepcopy(self.sudoku.board)
        self.arcs = []  # unused here; placeholder for arc consistency
        self.unassigned = deque()  # coordinates of empty cells
        self.assigned = {}  # (row, col) -> value for the given cells
        for row in range(9):
            for col in range(9):
                val = self.sudoku.board[row][col]
                if val == 0:  # empty cell: domain is every possible number
                    self.domain[row][col] = [x for x in range(1, 10)]
                    self.unassigned.append((row, col))
                else:
                    self.domain[row][col] = val
                    self.assigned[(row, col)] = val
    def solve(self):
        """
        Solves the Sudoku CSP and returns a list of lists representation
        of the solved sudoku puzzle as well as the number of guesses
        (assignments) required to solve the problem.
        """
        self.backtracking_search()
        print(self.sudoku.board_str())
        return self.sudoku.board, self.guesses
    def consistent(self, var, value):
        """Return True if placing `value` at cell `var` violates no Sudoku
        constraint (row, column, 3x3 box) on the current board."""
        # check row consistency
        for col in [x for x in range(9) if x != var[1]]:
            if self.sudoku.board[var[0]][col] == value:
                return False
        # check col consistency
        for row in [x for x in range(9) if x != var[0]]:
            if self.sudoku.board[row][var[1]] == value:
                return False
        # check box consistency; cells sharing var's row/col in the box are
        # skipped here but already covered by the two checks above
        start_row = var[0]//3 * 3
        start_col = var[1]//3 * 3
        for row in [x for x in range(start_row, start_row + 3) if x != var[0]]:
            for col in [x for x in range(start_col, start_col + 3) if x != var[1]]:
                if self.sudoku.board[row][col] == value:
                    return False
        return True
    def free_vals(self, var):
        """Return (count, values) of the candidate values still consistent
        with cell `var`'s row, column and box on the current board."""
        tempdomain = [x for x in range(1, 10)]
        # remove values already used in the row
        for col in [x for x in range(9) if x != var[1]]:
            if self.sudoku.board[var[0]][col] != 0 and \
                    self.sudoku.board[var[0]][col] in tempdomain:
                tempdomain.remove(self.sudoku.board[var[0]][col])
        # remove values already used in the column
        for row in [x for x in range(9) if x != var[0]]:
            if self.sudoku.board[row][var[1]] != 0 and \
                    self.sudoku.board[row][var[1]] in tempdomain:
                tempdomain.remove(self.sudoku.board[row][var[1]])
        # remove values already used in the 3x3 box
        start_row = var[0]//3 * 3
        start_col = var[1]//3 * 3
        for row in [x for x in range(start_row, start_row + 3) if x != var[0]]:
            for col in [x for x in range(start_col, start_col + 3) if x != var[1]]:
                if self.sudoku.board[row][col] != 0 and \
                        self.sudoku.board[row][col] in tempdomain:
                    tempdomain.remove(self.sudoku.board[row][col])
        return len(tempdomain), tempdomain
    def select_unassigned_var(self, assignment):
        """Return (cell, domain) for the empty cell with the fewest
        remaining consistent values (MRV), or (None, []) if none is empty."""
        leastval = None
        leastdomain = []
        # BUG FIX: start strictly above the maximum domain size (9) so a
        # fully unconstrained cell (9 free values) can still be selected;
        # with the old initial value of 9 such a cell was never chosen.
        nleast = 10
        for row in range(9):
            for col in range(9):
                val = self.sudoku.board[row][col]
                if val == 0:
                    fv, ld = self.free_vals((row, col))
                    if fv < nleast:
                        nleast = fv
                        leastval = (row, col)
                        leastdomain = ld
        return leastval, leastdomain
    def backtracking_search(self):
        return self.recursive_backtracking(self.assigned)
    def recursive_backtracking(self, assignment):
        """Depth-first backtracking using MRV ordering; mutates
        self.sudoku.board in place and undoes writes on failure.
        `guesses` counts every value tried, including inconsistent ones."""
        if self.sudoku.complete():
            return assignment
        var, ld = self.select_unassigned_var(assignment)
        for value in ld:
            self.guesses += 1
            if self.consistent(var, value):
                assignment[var] = value
                self.sudoku.board[var[0]][var[1]] = value
                result = self.recursive_backtracking(assignment)
                if result is not None:
                    return result
                assignment.pop(var)  # undo the assignment on backtrack
                self.sudoku.board[var[0]][var[1]] = 0
        return None
if __name__ == '__main__':
    # Solve each sample puzzle with the MRV solver and print the result.
    puzzles = (
        'puz-001.txt',
        'puz-026.txt',
        'puz-051.txt',
        'puz-076.txt',
        'puz-090.txt',
        'puz-100.txt',
    )
    for puzzle in puzzles:
        csp_solver_mrv = CSP_Solver_MRV(puzzle)
        print(csp_solver_mrv.solve())
        # csp_solver_mrv.sudoku.write(puzzle.replace('.txt', '-solved-mrv.txt'))
| 34.785965
| 83
| 0.547509
| 1,295
| 9,914
| 4.093436
| 0.10888
| 0.057725
| 0.08489
| 0.050934
| 0.893982
| 0.893416
| 0.877948
| 0.870025
| 0.827202
| 0.827202
| 0
| 0.02049
| 0.320658
| 9,914
| 284
| 84
| 34.908451
| 0.766592
| 0.220395
| 0
| 0.745342
| 0
| 0
| 0.010234
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.024845
| 0.018634
| 0.26087
| 0.049689
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
37b53377c137ee09232cca8bb4894bcecacb86c0
| 131,184
|
py
|
Python
|
Phase 4/Insurance_Company.py
|
Rutvij-1/Panic.c
|
cc93b1b224f283257c6bd45fbb1a93ae88e427d1
|
[
"MIT"
] | null | null | null |
Phase 4/Insurance_Company.py
|
Rutvij-1/Panic.c
|
cc93b1b224f283257c6bd45fbb1a93ae88e427d1
|
[
"MIT"
] | null | null | null |
Phase 4/Insurance_Company.py
|
Rutvij-1/Panic.c
|
cc93b1b224f283257c6bd45fbb1a93ae88e427d1
|
[
"MIT"
] | null | null | null |
import subprocess as sp
import pymysql
import pymysql.cursors
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta
from datetime import datetime
def CheckAadhar(aadhar_no):
    """Validate an Aadhar number: exactly 12 ASCII digits.

    Prints a diagnostic and returns False on the first problem found;
    returns True when the number is well formed.
    """
    for c in aadhar_no:
        if c < '0' or c > '9':
            print("Failed to insert into database")
            # Bug fix: the message claimed digits run "from 1 to 9";
            # decimal digits are 0-9 (and '0' is accepted by the check).
            print(
                ">>>>>>>>>>>>>Aadhar Number should only consist of digits(from 0 to 9)")
            return False
    if len(aadhar_no) != 12:
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>Aadhar Number should only have 12 digits")
        return False
    return True
def CheckZipCode(zip_code):
    """Validate a zip code: ASCII digits only, at most 6 characters.

    Prints a diagnostic and returns False on failure; True otherwise.
    NOTE(review): an empty string currently passes — confirm whether
    that is intended before tightening.
    """
    for c in zip_code:
        if c < '0' or c > '9':
            print("Failed to insert into database")
            # Bug fix: the message claimed digits run "from 1 to 9";
            # decimal digits are 0-9 (and '0' is accepted by the check).
            print(
                ">>>>>>>>>>>>>Zip Code should only consist of digits(from 0 to 9)")
            return False
    if len(zip_code) > 6:
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>Zip Code cannot have more than 6 digits")
        return False
    return True
def CheckCustomerStatus(customer_status):
    """Return True when `customer_status` is one of the five recognised
    tiers (case-sensitive), False otherwise."""
    recognised_tiers = ("Platinum", "Gold", "Silver", "Bronze", "Normal")
    return customer_status in recognised_tiers
def CheckEmailID(email):
    """Loosely validate an e-mail address.

    Requires at least one '@', a '.' somewhere after the last '@' (but
    not immediately after it), a first character other than '@' and a
    last character other than '.'. Returns True/False.
    """
    if not email:
        # Bug fix: email[0] / email[n-1] below raised IndexError on "".
        return False
    pos_at = -1
    pos_dot = -1
    valid_email = True
    n = len(email)
    for i in range(n):
        if (email[i] == '@'):
            pos_at = i
        # Only dots AFTER an '@' count; a dot directly after it is invalid.
        if (email[i] == '.' and pos_at != -1):
            if (i-pos_at) == 1:
                valid_email = False
            pos_dot = i
    if (email[0] == '@' or email[n-1] == '.' or pos_at == -1 or pos_dot == -1):
        valid_email = False
    return valid_email
def CheckContacts(number):
    """Validate a contact number: only digits, '+' and '-', length 7-14.

    Prints a diagnostic and returns False on failure; True otherwise.
    """
    for c in number:
        if not (c >= '0' and c <= '9') and c != '+' and c != '-':
            print("Failed to insert into database")
            # Bug fix: the original passed `number` as a second print()
            # argument, so the output contained a literal "%s" followed
            # by the number; interpolate it instead.
            print(">>>>>>>>>>>>>Invalid character in the number %s" % number)
            return False
    if len(number) < 7 or len(number) > 14:
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>Invalid Contact Number format")
        return False
    return True
def GetAge(dob):
    """Parse a date string and return the age in whole years.

    Returns -1 when the parsed date lies in the future, which the
    callers treat as "invalid date" (also reused to sanity-check
    dates of issue/claim, not just births).
    """
    pdob = parse(dob)
    if pdob > datetime.now():
        return -1
    age = relativedelta(datetime.now(), pdob).years
    return age
def CheckTPAId(tpaid):
    """Return True when `tpaid` matches the TPA id shape: 9 characters,
    a '-' at index 4, everything else a digit. Prints diagnostics and
    returns False otherwise."""
    format_msg = ">>>>>>>>>>>>>TPA id has format <first 4 numbers>-<second 4 numbers>"
    # Length is checked first, so the index below is always in range.
    if len(tpaid) != 9 or tpaid[4] != '-':
        print("Failed to insert into database")
        print(format_msg)
        return False
    for ch in tpaid:
        if ch != '-' and not ('0' <= ch <= '9'):
            print("Failed to insert into database")
            print(format_msg)
            return False
    return True
def CheckPolicyId(policy_id):
    """Return True when `policy_id` matches the policy id shape:
    11 characters — 4 ASCII alphanumerics, a '#' at index 4, then
    6 digits. Prints diagnostics and returns False otherwise."""
    format_msg = ">>>>>>>>>>>>>Policy id has format <first 4 numbers/alphabets>#<second 6 numbers>"
    # Length is checked first, so the index below is always in range.
    if len(policy_id) != 11 or policy_id[4] != '#':
        print("Failed to insert into database")
        print(format_msg)
        return False
    for ch in policy_id[:4]:
        if not ('a' <= ch <= 'z' or 'A' <= ch <= 'Z' or '0' <= ch <= '9'):
            print("Failed to insert into database")
            print(format_msg)
            return False
    for ch in policy_id[5:]:
        if not ('0' <= ch <= '9'):
            print("Failed to insert into database")
            print(format_msg)
            return False
    return True
def AddCustomer():
    """Interactively collect a new customer's details, validate them, and
    insert rows into Customer, Customer_Age and Customer_Contact.

    Uses the module-level `cur`/`con` DB handles; commits on success,
    rolls back and prints a diagnostic on any exception.
    """
    try:
        # Takes customer details as input
        row = {}
        print("Enter new customer's details: ")
        row["aadhar_no"] = input("Aadhar Number: ")
        if not CheckAadhar(row["aadhar_no"]):
            return
        row["date_of_birth"] = input("Birth Date (YYYY-MM-DD): ")
        row["age"] = GetAge(row["date_of_birth"])
        if row["age"] == -1:
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Invalid Date of Birth")
            return
        # Name must be exactly three space-separated parts; fewer raises
        # IndexError and lands in the except handler below.
        name = (input("Name (Fname Minit Lname): ")).split(' ')
        row["first_name"] = name[0]
        row["middle_name"] = name[1]
        row["surname"] = name[2]
        row["email_id"] = input("Email ID: ")
        if not CheckEmailID(row["email_id"]):
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Invalid Email id format")
            return
        row["customer_status"] = input(
            "Customer Status(Platinum, Gold, Silver, Bronze, Normal): ")
        if not CheckCustomerStatus(row["customer_status"]):
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Invalid Customer Status")
            return
        row["street_address"] = input("Street Address: ")
        row["zip_code"] = input("Zip Code: ")
        if not CheckZipCode(row["zip_code"]):
            return
        row["city"] = input("City: ")
        row["state"] = input("State: ")
        row["contacts"] = (
            input("Customer contact numbers (space seperated): ")).split(' ')
        # NOTE(review): SQL built via %-interpolation of raw user input —
        # injection-prone; should use cur.execute(query, params) instead.
        query = "INSERT INTO Customer VALUES('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % (
            row["aadhar_no"], row["date_of_birth"], row["first_name"], row["middle_name"], row["surname"],
            row["email_id"], row["customer_status"], row["street_address"], row["zip_code"], row["city"], row["state"])
        cur.execute(query)
        query = "INSERT INTO Customer_Age VALUES(%d, '%s')" % (
            row["age"], row["aadhar_no"])
        cur.execute(query)
        # Invalid contact numbers are skipped, not fatal.
        for number in row["contacts"]:
            if CheckContacts(number):
                query = "INSERT INTO Customer_Contact VALUES('%s', '%s')" % (
                    number, row["aadhar_no"])
                cur.execute(query)
        con.commit()
        print("Inserted Into Database")
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
    return
def AddCustomerDependant():
    """Interactively add a dependant linked to one or more existing
    customers: inserts into Customer_Dependant, Customer_Dependant_Age
    and one Depends_On row per supporting customer.

    Verifies each referenced customer exists before inserting; commits on
    success, rolls back and prints a diagnostic on any exception.
    """
    try:
        # Takes dependant details as input
        row = {}
        print("Enter new customer dependant's details: ")
        row["aadhar_no"] = (
            input("Aadhar Numbers of the Customers they are dependant on: ")).split(' ')
        for number in row["aadhar_no"]:
            if not CheckAadhar(number):
                return
            # NOTE(review): string-interpolated SQL — injection-prone.
            query = "SELECT * FROM Customer WHERE aadhar_no = '%s'" % (number)
            cur.execute(query)
            if len(cur.fetchall()) == 0:
                print("Failed to insert into database")
                print(
                    ">>>>>>>>>>>>>Customer with Aadhar Number %s does not exist" % (number))
                return
        name = (input("Name (Fname Minit Lname): ")).split(' ')
        row["first_name"] = name[0]
        row["middle_name"] = name[1]
        row["surname"] = name[2]
        row["date_of_birth"] = input("Birth Date (YYYY-MM-DD): ")
        row["age"] = GetAge(row["date_of_birth"])
        if row["age"] == -1:
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Invalid Date of Birth")
            return
        query = "INSERT INTO Customer_Dependant VALUES('%s', '%s', '%s', '%s')" % (
            row["first_name"], row["middle_name"], row["surname"], row["date_of_birth"])
        cur.execute(query)
        query = "INSERT INTO Customer_Dependant_Age VALUES('%s', '%s', '%s', %d)" % (
            row["first_name"], row["middle_name"], row["surname"], row["age"])
        cur.execute(query)
        # One Depends_On row per supporting customer.
        for number in row["aadhar_no"]:
            query = "INSERT INTO Depends_On VALUES('%s', '%s', '%s', '%s')" % (
                row["first_name"], row["middle_name"], row["surname"], number)
            cur.execute(query)
        con.commit()
        print("Inserted Into Database")
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
    return
def AddEmployee():
    """Interactively collect a new employee's details and insert rows into
    Employee, Employee_Age and Employee_Contact.

    Supervisor department/serial numbers may be left blank, which stores
    SQL NULL. Commits on success; rolls back and prints a diagnostic on
    any exception.
    """
    try:
        row = {}
        print("Enter new employee's details: ")
        row["dept_no"] = int(input("Department Number: "))
        row["sno"] = int(input("Serial Number: "))
        row["aadhar_no"] = input("Aadhar Number: ")
        if not CheckAadhar(row["aadhar_no"]):
            return
        row["date_of_birth"] = input("Birth Date (YYYY-MM-DD): ")
        row["age"] = GetAge(row["date_of_birth"])
        name = (input("Name (Fname Minit Lname): ")).split(' ')
        if row["age"] == -1:
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Invalid Date of Birth")
            return
        row["first_name"] = name[0]
        row["middle_name"] = name[1]
        row["surname"] = name[2]
        row["email_id"] = input("Email ID: ")
        if not CheckEmailID(row["email_id"]):
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Invalid Email id format")
            return
        row["street_address"] = input("Street Address: ")
        row["zip_code"] = input("Zip Code: ")
        if not CheckZipCode(row["zip_code"]):
            return
        row["city"] = input("City: ")
        row["state"] = input("State: ")
        row["contacts"] = (
            input("Employee contact numbers (space seperated): ")).split(' ')
        sdept_no = input(
            "Supervisor Department Number (Leave blank if no supervisor): ")
        ssno = input(
            "Supervisor Serial Number (Leave blank if no supervisor): ")
        # Blank supervisor fields become the literal SQL keyword NULL;
        # otherwise the value must be all digits.
        if sdept_no == "":
            row["sdept_no"] = "NULL"
        else:
            flag = True
            for c in sdept_no:
                if c < '0' or c > '9':
                    flag = False
            if flag:
                row["sdept_no"] = sdept_no
            else:
                print("Failed to insert into database")
                print(">>>>>>>>>>>>>Invalid Supervisor Department Number")
                return
        if ssno == "":
            row["ssno"] = "NULL"
        else:
            flag = True
            for c in ssno:
                if c < '0' or c > '9':
                    flag = False
            if flag:
                row["ssno"] = ssno
            else:
                print("Failed to insert into database")
                print(">>>>>>>>>>>>>Invalid Supervisor Serial Number")
                return
        # NOTE(review): SQL built via %-interpolation of raw user input —
        # injection-prone; should use cur.execute(query, params) instead.
        query = "INSERT INTO Employee VALUES(%d, %d, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', %s, %s)" % (
            row["dept_no"], row["sno"], row["aadhar_no"], row["date_of_birth"], row["first_name"], row["middle_name"], row["surname"],
            row["email_id"], row["street_address"], row["zip_code"], row["city"], row["state"], row["sdept_no"], row["ssno"])
        cur.execute(query)
        query = "INSERT INTO Employee_Age VALUES(%d, %d, %d)" % (
            row["age"], row["sno"], row["dept_no"])
        cur.execute(query)
        # Invalid contact numbers are skipped, not fatal.
        for number in row["contacts"]:
            if CheckContacts(number):
                query = "INSERT INTO Employee_Contact VALUES('%s', %d, %d)" % (
                    number, row["sno"], row["dept_no"])
                cur.execute(query)
        con.commit()
        print("Inserted Into Database")
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
    return
def AddEmployeeDependant():
    """Interactively add a dependant linked to one or more existing
    employees: inserts into Employee_Dependant, Employee_Dependant_Age
    and one Provided_By row per providing employee.

    Verifies each (dept_no, sno) employee pair exists before inserting;
    commits on success, rolls back and prints a diagnostic on failure.
    """
    try:
        row = {}
        print("Enter new employee dependant's details: ")
        row["dept_no"] = (
            input("Department Numbers of the Employees who provide them: ")).split(' ')
        row["sno"] = (
            input("Serial Numbers of the Employees who provide them: ")).split(' ')
        # The two lists are paired positionally, so they must match in length.
        if len(row["dept_no"]) != len(row["sno"]):
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Unequal number of Employees")
            return
        for i in range(len(row["sno"])):
            # NOTE(review): string-interpolated SQL — injection-prone.
            query = "SELECT * FROM Employee WHERE department_no = %d AND serial_no = %d" % (
                int(row["dept_no"][i]), int(row["sno"][i]))
            cur.execute(query)
            if len(cur.fetchall()) == 0:
                print("Failed to insert into database")
                print(">>>>>>>>>>>>>Employee with Department Number %d and Serial Number %d does not exist" % (
                    int(row["dept_no"][i]), int(row["sno"][i])))
                return
        name = (input("Name (Fname Minit Lname): ")).split(' ')
        row["first_name"] = name[0]
        row["middle_name"] = name[1]
        row["surname"] = name[2]
        row["date_of_birth"] = input("Birth Date (YYYY-MM-DD): ")
        row["age"] = GetAge(row["date_of_birth"])
        if row["age"] == -1:
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Invalid Date of Birth")
            return
        query = "INSERT INTO Employee_Dependant VALUES('%s', '%s', '%s', '%s')" % (
            row["first_name"], row["middle_name"], row["surname"], row["date_of_birth"])
        cur.execute(query)
        query = "INSERT INTO Employee_Dependant_Age VALUES(%d, '%s', '%s', '%s')" % (
            row["age"], row["first_name"], row["middle_name"], row["surname"])
        cur.execute(query)
        # One Provided_By row per providing employee.
        for i in range(len(row["sno"])):
            query = "INSERT INTO Provided_By VALUES(%d, %d, '%s', '%s', '%s')" % (
                int(row["dept_no"][i]), int(row["sno"][i]), row["first_name"], row["middle_name"], row["surname"])
            cur.execute(query)
        con.commit()
        print("Inserted Into Database")
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
    return
def AddPolicy():
    """Interactively collect a new policy's details and insert one row
    into Policy.

    Returns the new policy id on success so the caller can chain a
    policy-type-specific Add* function, or "" on any failure (the
    Add*Insurance functions treat "" as "do nothing").
    """
    try:
        row = {}
        print("Enter new policy's details: ")
        row["policy_id"] = input("Policy Id: ")
        if not CheckPolicyId(row["policy_id"]):
            return ""
        row["tnc"] = input(
            "Terms and Conditions (Put '-' to seperate between paragraphs): ")
        row["date_of_issue"] = input("Date of Issue (YYYY-MM-DD): ")
        # GetAge doubles as a "date is not in the future" check.
        if GetAge(row["date_of_issue"]) == -1:
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Invalid Date of Issue")
            return ""
        row["duration"] = int(input("Duration in months: "))
        row["premium"] = float(input("Premium value: "))
        row["sum_assured"] = float(input("Sum assured: "))
        row["dept_no"] = int(
            input("Department Number of Employee issuing policy: "))
        row["sno"] = int(input("Serial Number of Employee issuing policy: "))
        row["aadhar_no"] = input("Aadhar Number of Customer buying policy: ")
        if not CheckAadhar(row["aadhar_no"]):
            return ""
        # NOTE(review): string-interpolated SQL — injection-prone.
        query = "INSERT INTO Policy VALUES('%s', '%s', '%s', %d, %f, %f, '%s', %d, %d)" % (
            row["policy_id"], row["tnc"], row["date_of_issue"], row["duration"],
            row["premium"], row["sum_assured"], row["aadhar_no"], row["dept_no"], row["sno"])
        cur.execute(query)
        con.commit()
        print("Inserted Into Database")
        return row["policy_id"]
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
    return ""
def AddLifeInsurance(policy_id):
    """Attach life-insurance details to an existing policy: one Life row
    plus one Beneficiaries row per named beneficiary.

    A blank policy_id (AddPolicy's failure sentinel) is a no-op.
    Commits on success; rolls back and prints a diagnostic on failure.
    """
    if policy_id == "":
        return
    try:
        row = {}
        row["dvb"] = float(input("Death Value Benefit Amount: "))
        row["history"] = input(
            "Medical History (Put '-' to seperate between paragraphs): ")
        row["beneficiaries"] = (
            input("Names of Beneficiaries (space seperated): ")).split(' ')
        # NOTE(review): string-interpolated SQL — injection-prone.
        query = "INSERT INTO Life VALUES('%s', %f, '%s')" % (
            policy_id, row["dvb"], row["history"])
        cur.execute(query)
        for name in row["beneficiaries"]:
            query = "INSERT INTO Beneficiaries VALUES('%s', '%s')" % (
                policy_id, name)
            cur.execute(query)
        con.commit()
        print("Inserted Into Database")
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
    return
def AddVehicleInsurance(policy_id):
    """Attach vehicle-insurance details to an existing policy: one Vehicle
    row, one Customer_License_No row, and one Vehicle_Colours row per
    colour entered.

    A blank policy_id (AddPolicy's failure sentinel) is a no-op.
    Commits on success; rolls back and prints a diagnostic on failure.
    """
    if policy_id == "":
        return
    try:
        row = {}
        row["license_plate_no"] = input("License Plate Number of Car: ")
        row["customer_license_no"] = input("License Number of Customer: ")
        row["colour"] = (input(
            "Colours of Car (space seperated, and use '-' where there is a space in name of colour): ")).split(' ')
        # NOTE(review): string-interpolated SQL — injection-prone.
        query = "INSERT INTO Vehicle VALUES('%s', '%s')" % (
            policy_id, row["license_plate_no"])
        cur.execute(query)
        query = "INSERT INTO Customer_License_No VALUES('%s', '%s')" % (
            policy_id, row["customer_license_no"])
        cur.execute(query)
        for col in row["colour"]:
            query = "INSERT INTO Vehicle_Colours VALUES('%s', '%s')" % (
                policy_id, col)
            cur.execute(query)
        con.commit()
        print("Inserted Into Database")
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
    return
def AddMedicalInsurance(policy_id):
    """Attach medical-insurance details to an existing policy: one Medical
    row, plus one Cashless_Hospitals and one Conditions_Covered row per
    entered item.

    A blank policy_id (AddPolicy's failure sentinel) is a no-op.
    Commits on success; rolls back and prints a diagnostic on failure.
    """
    if policy_id == "":
        return
    try:
        row = {}
        row["dvb"] = float(input("Death Value Benefit Amount: "))
        row["cashless_hospitals"] = (input(
            "Cashless Hospitals (space seperated, and use '-' where there is a space in the name): ")).split(' ')
        row["condition"] = (input(
            "Medical conditions covered (space seperated, and use '-' where there is a space in name of condition): ")).split(' ')
        # NOTE(review): string-interpolated SQL — injection-prone.
        query = "INSERT INTO Medical VALUES('%s', %f)" % (
            policy_id, row["dvb"])
        cur.execute(query)
        for name in row["cashless_hospitals"]:
            query = "INSERT INTO Cashless_Hospitals VALUES('%s','%s')" % (
                policy_id, name)
            cur.execute(query)
        for name in row["condition"]:
            query = "INSERT INTO Conditions_Covered VALUES('%s','%s')" % (
                policy_id, name)
            cur.execute(query)
        con.commit()
        print("Inserted Into Database")
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
    return
def AddHouseInsurance(policy_id):
    """Attach house-insurance details to an existing policy: one House row
    with replacement cost and the insured address.

    A blank policy_id (AddPolicy's failure sentinel) is a no-op. Note the
    early return on a bad zip code happens before any insert, so nothing
    needs rolling back there. Commits on success; rolls back on failure.
    """
    if policy_id == "":
        return
    try:
        row = {}
        row["replacement_cost"] = float(input("Replacement Cost Value: "))
        row["street_address"] = input("Street Address: ")
        row["zip_code"] = input("Zip Code: ")
        if not CheckZipCode(row["zip_code"]):
            return
        row["city"] = input("City: ")
        row["state"] = input("State: ")
        # NOTE(review): string-interpolated SQL — injection-prone.
        query = "INSERT INTO House VALUES('%s', %f, '%s', '%s', '%s', '%s')" % (
            policy_id, row["replacement_cost"], row["street_address"], row["zip_code"], row["city"],
            row["state"])
        cur.execute(query)
        con.commit()
        print("Inserted Into Database")
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
    return
def AddTravelInsurance(policy_id):
    """Attach travel-insurance details to an existing policy: one Travel
    row plus one Travel_Destinations row per destination entered.

    A blank policy_id (AddPolicy's failure sentinel) is a no-op.
    Commits on success; rolls back and prints a diagnostic on failure.
    """
    if policy_id == "":
        return
    try:
        row = {}
        row["itenerary"] = input("Iternerary: ")
        row["airline_and_hotel_bookings"] = input(
            "Airline and Hotel Details: ")
        row["destination"] = (input(
            "Travel Destinations (space seperated, and use '-' where there is a space in name of destination): ")).split(' ')
        # NOTE(review): string-interpolated SQL — injection-prone.
        query = "INSERT INTO Travel VALUES('%s', '%s', '%s')" % (
            policy_id, row["itenerary"], row["airline_and_hotel_bookings"])
        cur.execute(query)
        for dest in row["destination"]:
            query = "INSERT INTO Travel_Destinations VALUES('%s', '%s')" % (
                policy_id, dest)
            cur.execute(query)
        con.commit()
        print("Inserted Into Database")
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
    return
def AddTPA():
    """Interactively collect a new third-party administrator's details and
    insert rows into TPA, TPA_Contact_Info and
    TPA_Investigations_Conducted.

    Commits on success; rolls back and prints a diagnostic on failure.
    """
    try:
        row = {}
        print("Enter new TPA's details: ")
        row["TPA_id"] = input("TPA's Id: ")
        if not CheckTPAId(row["TPA_id"]):
            return
        row["TPA_name"] = input("TPA's Name: ")
        row["street_address"] = input("TPA's Street Address: ")
        row["zip_code"] = input("Zip code of TPA's Address: ")
        if not CheckZipCode(row["zip_code"]):
            return
        row["city"] = input("City of TPA's Address: ")
        row["state"] = input("State of TPA's Address: ")
        row["contact_number"] = (
            input("TPA's Contact Numbers (space seperated): ")).split(' ')
        row["type"] = (input(
            "TPA's Investigation Types (space seperated, and use '-' where there is a space in name of type): ")).split(' ')
        # NOTE(review): string-interpolated SQL — injection-prone.
        query = "INSERT INTO TPA VALUES('%s', '%s', '%s', '%s', '%s', '%s')" % (
            row["TPA_id"], row["TPA_name"], row["street_address"], row["zip_code"], row["city"],
            row["state"])
        cur.execute(query)
        # Invalid contact numbers are skipped, not fatal.
        for number in row["contact_number"]:
            if CheckContacts(number):
                query = "INSERT INTO TPA_Contact_Info VALUES('%s', '%s')" % (
                    number, row["TPA_id"])
                cur.execute(query)
        for types in row["type"]:
            query = "INSERT INTO TPA_Investigations_Conducted VALUES('%s', '%s')" % (
                types, row["TPA_id"])
            cur.execute(query)
        con.commit()
        print("Inserted Into Database")
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
    return
def ResloveClaim():
    """Record a policy claim: inserts into Resolves_Claims, Claim_Report
    and Claim_Date after validating the TPA id, policy id and customer
    aadhar number.

    NOTE(review): the function name is misspelled ("Reslove"), but it is
    the public entry point — renaming would break callers.
    Commits on success; rolls back and prints a diagnostic on failure.
    """
    try:
        row = {}
        print("Enter claimed policy's details")
        row["TPA_id"] = input("Monitoring TPA's Id: ")
        if not CheckTPAId(row["TPA_id"]):
            return
        row["dept_no"] = int(
            input("Department Number of the Employee who issued the policy: "))
        row["sno"] = int(
            input("Serial Number of the Employee who issued the policy: "))
        row["policy_id"] = input("Id of the policy being claimed: ")
        if not CheckPolicyId(row["policy_id"]):
            return
        row["aadhar_no"] = input(
            "Aadhar Number of the customer who bought the policy being claimed: ")
        if not CheckAadhar(row["aadhar_no"]):
            return
        row["report"] = input(
            "Report On Policy claim (Put '-' to seperate between paragraphs): ")
        row["date_of_claim"] = input("Date of Claim (YYYY-MM-DD): ")
        # GetAge doubles as a "date is not in the future" check.
        if GetAge(row["date_of_claim"]) == -1:
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Invalid Date of claim")
            return
        # NOTE(review): string-interpolated SQL — injection-prone.
        query = "INSERT INTO Resolves_Claims VALUES('%s', %d, %d, '%s', '%s')" % (
            row["TPA_id"], row["dept_no"], row["sno"], row["policy_id"],
            row["aadhar_no"])
        cur.execute(query)
        query = "INSERT INTO Claim_Report VALUES('%s', '%s')" % (
            row["policy_id"], row["report"])
        cur.execute(query)
        query = "INSERT INTO Claim_Date VALUES('%s', '%s')" % (
            row["policy_id"], row["date_of_claim"])
        cur.execute(query)
        con.commit()
        print("Inserted Into Database")
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
def UpdateCustomerInfo():
    """Interactively replace an existing customer's details.

    Looks up the customer by aadhar number, then updates Customer and
    Customer_Age, and rebuilds Customer_Contact (delete-then-insert).
    Commits on success; rolls back and prints a diagnostic on failure.
    """
    try:
        row = {}
        row["aadhar_no"] = input("Enter Aadhar Number of the Customer: ")
        # NOTE(review): string-interpolated SQL — injection-prone.
        query = "SELECT * FROM Customer WHERE aadhar_no = '%s'" % (
            row["aadhar_no"])
        cur.execute(query)
        record = cur.fetchall()
        print()
        if len(record) == 0:
            print("No customer with this Aadhar number found")
            return
        print("Enter Customer's updated details: ")
        row["date_of_birth"] = input("Birth Date (YYYY-MM-DD): ")
        name = (input("Name (Fname Minit Lname): ")).split(' ')
        row["first_name"] = name[0]
        row["middle_name"] = name[1]
        row["surname"] = name[2]
        row["email_id"] = input("Email ID: ")
        if not CheckEmailID(row["email_id"]):
            print("Failed to update database")
            print(">>>>>>>>>>>>>Invalid Email id format")
            return
        row["customer_status"] = input(
            "Customer Status(Platinum, Gold, Silver, Bronze, Normal): ")
        if not CheckCustomerStatus(row["customer_status"]):
            print("Failed to update database")
            print(">>>>>>>>>>>>>Invalid Customer Status")
            return
        row["street_address"] = input("Street Address: ")
        row["zip_code"] = input("Zip Code: ")
        if not CheckZipCode(row["zip_code"]):
            return
        row["city"] = input("City: ")
        row["state"] = input("State: ")
        row["contacts"] = (
            input("Customer contact numbers (space seperated): ")).split(' ')
        row["age"] = GetAge(row["date_of_birth"])
        if row["age"] == -1:
            print("Failed to update database")
            print(">>>>>>>>>>>>>Invalid Date of Birth")
            return
        query = "UPDATE Customer SET date_of_birth = '%s', first_name = '%s', middle_name = '%s', surname = '%s', email_id = '%s', customer_status = '%s', street_address = '%s', zip_code = '%s', city = '%s', state = '%s' WHERE aadhar_no = '%s'" % (
            row["date_of_birth"], row["first_name"], row["middle_name"], row["surname"], row["email_id"], row["customer_status"], row["street_address"], row["zip_code"], row["city"], row["state"], row["aadhar_no"])
        cur.execute(query)
        query = "UPDATE Customer_Age SET age = %d WHERE customer_aadhar_no = '%s' " % (
            row["age"], row["aadhar_no"])
        cur.execute(query)
        # Contacts are replaced wholesale: delete old rows, insert new ones.
        query = "DELETE FROM Customer_Contact WHERE customer_aadhar_no = '%s'" % (
            row["aadhar_no"])
        cur.execute(query)
        for number in row["contacts"]:
            if CheckContacts(number):
                query = "INSERT INTO Customer_Contact VALUES('%s', '%s')" % (
                    number, row["aadhar_no"])
                cur.execute(query)
        con.commit()
        print("Updated Database")
    except Exception as e:
        con.rollback()
        print("Failed to retrieve from database")
        print(">>>>>>>>>>>>>", e)
    return
def UpdateCustomerDependentInfo():
    """Interactively replace a customer dependant's record.

    Identifies the dependant by name plus the aadhar numbers of the
    supporting customers, verifies all related rows exist, then performs
    a delete-and-reinsert across Depends_On, Customer_Dependant_Age and
    Customer_Dependant (dependant tables are keyed by name, so UPDATE is
    impractical). Commits on success; rolls back on failure.
    """
    try:
        row = {}
        print("Enter customer dependant's details: ")
        row["aadhar_no"] = (
            input("Aadhar Numbers of the Customer they are dependant on (space seperated): ")).split(" ")
        name = (input("Name (Fname Minit Lname): ")).split(' ')
        row["first_name"] = name[0]
        row["middle_name"] = name[1]
        row["surname"] = name[2]
        record1 = {}
        # f stays 1 only if every listed customer has a Depends_On link.
        f = 1
        for i in row["aadhar_no"]:
            # NOTE(review): string-interpolated SQL — injection-prone.
            query = "SELECT * from Depends_On WHERE dependant_first_name = '%s' AND dependant_middle_name = '%s' AND dependant_surname = '%s' AND customer_aadhar_no = '%s'" % (
                row["first_name"], row["middle_name"], row["surname"], i)
            cur.execute(query)
            record1 = cur.fetchall()
            if len(record1) == 0:
                f = 0
            print()
        query = "SELECT * from Customer_Dependant WHERE first_name = '%s' AND middle_name = '%s' AND surname = '%s'" % (
            row["first_name"], row["middle_name"], row["surname"])
        cur.execute(query)
        record2 = cur.fetchall()
        print()
        query = "SELECT * from Customer_Dependant_Age WHERE dependant_first_name = '%s' AND dependant_middle_name = '%s' AND dependant_surname = '%s'" % (
            row["first_name"], row["middle_name"], row["surname"])
        cur.execute(query)
        record3 = cur.fetchall()
        print()
        if f == 0 or len(record2) == 0 or len(record3) == 0:
            print("No such record found")
            return
        info = {}
        info["aadhar_no"] = row["aadhar_no"]
        print("Enter updated Customer Dependant Information")
        name = (input("Name (Fname Minit Lname): ")).split(' ')
        info["first_name"] = name[0]
        info["middle_name"] = name[1]
        info["surname"] = name[2]
        info["date_of_birth"] = input("Birth Date (YYYY-MM-DD): ")
        info["age"] = GetAge(info["date_of_birth"])
        # Delete in FK-safe order: links first, then age, then the dependant.
        for i in row["aadhar_no"]:
            query = "DELETE FROM Depends_On WHERE dependant_first_name = '%s' AND dependant_middle_name = '%s' AND dependant_surname = '%s' AND customer_aadhar_no = '%s'" % (
                row["first_name"], row["middle_name"], row["surname"], i)
            cur.execute(query)
            print()
        query = "DELETE FROM Customer_Dependant_Age WHERE dependant_first_name = '%s' AND dependant_middle_name = '%s' AND dependant_surname = '%s'" % (
            row["first_name"], row["middle_name"], row["surname"])
        cur.execute(query)
        print()
        query = "DELETE FROM Customer_Dependant WHERE first_name = '%s' AND middle_name = '%s' AND surname = '%s'" % (
            row["first_name"], row["middle_name"], row["surname"])
        cur.execute(query)
        print()
        query = "INSERT INTO Customer_Dependant VALUES('%s', '%s', '%s', '%s')" % (
            info["first_name"], info["middle_name"], info["surname"], info["date_of_birth"])
        cur.execute(query)
        print()
        query = "INSERT INTO Customer_Dependant_Age VALUES('%s', '%s', '%s', %d)" % (
            info["first_name"], info["middle_name"], info["surname"], info["age"])
        cur.execute(query)
        print()
        for i in row["aadhar_no"]:
            query = "INSERT INTO Depends_On VALUES('%s', '%s', '%s', '%s')" % (
                info["first_name"], info["middle_name"], info["surname"], i)
            cur.execute(query)
            print()
        con.commit()
        print("Updated Database")
    except Exception as e:
        con.rollback()
        print("Failed to retrieve from database")
        print(">>>>>>>>>>>>>", e)
    return
def UpdateEmployeeInfo():
    """Interactively replace an existing employee's details.

    Looks up the employee by (department_no, serial_no), then updates
    Employee and Employee_Age, and rebuilds Employee_Contact
    (delete-then-insert). Blank supervisor fields store SQL NULL.
    Commits on success; rolls back and prints a diagnostic on failure.
    """
    try:
        row = {}
        print("Enter employee's details: ")
        row["dept_no"] = int(input("Department Number: "))
        row["sno"] = int(input("Serial Number: "))
        # NOTE(review): string-interpolated SQL — injection-prone.
        query = "SELECT * from Employee WHERE department_no = %d AND serial_no = %d" % (
            row["dept_no"], row["sno"])
        cur.execute(query)
        record = cur.fetchall()
        print()
        if len(record) == 0:
            print("No employee with this department number and serial found")
            return
        print("Enter update Details")
        row["date_of_birth"] = input("Birth Date (YYYY-MM-DD): ")
        name = (input("Name (Fname Minit Lname): ")).split(' ')
        row["first_name"] = name[0]
        row["middle_name"] = name[1]
        row["surname"] = name[2]
        row["email_id"] = input("Email ID: ")
        if not CheckEmailID(row["email_id"]):
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Invalid Email id format")
            return
        row["street_address"] = input("Street Address: ")
        row["zip_code"] = input("Zip Code: ")
        if not CheckZipCode(row["zip_code"]):
            return
        row["city"] = input("City: ")
        row["state"] = input("State: ")
        row["contacts"] = (
            input("Employee contact numbers (space seperated): ")).split(' ')
        row["age"] = GetAge(row["date_of_birth"])
        if row["age"] == -1:
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Invalid Date of Birth")
            return
        sdept_no = input(
            "Supervisor Department Number (Leave blank if no supervisor): ")
        ssno = input(
            "Supervisor Serial Number (Leave blank if no supervisor): ")
        # Blank supervisor fields become the literal SQL keyword NULL;
        # otherwise the value must be all digits.
        if sdept_no == "":
            row["sdept_no"] = "NULL"
        else:
            flag = True
            for c in sdept_no:
                if c < '0' or c > '9':
                    flag = False
            if flag:
                row["sdept_no"] = sdept_no
            else:
                print("Failed to insert into database")
                print(">>>>>>>>>>>>>Invalid Supervisor Department Number")
                return
        if ssno == "":
            row["ssno"] = "NULL"
        else:
            flag = True
            for c in ssno:
                if c < '0' or c > '9':
                    flag = False
            if flag:
                row["ssno"] = ssno
            else:
                print("Failed to insert into database")
                print(">>>>>>>>>>>>>Invalid Supervisor Serial Number")
                return
        query = "UPDATE Employee SET date_of_birth = '%s', first_name = '%s', middle_name = '%s', surname = '%s', email_id = '%s', street_address = '%s', zip_code = '%s', city = '%s', state = '%s', sup_department_no = %s, sup_serial_no = %s WHERE department_no = %d AND serial_no = %d" % (
            row["date_of_birth"], row["first_name"], row["middle_name"], row["surname"], row["email_id"], row["street_address"], row["zip_code"], row["city"], row["state"], row["sdept_no"], row["ssno"], row["dept_no"], row["sno"])
        cur.execute(query)
        query = "UPDATE Employee_Age SET age = %d WHERE employee_serial_no = %d AND employee_department_no = %d" % (
            row["age"], row["sno"], row["dept_no"])
        cur.execute(query)
        # Contacts are replaced wholesale: delete old rows, insert new ones.
        query = "DELETE FROM Employee_Contact WHERE employee_serial_no = %d AND employee_department_no = %d" % (
            row["sno"], row["dept_no"])
        cur.execute(query)
        for number in row["contacts"]:
            if CheckContacts(number):
                query = "INSERT INTO Employee_Contact VALUES('%s', %d, %d)" % (
                    number, row["sno"], row["dept_no"])
                cur.execute(query)
        con.commit()
        print("Updated Database")
    except Exception as e:
        con.rollback()
        print("Failed to insert into database")
        print(">>>>>>>>>>>>>", e)
    return
def UpdateEmployeeDependantInfo():
    """Interactively replace an employee dependant's record.

    Identifies the dependant by name plus the (dept_no, sno) pairs of the
    providing employees, verifies all related rows exist, then performs a
    delete-and-reinsert across Provided_By, Employee_Dependant_Age and
    Employee_Dependant (dependant tables are keyed by name, so UPDATE is
    impractical). Commits on success; rolls back on failure.
    """
    try:
        row = {}
        print("Enter employee dependant's details: ")
        row["dept_no"] = (
            input("Department Numbers of the Employees who provide them: ")).split(' ')
        row["sno"] = (
            input("Serial Numbers of the Employees who provide them: ")).split(' ')
        name = (input("Name (Fname Minit Lname): ")).split(' ')
        row["first_name"] = name[0]
        row["middle_name"] = name[1]
        row["surname"] = name[2]
        record1 = {}
        # f stays 1 only if every listed employee has a Provided_By link.
        f = 1
        for i in range(len(row["dept_no"])):
            # NOTE(review): string-interpolated SQL — injection-prone.
            query = "SELECT * from Provided_By WHERE employee_department_no = %d AND employee_serial_no = %d AND dependant_first_name = '%s' AND dependant_middle_name = '%s' AND dependant_surname = '%s'" % (
                int(row["dept_no"][i]), int(row["sno"][i]), row["first_name"], row["middle_name"], row["surname"])
            cur.execute(query)
            record1 = cur.fetchall()
            if len(record1) == 0:
                f = 0
            print()
        query = "SELECT * from Employee_Dependant WHERE first_name = '%s' AND middle_name = '%s' AND surname = '%s'" % (
            row["first_name"], row["middle_name"], row["surname"])
        cur.execute(query)
        record2 = cur.fetchall()
        print()
        query = "SELECT * from Employee_Dependant_Age WHERE dependant_first_name = '%s' AND dependant_middle_name = '%s' AND dependant_surname = '%s'" % (
            row["first_name"], row["middle_name"], row["surname"])
        cur.execute(query)
        record3 = cur.fetchall()
        print()
        if f == 0 or len(record2) == 0 or len(record3) == 0:
            print("No such record found")
            return
        info = {}
        info["sno"] = row["sno"]
        info["dept_no"] = row["dept_no"]
        print("Enter updated Employee Dependant Information")
        name = (input("Name (Fname Minit Lname): ")).split(' ')
        info["first_name"] = name[0]
        info["middle_name"] = name[1]
        info["surname"] = name[2]
        info["date_of_birth"] = input("Birth Date (YYYY-MM-DD): ")
        info["age"] = GetAge(info["date_of_birth"])
        if info["age"] == -1:
            print("Failed to update database")
            print(">>>>>>>>>>>>>Invalid Date of Birth")
            return
        # Delete in FK-safe order: links first, then age, then the dependant.
        for i in range(len(row["dept_no"])):
            query = "DELETE FROM Provided_By WHERE employee_department_no = %d AND employee_serial_no = %d AND dependant_first_name = '%s' AND dependant_middle_name = '%s' AND dependant_surname = '%s'" % (
                int(row["dept_no"][i]), int(row["sno"][i]), row["first_name"], row["middle_name"], row["surname"])
            cur.execute(query)
        query = "DELETE FROM Employee_Dependant_Age WHERE dependant_first_name = '%s' AND dependant_middle_name = '%s' AND dependant_surname = '%s'" % (
            row["first_name"], row["middle_name"], row["surname"])
        cur.execute(query)
        print()
        query = "DELETE FROM Employee_Dependant WHERE first_name = '%s' AND middle_name = '%s' AND surname = '%s'" % (
            row["first_name"], row["middle_name"], row["surname"])
        cur.execute(query)
        print()
        query = "INSERT INTO Employee_Dependant VALUES('%s', '%s', '%s', '%s')" % (
            info["first_name"], info["middle_name"], info["surname"], info["date_of_birth"])
        cur.execute(query)
        query = "INSERT INTO Employee_Dependant_Age VALUES(%d, '%s', '%s', '%s')" % (
            info["age"], info["first_name"], info["middle_name"], info["surname"])
        cur.execute(query)
        for i in range(len(row["sno"])):
            query = "INSERT INTO Provided_By VALUES(%d, %d, '%s', '%s', '%s')" % (int(info["dept_no"][i]), int(
                info["sno"][i]), info["first_name"], info["middle_name"], info["surname"])
            cur.execute(query)
        con.commit()
    except Exception as e:
        con.rollback()
        print("Failed to update database")
        print(">>>>>>>>>>>>>", e)
    return
def UpdatePolicy():
    """Interactively update an existing policy and its subtype-specific rows.

    Prompts for a policy id, verifies it exists, reads replacement values
    for the base Policy row, then updates whichever subtype table the
    policy belongs to (Life, Medical, Vehicle, House or Travel), replacing
    its multi-valued attributes wholesale (delete then re-insert).
    Commits on success; rolls back and reports on any error.
    """
    try:
        row = {}
        row["policy_id"] = input("Enter Policy Id: ")
        # Parameterized queries throughout: user input is bound via the
        # driver, never interpolated into SQL text (SQL-injection fix).
        cur.execute("SELECT * from Policy WHERE policy_id = %s",
                    (row["policy_id"],))
        record = cur.fetchall()
        print()
        if len(record) == 0:
            print("No policy with this id found")
            return
        print("Enter new policy's details")
        row["tnc"] = input(
            "Terms and Conditions (Put '-' to seperate between paragraphs): ")
        row["date_of_issue"] = input("Date of Issue (YYYY-MM-DD): ")
        # GetAge returns -1 for an unparseable date; reused here as a
        # date-format validator.
        if GetAge(row["date_of_issue"]) == -1:
            print("Failed to insert into database")
            print(">>>>>>>>>>>>>Invalid Date of Issue")
            return
        row["duration"] = int(input("Duration in months: "))
        row["premium"] = float(input("Premium value: "))
        row["sum_assured"] = float(input("Sum assured: "))
        row["dept_no"] = int(
            input("Department Number of Employee issuing policy: "))
        row["sno"] = int(input("Serial Number of Employee issuing policy: "))
        row["aadhar_no"] = input("Aadhar Number of Customer buying policy: ")
        if not CheckAadhar(row["aadhar_no"]):
            return
        # NOTE: "durantion_in_months" is the (misspelled) column name used
        # by the schema -- do not "fix" the spelling here.
        cur.execute(
            "UPDATE Policy SET terms_and_conditions = %s, date_of_issue = %s, "
            "durantion_in_months = %s, premium_value = %s, sum_assured = %s, "
            "customer_aadhar_no = %s, employee_department_no = %s, "
            "employee_serial_no = %s WHERE policy_id = %s",
            (row["tnc"], row["date_of_issue"], row["duration"],
             row["premium"], row["sum_assured"], row["aadhar_no"],
             row["dept_no"], row["sno"], row["policy_id"]))
        # Probe every subtype table to find out what kind of policy this is.
        cur.execute("SELECT * FROM Life WHERE policy_id = %s",
                    (row["policy_id"],))
        lifet = cur.fetchall()
        cur.execute("SELECT * FROM Medical WHERE policy_id = %s",
                    (row["policy_id"],))
        medt = cur.fetchall()
        cur.execute("SELECT * FROM Vehicle WHERE policy_id = %s",
                    (row["policy_id"],))
        veht = cur.fetchall()
        cur.execute("SELECT * FROM House WHERE policy_id = %s",
                    (row["policy_id"],))
        hset = cur.fetchall()
        cur.execute("SELECT * FROM Travel WHERE policy_id = %s",
                    (row["policy_id"],))
        trat = cur.fetchall()
        if len(lifet) > 0:
            # Life policy: replace beneficiaries wholesale.
            cur.execute("DELETE FROM Beneficiaries WHERE policy_id = %s",
                        (row["policy_id"],))
            print("Enter new Life Insurance policy's details")
            row["dvb"] = float(input("Death Value Benefit Amount: "))
            row["history"] = input(
                "Medical History (Put '-' to seperate between paragraphs): ")
            row["beneficiaries"] = (
                input("Names of Beneficiaries (space seperated): ")).split(' ')
            cur.execute(
                "UPDATE Life SET death_value_benefit = %s, "
                "medical_history = %s WHERE policy_id = %s",
                (row["dvb"], row["history"], row["policy_id"]))
            for name in row["beneficiaries"]:
                cur.execute("INSERT INTO Beneficiaries VALUES(%s, %s)",
                            (row["policy_id"], name))
        elif len(medt) > 0:
            # Medical policy: replace hospitals and covered conditions.
            cur.execute("DELETE FROM Cashless_Hospitals WHERE policy_id = %s",
                        (row["policy_id"],))
            cur.execute("DELETE FROM Conditions_Covered WHERE policy_id = %s",
                        (row["policy_id"],))
            print("Enter new Medical Insurance policy's details")
            row["dvb"] = float(input("Death Value Benefit Amount: "))
            row["cashless_hospitals"] = (input(
                "Cashless Hospitals (space seperated, and use '-' where there is a space in the name): ")).split(' ')
            row["condition"] = (input(
                "Medical conditions covered (space seperated, and use '-' where there is a space in name of condition): ")).split(' ')
            cur.execute(
                "UPDATE Medical SET death_value_benefit = %s "
                "WHERE policy_id = %s",
                (row["dvb"], row["policy_id"]))
            for name in row["cashless_hospitals"]:
                cur.execute("INSERT INTO Cashless_Hospitals VALUES(%s,%s)",
                            (row["policy_id"], name))
            for name in row["condition"]:
                cur.execute("INSERT INTO Conditions_Covered VALUES(%s,%s)",
                            (row["policy_id"], name))
        elif len(veht) > 0:
            # Vehicle policy: replace colours, update plate and license no.
            cur.execute("DELETE FROM Vehicle_Colours WHERE policy_id = %s",
                        (row["policy_id"],))
            print("Enter new Vehicle Insurance policy's details")
            row["license_plate_no"] = input("License Plate Number of Car: ")
            row["customer_license_no"] = input("License Number of Customer: ")
            row["colour"] = (input(
                "Colours of Car (space seperated, and use '-' where there is a space in name of colour): ")).split(' ')
            cur.execute(
                "UPDATE Vehicle SET license_plate_no = %s "
                "WHERE policy_id = %s",
                (row["license_plate_no"], row["policy_id"]))
            cur.execute(
                "UPDATE Customer_License_No SET customer_license_no = %s "
                "WHERE policy_id = %s",
                (row["customer_license_no"], row["policy_id"]))
            for col in row["colour"]:
                cur.execute("INSERT INTO Vehicle_Colours VALUES(%s, %s)",
                            (row["policy_id"], col))
        elif len(hset) > 0:
            # House policy: single-row update, no multi-valued attributes.
            print("Enter new House Insurance policy's details")
            row["replacement_cost"] = float(input("Replacement Cost Value: "))
            row["street_address"] = input("Street Address: ")
            row["zip_code"] = input("Zip Code: ")
            if not CheckZipCode(row["zip_code"]):
                return
            row["city"] = input("City: ")
            row["state"] = input("State: ")
            cur.execute(
                "UPDATE House SET replacement_cost = %s, street_address = %s, "
                "zip_code = %s, city = %s, state = %s WHERE policy_id = %s",
                (row["replacement_cost"], row["street_address"],
                 row["zip_code"], row["city"], row["state"],
                 row["policy_id"]))
        elif len(trat) > 0:
            # Travel policy: replace destinations wholesale.
            cur.execute("DELETE FROM Travel_Destinations WHERE policy_id = %s",
                        (row["policy_id"],))
            print("Enter new Travel Insurance policy's details")
            row["itenerary"] = input("Iternerary: ")
            row["airline_and_hotel_bookings"] = input(
                "Airline and Hotel Details: ")
            row["destination"] = (input(
                "Travel Destinations (space seperated, and use '-' where there is a space in name of destination): ")).split(' ')
            cur.execute(
                "UPDATE Travel SET itenerary = %s, "
                "airline_and_hotel_bookings = %s WHERE policy_id = %s",
                (row["itenerary"], row["airline_and_hotel_bookings"],
                 row["policy_id"]))
            for dest in row["destination"]:
                cur.execute("INSERT INTO Travel_Destinations VALUES(%s, %s)",
                            (row["policy_id"], dest))
        else:
            print(">>>>>>>>>>>>>Policy with id %s not of any type" %
                  (row["policy_id"]))
            return
        con.commit()
        print("Updated Database")
    except Exception as e:
        con.rollback()
        print("Failed to update database")
        print(">>>>>>>>>>>>>", e)
        return
def UpdateTPAinfo():
    """Interactively update a TPA's record and its multi-valued attributes.

    Prompts for a TPA id, verifies it exists, reads replacement values for
    the TPA row, then replaces the contact numbers and investigation types
    wholesale (delete then re-insert).  Commits on success; rolls back and
    reports on any error.
    """
    try:
        row = {}
        print("Enter the TPA's details: ")
        row["TPA_id"] = input("TPA's Id: ")
        if not CheckTPAId(row["TPA_id"]):
            return
        # Parameterized query: never interpolate user input into SQL text.
        cur.execute("SELECT * from TPA WHERE TPA_id = %s", (row["TPA_id"],))
        record = cur.fetchall()
        print()
        info = {}
        if len(record) == 0:
            # Fixed copy-paste bug: this lookup is for a TPA, not an employee.
            print("No TPA with this id found")
            return
        info["TPA_id"] = row["TPA_id"]
        info["TPA_name"] = input("TPA's Name: ")
        info["street_address"] = input("TPA's Street Address: ")
        info["zip_code"] = input("Zip code of TPA's Address: ")
        if not CheckZipCode(info["zip_code"]):
            return
        info["city"] = input("City of TPA's Address: ")
        info["state"] = input("State of TPA's Address: ")
        info["contact_number"] = (
            input("TPA's Contact Numbers (space seperated): ")).split(' ')
        info["type"] = (input(
            "TPA's Investigation Types (space seperated, and use '-' where there is a space in name of type): ")).split(' ')
        cur.execute(
            "UPDATE TPA SET TPA_name = %s, street_address = %s ,"
            "zip_code = %s, city = %s, state = %s WHERE TPA_id = %s",
            (info["TPA_name"], info["street_address"], info["zip_code"],
             info["city"], info["state"], info["TPA_id"]))
        # Replace contact numbers wholesale; only validated numbers go back in.
        cur.execute("DELETE FROM TPA_Contact_Info WHERE TPA_id = %s",
                    (info["TPA_id"],))
        for number in info["contact_number"]:
            if CheckContacts(number):
                cur.execute("INSERT INTO TPA_Contact_Info VALUES(%s, %s)",
                            (number, info["TPA_id"]))
        # Replace investigation types wholesale.
        cur.execute(
            "DELETE FROM TPA_Investigations_Conducted WHERE TPA_id = %s",
            (info["TPA_id"],))
        for types in info["type"]:
            cur.execute(
                "INSERT INTO TPA_Investigations_Conducted VALUES(%s, %s)",
                (types, info["TPA_id"]))
        con.commit()
        print("Updated Database")
    except Exception as e:
        con.rollback()
        # Fixed message: this function updates, it does not insert.
        print("Failed to update database")
        print(">>>>>>>>>>>>>", e)
        return
def DeletePolicy(policy_id):
    """Delete a policy and every row referencing it, child tables first.

    Args:
        policy_id: id of the policy to delete; pass "" to be prompted
            interactively (the prompted id is validated via CheckPolicyId).

    Commits on success; rolls back and reports on any error.
    """
    try:
        if policy_id == "":
            policy_id = input("Enter Id of the policy you want to delete: ")
            # Validate only interactive input; callers (DeleteCustomer etc.)
            # pass ids already read from the database.
            if not CheckPolicyId(policy_id):
                return
        # Child/attribute tables first and the Policy row last, so no
        # referencing row ever outlives its policy mid-transaction.  Table
        # names are trusted constants; the id is bound as a parameter
        # (SQL-injection fix).
        tables = ("Beneficiaries", "Cashless_Hospitals", "Conditions_Covered",
                  "Customer_License_No", "Vehicle_Colours",
                  "Travel_Destinations", "Claim_Date", "Claim_Report",
                  "Life", "Medical", "Vehicle", "House", "Travel",
                  "Resolves_Claims", "Policy")
        for table in tables:
            cur.execute("DELETE FROM %s WHERE policy_id = %%s" % (table,),
                        (policy_id,))
        con.commit()
        print("Deleted from Database")
    except Exception as e:
        con.rollback()
        print("Failed to delete from database")
        print(">>>>>>>>>>>>>", e)
        return
def DeleteCustomerDependant(fname, mname, lname):
    """Delete a customer dependant and the rows that reference them.

    Args:
        fname, mname, lname: the dependant's name parts; pass all three as
            None to be prompted for the name interactively.

    Deletes from Customer_Dependant_Age and Depends_On before the
    Customer_Dependant row itself.  Commits on success; rolls back and
    reports on any error.
    """
    try:
        # `is None` (identity) rather than `== None`; all three must be
        # missing to trigger the interactive prompt.
        if fname is None and mname is None and lname is None:
            name = (
                input("Name of Dependant to Delete (Fname Minit Lname): ")).split(' ')
            fname = name[0]
            mname = name[1]
            lname = name[2]
        # Parameterized queries: never interpolate names into SQL text.
        cur.execute(
            "DELETE FROM Customer_Dependant_Age WHERE dependant_first_name = %s "
            "AND dependant_middle_name = %s AND dependant_surname = %s",
            (fname, mname, lname))
        cur.execute(
            "DELETE FROM Depends_On WHERE dependant_first_name = %s "
            "AND dependant_middle_name = %s AND dependant_surname = %s",
            (fname, mname, lname))
        cur.execute(
            "DELETE FROM Customer_Dependant WHERE first_name = %s "
            "AND middle_name = %s AND surname = %s",
            (fname, mname, lname))
        con.commit()
        print("Deleted from Database")
    except Exception as e:
        con.rollback()
        print("Failed to delete from database")
        print(">>>>>>>>>>>>>", e)
        return
def DeleteEmployeeDependant(fname, mname, lname):
    """Delete an employee dependant and the rows that reference them.

    Args:
        fname, mname, lname: the dependant's name parts; pass all three as
            None to be prompted for the name interactively.

    Deletes from Employee_Dependant_Age and Provided_By before the
    Employee_Dependant row itself.  Commits on success; rolls back and
    reports on any error.
    """
    try:
        # `is None` (identity) rather than `== None`; all three must be
        # missing to trigger the interactive prompt.
        if fname is None and mname is None and lname is None:
            name = (
                input("Name of Dependant to Delete (Fname Minit Lname): ")).split(' ')
            fname = name[0]
            mname = name[1]
            lname = name[2]
        # Parameterized queries: never interpolate names into SQL text.
        cur.execute(
            "DELETE FROM Employee_Dependant_Age WHERE dependant_first_name = %s "
            "AND dependant_middle_name = %s AND dependant_surname = %s",
            (fname, mname, lname))
        cur.execute(
            "DELETE FROM Provided_By WHERE dependant_first_name = %s "
            "AND dependant_middle_name = %s AND dependant_surname = %s",
            (fname, mname, lname))
        cur.execute(
            "DELETE FROM Employee_Dependant WHERE first_name = %s "
            "AND middle_name = %s AND surname = %s",
            (fname, mname, lname))
        con.commit()
        print("Deleted from Database")
    except Exception as e:
        con.rollback()
        print("Failed to delete from database")
        print(">>>>>>>>>>>>>", e)
        return
def DeleteCustomer():
    """Delete a customer, cascading to their policies and orphaned dependants.

    Prompts for an Aadhar number, deletes every policy the customer owns,
    their age/contact rows and Depends_On links, then removes any dependant
    no other customer still depends on, and finally the Customer row.
    Commits on success; rolls back and reports on any error.
    """
    try:
        aadhar_no = input(
            "Enter Aadhar Number of the Customer you want to delete: ")
        if not CheckAadhar(aadhar_no):
            return
        # Cascade: remove every policy owned by this customer first.
        # Parameterized queries throughout (SQL-injection fix).
        cur.execute("SELECT * FROM Policy WHERE customer_aadhar_no = %s",
                    (aadhar_no,))
        result = cur.fetchall()
        for line in result:
            DeletePolicy(line["policy_id"])
        cur.execute("DELETE FROM Customer_Age WHERE customer_aadhar_no = %s",
                    (aadhar_no,))
        cur.execute(
            "DELETE FROM Customer_Contact WHERE customer_aadhar_no = %s",
            (aadhar_no,))
        # Remember which dependants were linked before severing the links...
        cur.execute("SELECT * FROM Depends_On WHERE customer_aadhar_no = %s",
                    (aadhar_no,))
        result = cur.fetchall()
        cur.execute("DELETE FROM Depends_On WHERE customer_aadhar_no = %s",
                    (aadhar_no,))
        # ...then delete each dependant that no other customer depends on.
        for line in result:
            cur.execute(
                "SELECT * FROM Depends_On WHERE dependant_first_name = %s "
                "AND dependant_middle_name = %s AND dependant_surname = %s",
                (line["dependant_first_name"], line["dependant_middle_name"],
                 line["dependant_surname"]))
            temp = cur.fetchall()
            if len(temp) == 0:
                DeleteCustomerDependant(
                    line["dependant_first_name"], line["dependant_middle_name"], line["dependant_surname"])
        cur.execute("DELETE FROM Customer WHERE aadhar_no = %s",
                    (aadhar_no,))
        con.commit()
        print("Deleted from Database")
    except Exception as e:
        con.rollback()
        print("Failed to delete from database")
        print(">>>>>>>>>>>>>", e)
        return
def DeleteEmployee():
    """Delete an employee, cascading to policies, dependants and supervisees.

    Prompts for the employee's department and serial numbers, deletes every
    policy they issued, their age/contact rows and Provided_By links,
    removes dependants no other employee provides for, detaches any
    supervisees (sets their supervisor to NULL), and finally deletes the
    Employee row.  Commits on success; rolls back and reports on any error.
    """
    try:
        dno = int(input(
            "Enter Department Number of the Employee you want to delete: "))
        sno = int(input(
            "Enter Serial Number of the Employee you want to delete: "))
        # Cascade: remove every policy this employee issued first.
        # Parameterized queries throughout (SQL-injection fix).
        cur.execute(
            "SELECT * FROM Policy WHERE employee_department_no = %s "
            "AND employee_serial_no = %s", (dno, sno))
        result = cur.fetchall()
        for line in result:
            DeletePolicy(line["policy_id"])
        cur.execute(
            "DELETE FROM Employee_Age WHERE employee_department_no = %s "
            "AND employee_serial_no = %s", (dno, sno))
        cur.execute(
            "DELETE FROM Employee_Contact WHERE employee_department_no = %s "
            "AND employee_serial_no = %s", (dno, sno))
        # Remember which dependants were linked before severing the links...
        cur.execute(
            "SELECT * FROM Provided_By WHERE employee_department_no = %s "
            "AND employee_serial_no = %s", (dno, sno))
        result = cur.fetchall()
        cur.execute(
            "DELETE FROM Provided_By WHERE employee_department_no = %s "
            "AND employee_serial_no = %s", (dno, sno))
        # ...then delete each dependant that no other employee provides for.
        for line in result:
            cur.execute(
                "SELECT * FROM Provided_By WHERE dependant_first_name = %s "
                "AND dependant_middle_name = %s AND dependant_surname = %s",
                (line["dependant_first_name"], line["dependant_middle_name"],
                 line["dependant_surname"]))
            temp = cur.fetchall()
            if len(temp) == 0:
                DeleteEmployeeDependant(
                    line["dependant_first_name"], line["dependant_middle_name"], line["dependant_surname"])
        # Detach supervisees so they don't reference a deleted supervisor.
        cur.execute(
            "SELECT * FROM Employee WHERE sup_department_no = %s "
            "AND sup_serial_no = %s", (dno, sno))
        result = cur.fetchall()
        for line in result:
            cur.execute(
                "UPDATE Employee SET sup_department_no = NULL, "
                "sup_serial_no = NULL WHERE department_no = %s "
                "AND serial_no = %s",
                (line["department_no"], line["serial_no"]))
        cur.execute(
            "DELETE FROM Employee WHERE department_no = %s "
            "AND serial_no = %s", (dno, sno))
        con.commit()
        print("Deleted from Database")
    except Exception as e:
        con.rollback()
        print("Failed to delete from database")
        print(">>>>>>>>>>>>>", e)
        return
def DeleteTPA():
    """Delete a TPA, cascading to the policies whose claims it resolves.

    Prompts for a TPA id, deletes every policy linked through
    Resolves_Claims, then the TPA's contact/investigation rows and finally
    the TPA row itself.  Commits on success; rolls back and reports on any
    error.
    """
    try:
        # Fixed prompt typo: "Third Part" -> "Third Party".
        tpaid = input(
            "Enter Id of the Third Party Administrator you want to delete: ")
        if not CheckTPAId(tpaid):
            return
        # Parameterized queries throughout (SQL-injection fix).
        cur.execute("SELECT * FROM Resolves_Claims WHERE TPA_id = %s",
                    (tpaid,))
        result = cur.fetchall()
        for line in result:
            DeletePolicy(line["policy_id"])
        cur.execute("DELETE FROM TPA_Contact_Info WHERE TPA_id = %s",
                    (tpaid,))
        cur.execute(
            "DELETE FROM TPA_Investigations_Conducted WHERE TPA_id = %s",
            (tpaid,))
        cur.execute("DELETE FROM TPA WHERE TPA_id = %s", (tpaid,))
        con.commit()
        print("Deleted from Database")
    except Exception as e:
        con.rollback()
        print("Failed to delete from database")
        print(">>>>>>>>>>>>>", e)
        return
def GetAllCustomers():
    """Print every customer holding the status the user enters.

    Prompts for a customer status, then for each matching customer prints
    their identity, age (from Customer_Age), contact numbers (from
    Customer_Contact) and address.  Read-only; the final commit just closes
    the read transaction.
    """
    try:
        row = {}
        print("Enter customer's status: ")
        row["customer_status"] = input(
            "Customer Status(Platinum, Gold, Silver, Bronze, Normal): ")
        # Parameterized queries throughout (SQL-injection fix).
        cur.execute("SELECT * FROM Customer WHERE customer_status = %s",
                    (row["customer_status"],))
        result = cur.fetchall()
        print()
        cntr = 1
        for line in result:
            cnums = []
            age = 0
            cur.execute(
                "SELECT * FROM Customer_Contact WHERE customer_aadhar_no = %s",
                (line["aadhar_no"],))
            cc = cur.fetchall()
            for x in cc:
                cnums.append(x["contact_number"])
            cur.execute(
                "SELECT * FROM Customer_Age WHERE customer_aadhar_no = %s",
                (line["aadhar_no"],))
            cc = cur.fetchall()
            # At most one age row is expected; take the first if present.
            for x in cc:
                age = x["age"]
                break
            print("%d:" % (cntr))
            print("Aadhar Number: ", line["aadhar_no"])
            print("Name: %s %s %s" %
                  (line["first_name"], line["middle_name"], line["surname"]))
            print("Date of Birth: ", line["date_of_birth"])
            print("Age: ", age)
            print("Email id: ", line["email_id"])
            print("Contact Numbers: ", cnums)
            print("Address: %s, %s-%s, %s" %
                  (line["street_address"], line["city"], line["zip_code"], line["state"]))
            print()
            cntr += 1
        con.commit()
    except Exception as e:
        con.rollback()
        print("Failed to retrieve from database")
        print(">>>>>>>>>>>>>", e)
        return
def GetAllPoliciesOfCustomers():
    """Print full details of every policy held by one customer.

    Prompts for an Aadhar number, fetches all Policy rows for it, and for
    each policy prints the common fields plus the fields of whichever
    subtype (Life/Medical/Vehicle/House/Travel) the policy belongs to.
    Read-only; the final commit just closes the read transaction.
    """
    try:
        row = {}
        print("Enter customer's Aadhar Number: ")
        row["aadhar_no"] = input("Aadhar number: ")
        query = "SELECT * FROM Policy WHERE customer_aadhar_no = '%s'" % (
            row["aadhar_no"])
        cur.execute(query)
        result = cur.fetchall()
        print()
        cntr = 1
        for line in result:
            # top encodes the policy subtype once detected:
            # 1=Life, 2=Medical, 3=Vehicle, 4=House, 5=Travel, 0=unknown.
            top = 0
            query = "SELECT * FROM Life WHERE policy_id = '%s'" % (
                line["policy_id"])
            cur.execute(query)
            lifet = cur.fetchall()
            query = "SELECT * FROM Medical WHERE policy_id = '%s'" % (
                line["policy_id"])
            cur.execute(query)
            medt = cur.fetchall()
            query = "SELECT * FROM Vehicle WHERE policy_id = '%s'" % (
                line["policy_id"])
            cur.execute(query)
            veht = cur.fetchall()
            query = "SELECT * FROM House WHERE policy_id = '%s'" % (
                line["policy_id"])
            cur.execute(query)
            hset = cur.fetchall()
            query = "SELECT * FROM Travel WHERE policy_id = '%s'" % (
                line["policy_id"])
            cur.execute(query)
            trat = cur.fetchall()
            life = {}
            med = {}
            vehicle = {}
            house = {}
            travel = {}
            # Copy the first (expected only) subtype row, if any, and
            # record which subtype matched.
            if len(lifet) > 0:
                for x in lifet:
                    life = x.copy()
                    break
                top = 1
            elif len(medt) > 0:
                for x in medt:
                    med = x.copy()
                    break
                top = 2
            elif len(veht) > 0:
                for x in veht:
                    vehicle = x.copy()
                    break
                top = 3
            elif len(hset) > 0:
                for x in hset:
                    house = x.copy()
                    break
                top = 4
            elif len(trat) > 0:
                for x in trat:
                    travel = x.copy()
                    break
                top = 5
            if top == 0:
                print("Failed to retrieve from database")
                print(">>>>>>>>>>>>>Policy with id %s not of any type" %
                      (line["policy_id"]))
                continue
            print("%d:" % (cntr))
            print("Policy id: ", line["policy_id"])
            print("Terms and Conditions")
            # Paragraphs are stored '-'-separated; print one per line.
            tnc = line["terms_and_conditions"].split('-')
            for s in tnc:
                print(s)
            print("Date of Issue: ", line["date_of_issue"])
            # "durantion_in_months" is the (misspelled) schema column name.
            print("Duration: %d months" % (line["durantion_in_months"]))
            print("Premium: ", line["premium_value"])
            print("Sum assured: ", line["sum_assured"])
            print("Department number of issuing employee: ",
                  line["employee_department_no"])
            print("Serial number of issuing employee: ",
                  line["employee_serial_no"])
            if top == 1:
                query = "SELECT * FROM Beneficiaries WHERE policy_id = '%s'" % (
                    line["policy_id"])
                cur.execute(query)
                ben = cur.fetchall()
                # Accumulate ", name" fragments; [2:] below strips the
                # leading separator.
                names = ""
                for x in ben:
                    names += (", %s" % (x["name_of_beneficiary"]))
                print("Death Value Benefit: ", life["death_value_benefit"])
                print("Medical History of Customer: ")
                mh = life["medical_history"].split('-')
                for s in mh:
                    print("\t", s)
                if len(names) > 0:
                    print("Names of beneficiaries of Customer: ", names[2:])
                else:
                    print(
                        "Names of beneficiaries of Customer: No beneficiaries mentioned")
            elif top == 2:
                query = "SELECT * FROM Conditions_Covered WHERE policy_id = '%s'" % (
                    line["policy_id"])
                cur.execute(query)
                cond = cur.fetchall()
                cond_names = ""
                # Stored values use '-' in place of spaces; rebuild the
                # human-readable form word by word.
                for x in cond:
                    words = x["name_of_condition"].split('-')
                    s = words[0]
                    for i in range(1, len(words)):
                        s += (" %s" % (words[i]))
                    cond_names += (", %s" % (s))
                query = "SELECT * FROM Cashless_Hospitals WHERE policy_id = '%s'" % (
                    line["policy_id"])
                cur.execute(query)
                hosp = cur.fetchall()
                hosp_names = ""
                for x in hosp:
                    words = x["name_of_hospital"].split('-')
                    s = words[0]
                    for i in range(1, len(words)):
                        s += (" %s" % (words[i]))
                    hosp_names += (", %s" % (s))
                print("Death Value Benefit: ", med["death_value_benefit"])
                if len(cond_names) > 0:
                    print("Conditions Covered by Policy: ", cond_names[2:])
                else:
                    print("Conditions Covered by Policy: No conditions mentioned")
                if len(hosp_names) > 0:
                    print(
                        "Cashless Hospitals having tie-ups with Policy: ", hosp_names[2:])
                else:
                    print(
                        "Cashless Hospitals having tie-ups with Policy: No hospitals mentioned")
            elif top == 3:
                query = "SELECT * FROM Vehicle_Colours WHERE policy_id = '%s'" % (
                    line["policy_id"])
                cur.execute(query)
                colt = cur.fetchall()
                col_names = ""
                for x in colt:
                    words = x["colour"].split('-')
                    s = words[0]
                    for i in range(1, len(words)):
                        s += (" %s" % (words[i]))
                    col_names += (", %s" % (s))
                query = "SELECT * FROM Customer_License_No WHERE policy_id = '%s'" % (
                    line["policy_id"])
                cur.execute(query)
                lnt = cur.fetchall()
                ln = ""
                for x in lnt:
                    ln = x["customer_license_no"]
                    break
                print("License Plate Number: ", vehicle["license_plate_no"])
                if len(col_names) > 0:
                    print("Colours of Vehicle: ", col_names[2:])
                else:
                    print("Colours of Vehicle: No colours mentioned")
                print("Customer License Number: ", ln)
            elif top == 4:
                print("Replacement Cost of House: ", house["replacement_cost"])
                print("House Address: %s, %s-%s, %s" %
                      (house["street_address"], house["city"], house["zip_code"], house["state"]))
            elif top == 5:
                query = "SELECT * FROM Travel_Destinations WHERE policy_id = '%s'" % (
                    line["policy_id"])
                cur.execute(query)
                destt = cur.fetchall()
                dest_names = ""
                for x in destt:
                    words = x["destination"].split('-')
                    s = words[0]
                    for i in range(1, len(words)):
                        s += (" %s" % (words[i]))
                    dest_names += (", %s" % (s))
                print("Itenerary of Travel: ", travel["itenerary"])
                print("Airline and Hotel Booking of Travel: ",
                      travel["airline_and_hotel_bookings"])
                if len(dest_names) > 0:
                    print("Travel Destinations: ", dest_names[2:])
                else:
                    print("Travel Destinations: No destinations mentioned")
            print()
            cntr += 1
        con.commit()
    except Exception as e:
        con.rollback()
        print("Failed to retrieve from database")
        print(">>>>>>>>>>>>>", e)
        return
def GetAllDependentsOfCustomers():
    """Print every dependant of one customer with their date of birth and age.

    Prompts for an Aadhar number, walks the Depends_On links, and for each
    dependant prints the Customer_Dependant row plus the age from
    Customer_Dependant_Age.  Read-only; the final commit just closes the
    read transaction.
    """
    try:
        row = {}
        print("Enter customer's Aadhar Number")
        row["aadhar_number"] = input("Aadhar Number of Customer: ")
        # Parameterized queries throughout (SQL-injection fix).
        cur.execute("SELECT * FROM Depends_On WHERE customer_aadhar_no = %s",
                    (row["aadhar_number"],))
        dependants = cur.fetchall()
        print()
        cntr = 1
        for line in dependants:
            cur.execute(
                "SELECT * FROM Customer_Dependant WHERE first_name = %s "
                "AND middle_name = %s AND surname = %s",
                (line["dependant_first_name"], line["dependant_middle_name"],
                 line["dependant_surname"]))
            dept = cur.fetchall()
            cur.execute(
                "SELECT * FROM Customer_Dependant_Age "
                "WHERE dependant_first_name = %s "
                "AND dependant_middle_name = %s AND dependant_surname = %s",
                (line["dependant_first_name"], line["dependant_middle_name"],
                 line["dependant_surname"]))
            depaget = cur.fetchall()
            dep = {}
            age = 0
            # At most one row is expected from each lookup; use the first.
            for x in dept:
                dep = x.copy()
                break
            for x in depaget:
                age = x["age"]
                break
            print("%d:" % (cntr))
            print("Name: %s %s %s" %
                  (dep["first_name"], dep["middle_name"], dep["surname"]))
            print("Date of Birth: ", dep["date_of_birth"])
            print("Age: ", age)
            print()
            cntr += 1
        con.commit()
    except Exception as e:
        con.rollback()
        print("Failed to retrieve from database")
        print(">>>>>>>>>>>>>", e)
        return
def GetAllDependentsOfEmployee():
    """Print every dependant of one employee with their date of birth and age.

    Prompts for the employee's department and serial numbers, walks the
    Provided_By links, and for each dependant prints the Employee_Dependant
    row plus the age from Employee_Dependant_Age.  Read-only; the final
    commit just closes the read transaction.
    """
    try:
        row = {}
        print("Enter employee's Info")
        row["dept_no"] = int(
            input("Department Number of the Employee: "))
        row["sno"] = int(
            input("Serial Number of the Employee: "))
        # Parameterized queries throughout (SQL-injection fix).
        cur.execute(
            "SELECT * FROM Provided_By WHERE employee_department_no = %s "
            "AND employee_serial_no = %s",
            (row["dept_no"], row["sno"]))
        dependants = cur.fetchall()
        print()
        cntr = 1
        for line in dependants:
            cur.execute(
                "SELECT * FROM Employee_Dependant WHERE first_name = %s "
                "AND middle_name = %s AND surname = %s",
                (line["dependant_first_name"], line["dependant_middle_name"],
                 line["dependant_surname"]))
            dept = cur.fetchall()
            cur.execute(
                "SELECT * FROM Employee_Dependant_Age "
                "WHERE dependant_first_name = %s "
                "AND dependant_middle_name = %s AND dependant_surname = %s",
                (line["dependant_first_name"], line["dependant_middle_name"],
                 line["dependant_surname"]))
            depaget = cur.fetchall()
            dep = {}
            age = 0
            # At most one row is expected from each lookup; use the first.
            for x in dept:
                dep = x.copy()
                break
            for x in depaget:
                age = x["age"]
                break
            print("%d:" % (cntr))
            print("Name: %s %s %s" %
                  (dep["first_name"], dep["middle_name"], dep["surname"]))
            print("Date of Birth: ", dep["date_of_birth"])
            print("Age: ", age)
            print()
            cntr += 1
        con.commit()
    except Exception as e:
        con.rollback()
        print("Failed to retrieve from database")
        print(">>>>>>>>>>>>>", e)
        return
def GetAllTPAsInCity():
    """Print every TPA located in the city the user enters.

    Prompts for a city name (case sensitive), then for each matching TPA
    prints its identity, address, contact numbers and the investigation
    types it conducts.  Read-only; the final commit just closes the read
    transaction.
    """
    try:
        row = {}
        print("Enter TPA's info")
        row["city"] = input("City (Case sensitive): ")
        # Parameterized queries throughout (SQL-injection fix).
        cur.execute("SELECT * FROM TPA WHERE city = %s", (row["city"],))
        result = cur.fetchall()
        print()
        cntr = 1
        for line in result:
            # Accumulate ", value" fragments; [2:] below strips the
            # leading separator.
            cnums = ""
            types = ""
            cur.execute("SELECT * FROM TPA_Contact_Info WHERE TPA_id = %s",
                        (line["TPA_id"],))
            cc = cur.fetchall()
            for x in cc:
                cnums += ", "
                cnums += x["contact_number"]
            cur.execute(
                "SELECT * FROM TPA_Investigations_Conducted WHERE TPA_id = %s",
                (line["TPA_id"],))
            cc = cur.fetchall()
            for x in cc:
                types += ", "
                types += x["type"]
            print("%d:" % (cntr))
            print("TPA Id: ", line["TPA_id"])
            print("Name: ", line["TPA_name"])
            print("Address: %s, %s-%s, %s" %
                  (line["street_address"], line["city"], line["zip_code"], line["state"]))
            if len(cnums) > 0:
                print("Contact Numbers: ", cnums[2:])
            else:
                print("Contact Numbers: No numbers mentioned")
            if len(types) > 0:
                print("Types of Investigations Conducted: ", types[2:])
            else:
                print("Types of Investigations Conducted: No types mentioned")
            print()
            cntr += 1
        con.commit()
    except Exception as e:
        con.rollback()
        print("Failed to retrieve from database")
        print(">>>>>>>>>>>>>", e)
        return
def GetInsurancePolicies():
    """Print full details of every policy of the requested subtype(s).

    Prompts for a space-separated list of subtype names, collects the
    policy ids from the matching subtype tables into a set (deduplicating),
    then prints each policy's common fields plus its subtype-specific
    fields.  Read-only; the final commit just closes the read transaction.
    """
    try:
        row = {}
        print("Enter policy types (Life, Medical, Vehicle, House, Travel)")
        row["subclass"] = (
            input("Policy types (space seperated and case sensitive): ")).split(' ')
        policy_ids = set()
        # Gather the ids of every policy belonging to a requested subtype.
        for sub in row["subclass"]:
            result = tuple()
            if sub == "Life":
                query = "SELECT policy_id FROM Life"
                cur.execute(query)
                result = cur.fetchall()
            elif sub == "Medical":
                query = "SELECT policy_id FROM Medical"
                cur.execute(query)
                result = cur.fetchall()
            elif sub == "Vehicle":
                query = "SELECT policy_id FROM Vehicle"
                cur.execute(query)
                result = cur.fetchall()
            elif sub == "House":
                query = "SELECT policy_id FROM House"
                cur.execute(query)
                result = cur.fetchall()
            elif sub == "Travel":
                query = "SELECT policy_id FROM Travel"
                cur.execute(query)
                result = cur.fetchall()
            else:
                print(">>>>>>>>>>>>>Invalid type of policy")
            for line in result:
                policy_ids.add(line["policy_id"])
        print()
        cntr = 1
        for pid in policy_ids:
            query = "SELECT * FROM Policy WHERE policy_id = '%s'" % (pid)
            cur.execute(query)
            result = cur.fetchall()
            line = {}
            # At most one Policy row per id; use the first.
            for x in result:
                line = x.copy()
                break
            # top encodes the policy subtype once detected:
            # 1=Life, 2=Medical, 3=Vehicle, 4=House, 5=Travel, 0=unknown.
            top = 0
            query = "SELECT * FROM Life WHERE policy_id = '%s'" % (
                line["policy_id"])
            cur.execute(query)
            lifet = cur.fetchall()
            query = "SELECT * FROM Medical WHERE policy_id = '%s'" % (
                line["policy_id"])
            cur.execute(query)
            medt = cur.fetchall()
            query = "SELECT * FROM Vehicle WHERE policy_id = '%s'" % (
                line["policy_id"])
            cur.execute(query)
            veht = cur.fetchall()
            query = "SELECT * FROM House WHERE policy_id = '%s'" % (
                line["policy_id"])
            cur.execute(query)
            hset = cur.fetchall()
            query = "SELECT * FROM Travel WHERE policy_id = '%s'" % (
                line["policy_id"])
            cur.execute(query)
            trat = cur.fetchall()
            life = {}
            med = {}
            vehicle = {}
            house = {}
            travel = {}
            # Copy the first (expected only) subtype row, if any, and
            # record which subtype matched.
            if len(lifet) > 0:
                for x in lifet:
                    life = x.copy()
                    break
                top = 1
            elif len(medt) > 0:
                for x in medt:
                    med = x.copy()
                    break
                top = 2
            elif len(veht) > 0:
                for x in veht:
                    vehicle = x.copy()
                    break
                top = 3
            elif len(hset) > 0:
                for x in hset:
                    house = x.copy()
                    break
                top = 4
            elif len(trat) > 0:
                for x in trat:
                    travel = x.copy()
                    break
                top = 5
            if top == 0:
                print("Failed to retrieve from database")
                print(">>>>>>>>>>>>>Policy with id %s not of any type" %
                      (line["policy_id"]))
                continue
            print("%d:" % (cntr))
            print("Policy id: ", line["policy_id"])
            print("Terms and Conditions")
            # Paragraphs are stored '-'-separated; print one per line.
            tnc = line["terms_and_conditions"].split('-')
            for s in tnc:
                print(s)
            print("Date of Issue: ", line["date_of_issue"])
            # "durantion_in_months" is the (misspelled) schema column name.
            print("Duration: %d months" % (line["durantion_in_months"]))
            print("Premium: ", line["premium_value"])
            print("Sum assured: ", line["sum_assured"])
            print("Department number of issuing employee: ",
                  line["employee_department_no"])
            print("Serial number of issuing employee: ",
                  line["employee_serial_no"])
            if top == 1:
                query = "SELECT * FROM Beneficiaries WHERE policy_id = '%s'" % (
                    line["policy_id"])
                cur.execute(query)
                ben = cur.fetchall()
                # Accumulate ", name" fragments; [2:] below strips the
                # leading separator.
                names = ""
                for x in ben:
                    names += (", %s" % (x["name_of_beneficiary"]))
                print("Death Value Benefit: ", life["death_value_benefit"])
                print("Medical History of Customer: ")
                mh = life["medical_history"].split('-')
                for s in mh:
                    print("\t", s)
                if len(names) > 0:
                    print("Names of beneficiaries of Customer: ", names[2:])
                else:
                    print(
                        "Names of beneficiaries of Customer: No beneficiaries mentioned")
            elif top == 2:
                query = "SELECT * FROM Conditions_Covered WHERE policy_id = '%s'" % (
                    line["policy_id"])
                cur.execute(query)
                cond = cur.fetchall()
                cond_names = ""
                # Stored values use '-' in place of spaces; rebuild the
                # human-readable form word by word.
                for x in cond:
                    words = x["name_of_condition"].split('-')
                    s = words[0]
                    for i in range(1, len(words)):
                        s += (" %s" % (words[i]))
                    cond_names += (", %s" % (s))
                query = "SELECT * FROM Cashless_Hospitals WHERE policy_id = '%s'" % (
                    line["policy_id"])
                cur.execute(query)
                hosp = cur.fetchall()
                hosp_names = ""
                for x in hosp:
                    words = x["name_of_hospital"].split('-')
                    s = words[0]
                    for i in range(1, len(words)):
                        s += (" %s" % (words[i]))
                    hosp_names += (", %s" % (s))
                print("Death Value Benefit: ", med["death_value_benefit"])
                if len(cond_names) > 0:
                    print("Conditions Covered by Policy: ", cond_names[2:])
                else:
                    print("Conditions Covered by Policy: No conditions mentioned")
                if len(hosp_names) > 0:
                    print(
                        "Cashless Hospitals having tie-ups with Policy: ", hosp_names[2:])
                else:
                    print(
                        "Cashless Hospitals having tie-ups with Policy: No hospitals mentioned")
            elif top == 3:
                query = "SELECT * FROM Vehicle_Colours WHERE policy_id = '%s'" % (
                    line["policy_id"])
                cur.execute(query)
                colt = cur.fetchall()
                col_names = ""
                for x in colt:
                    words = x["colour"].split('-')
                    s = words[0]
                    for i in range(1, len(words)):
                        s += (" %s" % (words[i]))
                    col_names += (", %s" % (s))
                query = "SELECT * FROM Customer_License_No WHERE policy_id = '%s'" % (
                    line["policy_id"])
                cur.execute(query)
                lnt = cur.fetchall()
                ln = ""
                for x in lnt:
                    ln = x["customer_license_no"]
                    break
                print("License Plate Number: ", vehicle["license_plate_no"])
                if len(col_names) > 0:
                    print("Colours of Vehicle: ", col_names[2:])
                else:
                    print("Colours of Vehicle: No colours mentioned")
                print("Customer License Number: ", ln)
            elif top == 4:
                print("Replacement Cost of House: ", house["replacement_cost"])
                print("House Address: %s, %s-%s, %s" %
                      (house["street_address"], house["city"], house["zip_code"], house["state"]))
            elif top == 5:
                query = "SELECT * FROM Travel_Destinations WHERE policy_id = '%s'" % (
                    line["policy_id"])
                cur.execute(query)
                destt = cur.fetchall()
                dest_names = ""
                for x in destt:
                    words = x["destination"].split('-')
                    s = words[0]
                    for i in range(1, len(words)):
                        s += (" %s" % (words[i]))
                    dest_names += (", %s" % (s))
                print("Itenerary of Travel: ", travel["itenerary"])
                print("Airline and Hotel Booking of Travel: ",
                      travel["airline_and_hotel_bookings"])
                if len(dest_names) > 0:
                    print("Travel Destinations: ", dest_names[2:])
                else:
                    print("Travel Destinations: No destinations mentioned")
            print()
            cntr += 1
        con.commit()
    except Exception as e:
        con.rollback()
        print("Failed to retrieve from database")
        print(">>>>>>>>>>>>>", e)
        return
def GetInsuranceCustomers():
  """Print details of customers who hold a policy of a chosen type AND have a chosen status.

  Reads two space-separated lists from stdin (policy subtypes and customer
  statuses), collects the aadhar numbers of matching customers, and prints
  each customer's record once.  Uses the module-level `cur`/`con`.
  """
  # Whitelist of subtype tables; table names cannot be bound as SQL
  # parameters, so membership here is what makes the concatenation safe.
  POLICY_TABLES = ("Life", "Medical", "Vehicle", "House", "Travel")
  try:
    row = {}
    print("Enter policy types and status")
    print("Policy types - Life, Medical, Vehicle, House, Travel")
    print("Status - Platinum, Gold, Silver, Bronze, Normal")
    row["subclass"] = (
        input("Policy types (space seperated and case sensitive): ")).split(' ')
    row["status"] = (
        input("Status (space seperated and case sensitive): ")).split(' ')
    for stat in row["status"]:
      if not CheckCustomerStatus(stat):
        print(">>>>>>>>>>>>>%s is not a valid Customer status" % (stat))
    # Gather every policy id belonging to one of the requested subtypes.
    policy_ids = set()
    for sub in row["subclass"]:
      if sub not in POLICY_TABLES:
        print(">>>>>>>>>>>>>Invalid type of policy")
        continue
      cur.execute("SELECT policy_id FROM " + sub)
      for line in cur.fetchall():
        policy_ids.add(line["policy_id"])
    # Keep only customers whose status is in the requested list.
    aadhar_nos = set()
    for pid in policy_ids:
      # Parameterized queries: avoids SQL injection via crafted ids.
      cur.execute("SELECT * FROM Policy WHERE policy_id = %s", (pid,))
      result = cur.fetchall()
      if not result:
        # Orphaned subtype row with no Policy record: skip it instead of
        # crashing with a KeyError (which aborted the whole listing).
        continue
      no = result[0]["customer_aadhar_no"]
      cur.execute("SELECT * FROM Customer WHERE aadhar_no = %s", (no,))
      result = cur.fetchall()
      if result and result[0]["customer_status"] in row["status"]:
        aadhar_nos.add(no)
    print()
    cntr = 1
    for no in aadhar_nos:
      cur.execute("SELECT * FROM Customer WHERE aadhar_no = %s", (no,))
      result = cur.fetchall()
      if not result:
        continue
      line = result[0]
      cnums = []
      age = 0
      cur.execute(
          "SELECT * FROM Customer_Contact WHERE customer_aadhar_no = %s",
          (line["aadhar_no"],))
      for x in cur.fetchall():
        cnums.append(x["contact_number"])
      cur.execute(
          "SELECT * FROM Customer_Age WHERE customer_aadhar_no = %s",
          (line["aadhar_no"],))
      for x in cur.fetchall():
        age = x["age"]
        break
      print("%d:" % (cntr))
      print("Aadhar Number: ", line["aadhar_no"])
      print("Name: %s %s %s" %
            (line["first_name"], line["middle_name"], line["surname"]))
      print("Date of Birth: ", line["date_of_birth"])
      print("Age: ", age)
      print("Email id: ", line["email_id"])
      print("Contact Numbers: ", cnums)
      print("Customer Status: ", line["customer_status"])
      print("Address: %s, %s-%s, %s" %
            (line["street_address"], line["city"], line["zip_code"], line["state"]))
      print()
      cntr += 1
    con.commit()
  except Exception as e:
    con.rollback()
    print("Failed to retrieve from database")
    print(">>>>>>>>>>>>>", e)
  return
def GetPoliciesWithPremium():
  """List every policy whose premium lies in a user-supplied [min, max] range.

  Prints the common Policy fields followed by the subtype-specific details
  (Life/Medical/Vehicle/House/Travel).  Uses the module-level `cur`/`con`.
  """
  try:
    row = {}
    print("Enter Range for Premium")
    row["min"] = float(input("Minimum Premium: "))
    row["max"] = float(input("Maximum Premium: "))
    # Parameterized query: avoids SQL injection and %f formatting artefacts.
    cur.execute(
        "SELECT * FROM Policy WHERE premium_value <= %s AND premium_value >= %s",
        (row["max"], row["min"]))
    result = cur.fetchall()

    def _rows(table, pid):
      # Fetch all rows of `table` for one policy id.  Table names are
      # hard-coded by the callers below, so the concatenation is safe;
      # the id itself is bound as a parameter.
      cur.execute("SELECT * FROM " + table + " WHERE policy_id = %s", (pid,))
      return cur.fetchall()

    def _csv(table, column, pid):
      # Build ", a, b, c" from one column, turning the schema's
      # dash-separated multi-word values into space-separated text.
      out = ""
      for x in _rows(table, pid):
        out += ", %s" % (x[column].replace('-', ' '))
      return out

    print()
    cntr = 1
    for line in result:
      pid = line["policy_id"]
      # A policy belongs to exactly one subtype table; find which.
      top = 0
      sub = {}
      for idx, table in enumerate(
          ("Life", "Medical", "Vehicle", "House", "Travel"), 1):
        found = _rows(table, pid)
        if found:
          top = idx
          sub = found[0].copy()
          break
      if top == 0:
        print("Failed to retrieve from database")
        print(">>>>>>>>>>>>>Policy with id %s not of any type" % (pid))
        continue
      print("%d:" % (cntr))
      print("Policy id: ", pid)
      print("Terms and Conditions")
      for s in line["terms_and_conditions"].split('-'):
        print(s)
      print("Date of Issue: ", line["date_of_issue"])
      # NB: "durantion_in_months" is the actual (misspelled) column name.
      print("Duration: %d months" % (line["durantion_in_months"]))
      print("Premium: ", line["premium_value"])
      print("Sum assured: ", line["sum_assured"])
      print("Department number of issuing employee: ",
            line["employee_department_no"])
      print("Serial number of issuing employee: ",
            line["employee_serial_no"])
      if top == 1:
        names = ""
        for x in _rows("Beneficiaries", pid):
          names += ", %s" % (x["name_of_beneficiary"])
        print("Death Value Benefit: ", sub["death_value_benefit"])
        print("Medical History of Customer: ")
        for s in sub["medical_history"].split('-'):
          print("\t", s)
        if names:
          print("Names of beneficiaries of Customer: ", names[2:])
        else:
          print(
              "Names of beneficiaries of Customer: No beneficiaries mentioned")
      elif top == 2:
        cond_names = _csv("Conditions_Covered", "name_of_condition", pid)
        hosp_names = _csv("Cashless_Hospitals", "name_of_hospital", pid)
        print("Death Value Benefit: ", sub["death_value_benefit"])
        if cond_names:
          print("Conditions Covered by Policy: ", cond_names[2:])
        else:
          print("Conditions Covered by Policy: No conditions mentioned")
        if hosp_names:
          print(
              "Cashless Hospitals having tie-ups with Policy: ", hosp_names[2:])
        else:
          print(
              "Cashless Hospitals having tie-ups with Policy: No hospitals mentioned")
      elif top == 3:
        col_names = _csv("Vehicle_Colours", "colour", pid)
        ln = ""
        for x in _rows("Customer_License_No", pid):
          ln = x["customer_license_no"]
          break
        print("License Plate Number: ", sub["license_plate_no"])
        if col_names:
          print("Colours of Vehicle: ", col_names[2:])
        else:
          print("Colours of Vehicle: No colours mentioned")
        print("Customer License Number: ", ln)
      elif top == 4:
        print("Replacement Cost of House: ", sub["replacement_cost"])
        print("House Address: %s, %s-%s, %s" %
              (sub["street_address"], sub["city"], sub["zip_code"], sub["state"]))
      elif top == 5:
        dest_names = _csv("Travel_Destinations", "destination", pid)
        print("Itenerary of Travel: ", sub["itenerary"])
        print("Airline and Hotel Booking of Travel: ",
              sub["airline_and_hotel_bookings"])
        if dest_names:
          print("Travel Destinations: ", dest_names[2:])
        else:
          print("Travel Destinations: No destinations mentioned")
      print()
      cntr += 1
    con.commit()
  except Exception as e:
    con.rollback()
    print("Failed to retrieve from database")
    print(">>>>>>>>>>>>>", e)
  return
def GetCustomersOfAge():
  """Print details of every customer whose recorded age equals a user-given age.

  Uses the module-level `cur`/`con`.
  """
  try:
    row = {}
    print("Enter customer's age: ")
    row["age"] = int(input("Customer Age: "))
    # Parameterized query: avoids SQL injection via crafted input.
    cur.execute(
        "SELECT * FROM Customer WHERE aadhar_no IN "
        "(SELECT customer_aadhar_no FROM Customer_Age WHERE age = %s)",
        (row["age"],))
    result = cur.fetchall()
    print()
    cntr = 1
    for line in result:
      cnums = []
      cur.execute(
          "SELECT * FROM Customer_Contact WHERE customer_aadhar_no = %s",
          (line["aadhar_no"],))
      for x in cur.fetchall():
        cnums.append(x["contact_number"])
      print("%d:" % (cntr))
      print("Aadhar Number: ", line["aadhar_no"])
      print("Name: %s %s %s" %
            (line["first_name"], line["middle_name"], line["surname"]))
      print("Date of Birth: ", line["date_of_birth"])
      print("Age: ", row["age"])
      print("Email id: ", line["email_id"])
      print("Contact Numbers: ", cnums)
      print("Address: %s, %s-%s, %s" %
            (line["street_address"], line["city"], line["zip_code"], line["state"]))
      print()
      cntr += 1
    con.commit()
  except Exception as e:
    con.rollback()
    print("Failed to retrieve from database")
    print(">>>>>>>>>>>>>", e)
  return
def GetTotalClaim():
  """Print the total sum assured of claimed policies for each requested policy type.

  Uses the module-level `cur`/`con`.
  """
  # Table names cannot be bound as parameters; this whitelist is what makes
  # the string concatenation below safe.
  POLICY_TABLES = ("Life", "Medical", "Vehicle", "House", "Travel")
  try:
    row = {}
    print("Enter policy types (Life, Medical, Vehicle, House, Travel)")
    row["subclass"] = (
        input("Policy types (space seperated and case sensitive): ")).split(' ')
    val = {}
    for sub in set(row["subclass"]):
      if sub not in POLICY_TABLES:
        print(">>>>>>>>>>>>>Invalid type of policy")
        continue
      cur.execute(
          "SELECT policy_id FROM " + sub +
          " WHERE policy_id IN (SELECT policy_id FROM Resolves_Claims)")
      result = cur.fetchall()
      value = 0.0
      for line in result:
        cur.execute(
            "SELECT sum_assured FROM Policy WHERE policy_id = %s",
            (line["policy_id"],))
        sumt = cur.fetchall()
        # Guard: an id with no Policy row previously raised KeyError and
        # aborted every total; just skip it.
        if sumt:
          value += float(sumt[0]["sum_assured"])
      val[sub] = value
    for key, value in val.items():
      print()
      print("Total claim of %s Insurance Policies is %f" % (key, value))
    con.commit()
  except Exception as e:
    con.rollback()
    print("Failed to retrieve from database")
    print(">>>>>>>>>>>>>", e)
  return
def GetMaxClaim():
  """Print the maximum sum assured among claimed policies of one user-chosen type.

  Prints 0.0 when the type has no claimed policies.  Uses the module-level
  `cur`/`con`.
  """
  # Whitelist makes the table-name concatenation below safe.
  POLICY_TABLES = ("Life", "Medical", "Vehicle", "House", "Travel")
  try:
    row = {}
    print("Enter policy type (Life, Medical, Vehicle, House, Travel)")
    row["subclass"] = input(
        "Policy types (space seperated and case sensitive): ")
    if row["subclass"] not in POLICY_TABLES:
      print("Failed to retrieve from database")
      print(">>>>>>>>>>>>>Invalid type of policy")
      return
    cur.execute(
        "SELECT policy_id FROM " + row["subclass"] +
        " WHERE policy_id IN (SELECT policy_id FROM Resolves_Claims)")
    result = cur.fetchall()
    val = 0.0
    for line in result:
      cur.execute(
          "SELECT sum_assured FROM Policy WHERE policy_id = %s",
          (line["policy_id"],))
      maxt = cur.fetchall()
      # Skip orphaned ids instead of crashing on an empty result.
      if maxt:
        val = max(val, float(maxt[0]["sum_assured"]))
    print("Maximum claim of %s Insurance Policies is %f" %
          (row["subclass"], val))
    con.commit()
  except Exception as e:
    con.rollback()
    print("Failed to retrieve from database")
    print(">>>>>>>>>>>>>", e)
  return
def GetMinClaim():
  """Print the minimum sum assured among claimed policies of one user-chosen type.

  Prints 0.0 when the type has no claimed policies.  Uses the module-level
  `cur`/`con`.
  """
  # Whitelist makes the table-name concatenation below safe.
  POLICY_TABLES = ("Life", "Medical", "Vehicle", "House", "Travel")
  try:
    row = {}
    print("Enter policy type (Life, Medical, Vehicle, House, Travel)")
    row["subclass"] = input(
        "Policy types (space seperated and case sensitive): ")
    if row["subclass"] not in POLICY_TABLES:
      print("Failed to retrieve from database")
      print(">>>>>>>>>>>>>Invalid type of policy")
      return
    cur.execute(
        "SELECT policy_id FROM " + row["subclass"] +
        " WHERE policy_id IN (SELECT policy_id FROM Resolves_Claims)")
    result = cur.fetchall()
    val = 1000000000.0  # sentinel; replaced by 0.0 below when nothing matched
    for line in result:
      cur.execute(
          "SELECT sum_assured FROM Policy WHERE policy_id = %s",
          (line["policy_id"],))
      mint = cur.fetchall()
      # Skip orphaned ids instead of crashing on an empty result.
      if mint:
        val = min(val, float(mint[0]["sum_assured"]))
    if len(result) == 0:
      val = 0.0
    print("Minimum claim of %s Insurance Policies is %f" %
          (row["subclass"], val))
    con.commit()
  except Exception as e:
    con.rollback()
    print("Failed to retrieve from database")
    print(">>>>>>>>>>>>>", e)
  return
def GetAverageClaim():
  """Print the average sum assured of claimed policies of one user-chosen type.

  Prints 0.0 when the type has no claimed policies.  Uses the module-level
  `cur`/`con`.
  """
  # Whitelist makes the table-name concatenation below safe.
  POLICY_TABLES = ("Life", "Medical", "Vehicle", "House", "Travel")
  try:
    row = {}
    print("Enter policy type (Life, Medical, Vehicle, House, Travel)")
    row["subclass"] = input(
        "Policy types (space seperated and case sensitive): ")
    if row["subclass"] not in POLICY_TABLES:
      print("Failed to retrieve from database")
      print(">>>>>>>>>>>>>Invalid type of policy")
      return
    cur.execute(
        "SELECT policy_id FROM " + row["subclass"] +
        " WHERE policy_id IN (SELECT policy_id FROM Resolves_Claims)")
    result = cur.fetchall()
    val = 0.0
    for line in result:
      cur.execute(
          "SELECT sum_assured FROM Policy WHERE policy_id = %s",
          (line["policy_id"],))
      sumt = cur.fetchall()
      # Skip orphaned ids instead of crashing on an empty result.
      if sumt:
        val += float(sumt[0]["sum_assured"])
    if len(result) == 0:
      val = 0.0
    else:
      val /= len(result)
    print("Average claim of %s Insurance Policies is %f" %
          (row["subclass"], val))
    con.commit()
  except Exception as e:
    con.rollback()
    print("Failed to retrieve from database")
    print(">>>>>>>>>>>>>", e)
  return
def SearchCustomerByName():
  """Full-text search customers by (partial) name and print each match.

  Uses a MySQL boolean-mode MATCH on the three name columns; a trailing '*'
  makes the search a prefix match.  Uses the module-level `cur`/`con`.
  """
  try:
    row = {}
    print("Enter Customer's info")
    row["name"] = input("Name: ")
    # Parameterized: user text is bound, not spliced into the SQL string.
    cur.execute(
        "SELECT * FROM Customer WHERE MATCH (first_name, middle_name, surname) "
        "AGAINST (%s IN BOOLEAN MODE)",
        (row["name"] + "*",))
    result = cur.fetchall()
    print()
    cntr = 1
    for line in result:
      cnums = []
      age = 0
      cur.execute(
          "SELECT * FROM Customer_Contact WHERE customer_aadhar_no = %s",
          (line["aadhar_no"],))
      for x in cur.fetchall():
        cnums.append(x["contact_number"])
      cur.execute(
          "SELECT * FROM Customer_Age WHERE customer_aadhar_no = %s",
          (line["aadhar_no"],))
      for x in cur.fetchall():
        age = x["age"]
        break
      print("%d:" % (cntr))
      print("Aadhar Number: ", line["aadhar_no"])
      print("Name: %s %s %s" %
            (line["first_name"], line["middle_name"], line["surname"]))
      print("Date of Birth: ", line["date_of_birth"])
      print("Age: ", age)
      print("Email id: ", line["email_id"])
      print("Contact Numbers: ", cnums)
      print("Address: %s, %s-%s, %s" %
            (line["street_address"], line["city"], line["zip_code"], line["state"]))
      print("Customer Status: ", line["customer_status"])
      print()
      cntr += 1
    con.commit()
  except Exception as e:
    con.rollback()
    print("Failed to retrieve from database")
    print(">>>>>>>>>>>>>", e)
  return
def SearchEmployeeByName():
  """Full-text search employees by (partial) name and print each match.

  Uses a MySQL boolean-mode MATCH on the three name columns; a trailing '*'
  makes the search a prefix match.  Uses the module-level `cur`/`con`.
  """
  try:
    row = {}
    print("Enter Employee's info")
    row["name"] = input("Name: ")
    # Parameterized: user text is bound, not spliced into the SQL string.
    cur.execute(
        "SELECT * FROM Employee WHERE MATCH (first_name, middle_name, surname) "
        "AGAINST (%s IN BOOLEAN MODE)",
        (row["name"] + "*",))
    result = cur.fetchall()
    print()
    cntr = 1
    for line in result:
      cnums = []
      age = 0
      cur.execute(
          "SELECT * FROM Employee_Contact WHERE employee_department_no = %s "
          "AND employee_serial_no = %s",
          (line["department_no"], line["serial_no"]))
      for x in cur.fetchall():
        cnums.append(x["contact_number"])
      cur.execute(
          "SELECT * FROM Employee_Age WHERE employee_department_no = %s "
          "AND employee_serial_no = %s",
          (line["department_no"], line["serial_no"]))
      for x in cur.fetchall():
        age = x["age"]
        break
      print("%d:" % (cntr))
      print("Department Number: ", line["department_no"])
      print("Serial Number: ", line["serial_no"])
      print("Aadhar Number: ", line["aadhar_no"])
      print("Name: %s %s %s" %
            (line["first_name"], line["middle_name"], line["surname"]))
      print("Date of Birth: ", line["date_of_birth"])
      print("Age: ", age)
      print("Email id: ", line["email_id"])
      print("Contact Numbers: ", cnums)
      print("Address: %s, %s-%s, %s" %
            (line["street_address"], line["city"], line["zip_code"], line["state"]))
      print("Supervisor Department Number: ", line["sup_department_no"])
      print("Supervisor Serial Number: ", line["sup_serial_no"])
      print()
      cntr += 1
    con.commit()
  except Exception as e:
    con.rollback()
    print("Failed to retrieve from database")
    print(">>>>>>>>>>>>>", e)
  return
def SearchTPAByName():
  """Full-text search Third Party Administrators by (partial) name and print matches.

  Uses a MySQL boolean-mode MATCH on TPA_name; a trailing '*' makes the
  search a prefix match.  Uses the module-level `cur`/`con`.
  """
  try:
    row = {}
    print("Enter TPA's info")
    row["name"] = input("Name: ")
    # Parameterized: user text is bound, not spliced into the SQL string.
    cur.execute(
        "SELECT * FROM TPA WHERE MATCH (TPA_name) AGAINST (%s IN BOOLEAN MODE)",
        (row["name"] + "*",))
    result = cur.fetchall()
    print()
    cntr = 1
    for line in result:
      cnums = ""
      types = ""
      cur.execute(
          "SELECT * FROM TPA_Contact_Info WHERE TPA_id = %s",
          (line["TPA_id"],))
      for x in cur.fetchall():
        cnums += ", " + x["contact_number"]
      cur.execute(
          "SELECT * FROM TPA_Investigations_Conducted WHERE TPA_id = %s",
          (line["TPA_id"],))
      for x in cur.fetchall():
        types += ", " + x["type"]
      print("%d:" % (cntr))
      print("TPA Id: ", line["TPA_id"])
      print("Name: ", line["TPA_name"])
      print("Address: %s, %s-%s, %s" %
            (line["street_address"], line["city"], line["zip_code"], line["state"]))
      # The accumulated strings start with ", ", hence the [2:] slices.
      if cnums:
        print("Contact Numbers: ", cnums[2:])
      else:
        print("Contact Numbers: No numbers mentioned")
      if types:
        print("Types of Investigations Conducted: ", types[2:])
      else:
        print("Types of Investigations Conducted: No types mentioned")
      print()
      cntr += 1
    con.commit()
  except Exception as e:
    con.rollback()
    print("Failed to retrieve from database")
    print(">>>>>>>>>>>>>", e)
  return
def GenerateClaimedPolicyReports():
  """Print the claim report of every claimed policy, grouped by requested policy type.

  Uses the module-level `cur`/`con`.
  """
  # Whitelist makes the table-name concatenation below safe.
  POLICY_TABLES = ("Life", "Medical", "Vehicle", "House", "Travel")
  try:
    row = {}
    print("Enter policy types (Life, Medical, Vehicle, House, Travel)")
    row["subclass"] = (
        input("Policy types (space seperated and case sensitive): ")).split(' ')
    val = {}
    for sub in set(row["subclass"]):
      if sub not in POLICY_TABLES:
        print(">>>>>>>>>>>>>Invalid type of policy")
        continue
      cur.execute(
          "SELECT policy_id FROM " + sub +
          " WHERE policy_id IN (SELECT policy_id FROM Resolves_Claims)")
      val[sub] = [line["policy_id"] for line in cur.fetchall()]
    for key, value in val.items():
      print("%s Insurance: " % (key))
      cntr = 1
      for pid in value:
        cur.execute(
            "SELECT claim_report FROM Claim_Report WHERE policy_id = %s",
            (pid,))
        result = cur.fetchall()
        reportstr = ""
        for x in result:
          reportstr = x["claim_report"]
          break
        print("%d:" % (cntr))
        print("Policy Id: ", pid)
        # The column may be NULL in the DB; only then is there no report.
        if reportstr is not None:
          print("Report:")
          for s in reportstr.split('-'):
            print(s)
        else:
          print("Report: No report mentioned")
        cntr += 1
      print()
    con.commit()
  except Exception as e:
    con.rollback()
    print("Failed to retrieve from database")
    print(">>>>>>>>>>>>>", e)
  return
def GenerateCustomerReport():
  """Report every policy of one customer: total claimed amount, claim status, and details.

  Reads the customer's aadhar number from stdin, prints the sum of the
  assured values of their claimed policies, then full details of each
  policy they hold.  Uses the module-level `cur`/`con`.
  """
  try:
    row = {}
    print("Enter customer's Aadhar Number: ")
    row["aadhar_no"] = input("Aadhar number: ")
    # Parameterized queries: avoids SQL injection via crafted input.
    cur.execute(
        "SELECT * FROM Policy WHERE customer_aadhar_no = %s "
        "AND policy_id IN (SELECT policy_id FROM Resolves_Claims)",
        (row["aadhar_no"],))
    val = 0.0
    for line in cur.fetchall():
      val += float(line["sum_assured"])
    cur.execute("SELECT * FROM Policy WHERE customer_aadhar_no = %s",
                (row["aadhar_no"],))
    result = cur.fetchall()

    def _rows(table, pid):
      # Fetch all rows of `table` for one policy id.  Table names are
      # hard-coded by the callers below, so the concatenation is safe.
      cur.execute("SELECT * FROM " + table + " WHERE policy_id = %s", (pid,))
      return cur.fetchall()

    def _csv(table, column, pid):
      # Build ", a, b, c" from one column, turning the schema's
      # dash-separated multi-word values into space-separated text.
      out = ""
      for x in _rows(table, pid):
        out += ", %s" % (x[column].replace('-', ' '))
      return out

    print()
    print("Total amount claimed by customer is ", val)
    print()
    cntr = 1
    for line in result:
      pid = line["policy_id"]
      claimt = _rows("Claim_Date", pid)
      doc = datetime.now()  # placeholder; overwritten when a claim exists
      for x in claimt:
        doc = x["date_of_claim"]
        break
      # A policy belongs to exactly one subtype table; find which.
      top = 0
      sub = {}
      for idx, table in enumerate(
          ("Life", "Medical", "Vehicle", "House", "Travel"), 1):
        found = _rows(table, pid)
        if found:
          top = idx
          sub = found[0].copy()
          break
      if top == 0:
        print("Failed to retrieve from database")
        print(">>>>>>>>>>>>>Policy with id %s not of any type" % (pid))
        continue
      print("%d:" % (cntr))
      print("Policy id: ", pid)
      print("Terms and Conditions")
      for s in line["terms_and_conditions"].split('-'):
        print(s)
      print("Date of Issue: ", line["date_of_issue"])
      # NB: "durantion_in_months" is the actual (misspelled) column name.
      print("Duration: %d months" % (line["durantion_in_months"]))
      print("Premium: ", line["premium_value"])
      print("Sum assured: ", line["sum_assured"])
      print("Department number of issuing employee: ",
            line["employee_department_no"])
      print("Serial number of issuing employee: ",
            line["employee_serial_no"])
      if claimt:
        print("Policy Status: Claimed")
        print("Date of Claim: ", doc)
      else:
        print("Policy Status: Not Claimed")
      if top == 1:
        names = ""
        for x in _rows("Beneficiaries", pid):
          names += ", %s" % (x["name_of_beneficiary"])
        print("Death Value Benefit: ", sub["death_value_benefit"])
        print("Medical History of Customer: ")
        for s in sub["medical_history"].split('-'):
          print("\t", s)
        if names:
          print("Names of beneficiaries of Customer: ", names[2:])
        else:
          print(
              "Names of beneficiaries of Customer: No beneficiaries mentioned")
      elif top == 2:
        cond_names = _csv("Conditions_Covered", "name_of_condition", pid)
        hosp_names = _csv("Cashless_Hospitals", "name_of_hospital", pid)
        print("Death Value Benefit: ", sub["death_value_benefit"])
        if cond_names:
          print("Conditions Covered by Policy: ", cond_names[2:])
        else:
          print("Conditions Covered by Policy: No conditions mentioned")
        if hosp_names:
          print(
              "Cashless Hospitals having tie-ups with Policy: ", hosp_names[2:])
        else:
          print(
              "Cashless Hospitals having tie-ups with Policy: No hospitals mentioned")
      elif top == 3:
        col_names = _csv("Vehicle_Colours", "colour", pid)
        ln = ""
        for x in _rows("Customer_License_No", pid):
          ln = x["customer_license_no"]
          break
        print("License Plate Number: ", sub["license_plate_no"])
        if col_names:
          print("Colours of Vehicle: ", col_names[2:])
        else:
          print("Colours of Vehicle: No colours mentioned")
        print("Customer License Number: ", ln)
      elif top == 4:
        print("Replacement Cost of House: ", sub["replacement_cost"])
        print("House Address: %s, %s-%s, %s" %
              (sub["street_address"], sub["city"], sub["zip_code"], sub["state"]))
      elif top == 5:
        dest_names = _csv("Travel_Destinations", "destination", pid)
        print("Itenerary of Travel: ", sub["itenerary"])
        print("Airline and Hotel Booking of Travel: ",
              sub["airline_and_hotel_bookings"])
        if dest_names:
          print("Travel Destinations: ", dest_names[2:])
        else:
          print("Travel Destinations: No destinations mentioned")
      print()
      cntr += 1
    con.commit()
  except Exception as e:
    con.rollback()
    print("Failed to retrieve from database")
    print(">>>>>>>>>>>>>", e)
  return
def dispatch(ch):
  """Run the menu action for choice `ch`; print an error for unknown choices.

  The 41-branch if/elif chain is replaced by a dict of zero-argument
  callables.  Lambdas defer evaluation, so e.g. AddPolicy() only runs when
  its menu option is actually chosen.
  """
  actions = {
      1: AddCustomer,
      2: AddCustomerDependant,
      3: AddEmployee,
      4: AddEmployeeDependant,
      5: lambda: AddLifeInsurance(AddPolicy()),
      6: lambda: AddMedicalInsurance(AddPolicy()),
      7: lambda: AddVehicleInsurance(AddPolicy()),
      8: lambda: AddHouseInsurance(AddPolicy()),
      9: lambda: AddTravelInsurance(AddPolicy()),
      10: AddTPA,
      11: ResloveClaim,
      12: UpdateCustomerInfo,
      13: UpdateCustomerDependentInfo,
      14: UpdateEmployeeInfo,
      15: UpdateEmployeeDependantInfo,
      16: UpdateTPAinfo,
      17: UpdatePolicy,
      18: DeleteCustomer,
      19: lambda: DeleteCustomerDependant(None, None, None),
      20: DeleteEmployee,
      21: lambda: DeleteEmployeeDependant(None, None, None),
      22: lambda: DeletePolicy(""),
      23: DeleteTPA,
      24: GetAllCustomers,
      25: GetAllPoliciesOfCustomers,
      26: GetAllDependentsOfCustomers,
      27: GetAllDependentsOfEmployee,
      28: GetAllTPAsInCity,
      29: GetInsurancePolicies,
      30: GetInsuranceCustomers,
      31: GetPoliciesWithPremium,
      32: GetCustomersOfAge,
      33: GetTotalClaim,
      34: GetMaxClaim,
      35: GetMinClaim,
      36: GetAverageClaim,
      37: SearchCustomerByName,
      38: SearchEmployeeByName,
      39: SearchTPAByName,
      40: GenerateClaimedPolicyReports,
      41: GenerateCustomerReport,
  }
  action = actions.get(ch)
  if action is None:
    print("Error: Invalid Option")
  else:
    action()
# Global
# Interactive login / menu loop.  Prompts for MySQL credentials, connects,
# and repeatedly shows the action menu until the user logs out (choice 0).
# Uses `sp` (subprocess) and `pymysql` imported at the top of the file.

# Menu entries printed each iteration; index 0 is option 1.
MENU_OPTIONS = (
    "1. Add a Customer",
    "2. Add a Customer Dependant",
    "3. Add an Employee",
    "4. Add a Employee Dependant",
    "5. Add a Life Insurance Policy",
    "6. Add a Medical Insurance Policy",
    "7. Add a Vehicle Insurance Policy",
    "8. Add a House Insurance Policy",
    "9. Add a Travel Insurance Policy",
    "10. Add a Third Party Administrator",
    "11. Reslove a Policy Claim",
    "12. Update the information of an existing Customer",
    "13. Update the information of an existing Dependant of an existing Customer",
    "14. Update the information of an Employee",
    "15. Update the information of an existing Dependant of an existing Employee",
    "16. Update the information of a Third Party Administrator",
    "17. Update the information of a Policy",
    "18. Delete the information of a Customer",
    "19. Delete the information of a Customer's Dependant",
    "20. Delete the information of an Employee",
    "21. Delete the information of a Employee's Dependant",
    "22. Delete the information of a Policy",
    "23. Delete the information of a Third Party Administrator",
    "24. Get all Customers of given status",
    "25. Get all Policies bought by given customer",
    "26. Get inforfmation of all Dependents of a Customer",
    "27. Get inforfmation of all Dependents of an Employee",
    "28. Get inforfmation of all Third Party Administrators in a given city",
    "29. Get inforfmation of all Policies in a given list",
    "30. Get inforfmation of all Customers with status in a given list and who have bought policies of a type in a given list",
    "31. Get inforfmation of all Policies with their premium in a given range",
    "32. Get inforfmation of all Customers of a given age",
    "33. Get total claim value of policy types of a given list",
    "34. Get Maximum claim value of a given policy type",
    "35. Get Minimum claim value of a given policy type",
    "36. Get Average claim value of a given policy type",
    "37. Get inforfmation of all Customers with partial/complete match to given name",
    "38. Get inforfmation of all Employees with partial/complete match to given name",
    "39. Get inforfmation of all Third Party Administrators with partial/complete match to given name",
    "40. Generate reports of all claimed policies of a given list of policy types",
    "41. Generate reports of all policies bought by a given Customer",
    "0. Logout",
)

while True:
  tmp = sp.call('clear', shell=True)
  # Can be skipped if you want to hard-code username and password
  username = input("Username: ")
  password = input("Password: ")
  try:
    # Set db name accordingly which have been created by you
    # Set host to the server's address if you don't want to use local SQL server
    con = pymysql.connect(host='localhost',
                          user=username,
                          password=password,
                          port=5005,
                          db='Insurance_company',
                          cursorclass=pymysql.cursors.DictCursor)
    tmp = sp.call('clear', shell=True)
    if con.open:
      print("Connected")
    else:
      print("Failed to connect")
    tmp = input("Enter any key to CONTINUE>")
    with con.cursor() as cur:
      while True:
        tmp = sp.call('clear', shell=True)
        print(
            "Please enter the number cooresponding to the action you want to perform:")
        for option in MENU_OPTIONS:
          print(option)
        ch = int(input("Enter choice> "))
        tmp = sp.call('clear', shell=True)
        if ch == 0:
          break
        else:
          dispatch(ch)
          tmp = input("Enter any key to CONTINUE>")
  # Narrowed from a bare `except:` so Ctrl-C / SystemExit still work;
  # any other error (bad credentials, bad int input) falls through here.
  except Exception:
    tmp = sp.call('clear', shell=True)
    print("Connection Refused: Either username or password is incorrect or user doesn't have access to database")
    tmp = input("Enter any key to CONTINUE>")
| 36.725644
| 289
| 0.496341
| 14,155
| 131,184
| 4.492617
| 0.033698
| 0.04164
| 0.058497
| 0.025129
| 0.882927
| 0.865551
| 0.845455
| 0.822024
| 0.801739
| 0.785354
| 0
| 0.006187
| 0.367964
| 131,184
| 3,571
| 290
| 36.735928
| 0.760797
| 0.001974
| 0
| 0.823529
| 0
| 0.012816
| 0.347981
| 0.012909
| 0.000329
| 0
| 0
| 0
| 0
| 1
| 0.01676
| false
| 0.000986
| 0.001972
| 0
| 0.061124
| 0.1791
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
37b70f1bd309b3ff93c88eccc5b19ee6d53a1186
| 141,873
|
py
|
Python
|
gnosis/eth/tests/mocks/mock_trace_block.py
|
titandac/gnosis-py
|
cf0af4f25e64b22256eabb415d0f3fe3a6180b14
|
[
"MIT"
] | 64
|
2018-09-26T19:56:50.000Z
|
2022-03-18T21:45:59.000Z
|
gnosis/eth/tests/mocks/mock_trace_block.py
|
zhanghao-ic/gnosis-py
|
d2a5912547b7d1b576c826909f4c1d0155db536f
|
[
"MIT"
] | 151
|
2018-09-10T21:42:05.000Z
|
2022-03-31T12:33:31.000Z
|
gnosis/eth/tests/mocks/mock_trace_block.py
|
zhanghao-ic/gnosis-py
|
d2a5912547b7d1b576c826909f4c1d0155db536f
|
[
"MIT"
] | 50
|
2018-12-13T20:43:46.000Z
|
2022-03-30T09:32:32.000Z
|
from hexbytes import HexBytes
# Test fixture: mocked response of an Ethereum JSON-RPC ``trace_block`` call
# (OpenEthereum/Erigon tracing format) for mainnet block 2191709.
#
# The list holds one dict per trace, in block order. Each ``call`` trace has:
#   - "action": sender/receiver, gas, wei value, call type and calldata
#     (calldata and outputs are wrapped in ``HexBytes``);
#   - "result": gas used and output bytes;
#   - "subtraces"/"traceAddress": position of the trace in the call tree
#     ([] = top-level call, [i] = i-th subtrace of that call);
#   - "transactionHash"/"transactionPosition": the enclosing transaction.
# The final entry is the miner ``reward`` trace, which — as returned by real
# nodes — has ``result``, ``transactionHash`` and ``transactionPosition``
# set to ``None`` and only a "value" inside "action".
trace_block_2191709_mock = [
    # Transaction 0: plain ETH transfer (empty calldata, gasUsed == 0).
    {
        "action": {
            "from": "0xEA674fdDe714fd979de3EdF0F56AA9716B898ec8",
            "gas": 69000,
            "value": 1000801159649151900,
            "callType": "call",
            "input": HexBytes("0x"),
            "to": "0x7b5A4767158DfBbaFcDE969F2B4d7FBCC19b5d3c",
        },
        "blockHash": "0x4169fc8dfb9ece41c90044ebc9b8e2daed9f5e08c0ba3746e337732aa48b3bc3",
        "blockNumber": 2191709,
        "result": {"gasUsed": 0, "output": HexBytes("0x")},
        "subtraces": 0,
        "traceAddress": [],
        "transactionHash": "0x8888796bdf74b616f8900d41094bd8213b1c73385916ce4c8f5bb020f3acba3c",
        "transactionPosition": 0,
        "type": "call",
    },
    # Transaction 1: plain ETH transfer.
    {
        "action": {
            "from": "0x2a65Aca4D5fC5B5C859090a6c34d164135398226",
            "gas": 69000,
            "value": 511624720000000000,
            "callType": "call",
            "input": HexBytes("0x"),
            "to": "0x038a877d4fB63A195c22783888DA2041911Ed818",
        },
        "blockHash": "0x4169fc8dfb9ece41c90044ebc9b8e2daed9f5e08c0ba3746e337732aa48b3bc3",
        "blockNumber": 2191709,
        "result": {"gasUsed": 0, "output": HexBytes("0x")},
        "subtraces": 0,
        "traceAddress": [],
        "transactionHash": "0x878d326c7cc7fd39cf7875d2ec3a69a910409d313eab3764fe253b9fed5acd60",
        "transactionPosition": 1,
        "type": "call",
    },
    # Transaction 2: contract call with calldata; spawns 2 subtraces (below).
    {
        "action": {
            "from": "0x7B60F3f033e6D7808428C9f2dBC81682ff24984F",
            "gas": 108562,
            "value": 286012610000000000,
            "callType": "call",
            "input": HexBytes(
                "0x0f2c9329000000000000000000000000fbb1b73c4f0bda4f67dca266ce6ef42f520fbb98000000000000000000000000e592b0d8baa2cb677034389b76a71b0d1823e0d1"
            ),
            "to": "0xE94b04a0FeD112f3664e45adb2B8915693dD5FF3",
        },
        "blockHash": "0x4169fc8dfb9ece41c90044ebc9b8e2daed9f5e08c0ba3746e337732aa48b3bc3",
        "blockNumber": 2191709,
        "result": {
            "gasUsed": 8562,
            "output": HexBytes(
                "0x0000000000000000000000000000000000000000000000000000000000000001"
            ),
        },
        "subtraces": 2,
        "traceAddress": [],
        "transactionHash": "0x849da57a0cbed360cb0367f95650c0f2cf5e37b394a519748779d825cffba6fe",
        "transactionPosition": 2,
        "type": "call",
    },
    # First subtrace of transaction 2 (traceAddress [0]): internal contract call.
    {
        "action": {
            "from": "0xE94b04a0FeD112f3664e45adb2B8915693dD5FF3",
            "gas": 83248,
            "value": 0,
            "callType": "call",
            "input": HexBytes("0x16c72721"),
            "to": "0x2BD2326c993DFaeF84f696526064FF22eba5b362",
        },
        "blockHash": "0x4169fc8dfb9ece41c90044ebc9b8e2daed9f5e08c0ba3746e337732aa48b3bc3",
        "blockNumber": 2191709,
        "result": {
            "gasUsed": 197,
            "output": HexBytes(
                "0x0000000000000000000000000000000000000000000000000000000000000001"
            ),
        },
        "subtraces": 0,
        "traceAddress": [0],
        "transactionHash": "0x849da57a0cbed360cb0367f95650c0f2cf5e37b394a519748779d825cffba6fe",
        "transactionPosition": 2,
        "type": "call",
    },
    # Second subtrace of transaction 2 (traceAddress [1]): internal ETH send
    # with the 2300 gas stipend and empty calldata.
    {
        "action": {
            "from": "0xE94b04a0FeD112f3664e45adb2B8915693dD5FF3",
            "gas": 2300,
            "value": 286012610000000000,
            "callType": "call",
            "input": HexBytes("0x"),
            "to": "0xFBb1b73C4f0BDa4f67dcA266ce6Ef42f520fBB98",
        },
        "blockHash": "0x4169fc8dfb9ece41c90044ebc9b8e2daed9f5e08c0ba3746e337732aa48b3bc3",
        "blockNumber": 2191709,
        "result": {"gasUsed": 0, "output": HexBytes("0x")},
        "subtraces": 0,
        "traceAddress": [1],
        "transactionHash": "0x849da57a0cbed360cb0367f95650c0f2cf5e37b394a519748779d825cffba6fe",
        "transactionPosition": 2,
        "type": "call",
    },
    # Block reward trace: not tied to a transaction, so result/hash/position
    # are None. Callers iterating this fixture must tolerate these Nones.
    {
        "action": {"value": 5000000000000000000},
        "blockHash": "0x4169fc8dfb9ece41c90044ebc9b8e2daed9f5e08c0ba3746e337732aa48b3bc3",
        "blockNumber": 2191709,
        "result": None,
        "subtraces": 0,
        "traceAddress": [],
        "transactionHash": None,
        "transactionPosition": None,
        "type": "reward",
    },
]
trace_block_13191781_mock = [
{
"action": {
"from": "0x061A9D627028fE708E1b77e591b9bdF41392D4Ba",
"gas": 36942,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb00000000000000000000000010c6b61dbf44a083aec3780acf769c77be747e23000000000000000000000000000000000000000000000000000000012a05f200"
),
"to": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 26917,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 1,
"traceAddress": [],
"transactionHash": "0x0ba656badfa135dec7389317fefac737f75276bbbea10e103837d87f9e853aec",
"transactionPosition": 0,
"type": "call",
},
{
"action": {
"from": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
"gas": 29233,
"value": 0,
"callType": "delegatecall",
"input": HexBytes(
"0xa9059cbb00000000000000000000000010c6b61dbf44a083aec3780acf769c77be747e23000000000000000000000000000000000000000000000000000000012a05f200"
),
"to": "0xa2327a938Febf5FEC13baCFb16Ae10EcBc4cbDCF",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 19628,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [0],
"transactionHash": "0x0ba656badfa135dec7389317fefac737f75276bbbea10e103837d87f9e853aec",
"transactionPosition": 0,
"type": "call",
},
{
"action": {
"from": "0x4091243E3fB5E637D06c265C6EAe1Be7fb8460Ce",
"gas": 262809,
"value": 96000000000000000,
"callType": "call",
"input": HexBytes(
"0xab834bab0000000000000000000000007be8076f4ea4a4ad08075c2508e481d6c946d12b0000000000000000000000004091243e3fb5e637d06c265c6eae1be7fb8460ce0000000000000000000000002c4965962223405a7b3520b4cdcf2a401620d28d0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000300ef850ca7754437cfce52fe0c47e5f890fb183000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007be8076f4ea4a4ad08075c2508e481d6c946d12b0000000000000000000000002c4965962223405a7b3520b4cdcf2a401620d28d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000005b3256965e7c3cf26e11fcaf296dfc8807c01073000000000000000000000000300ef850ca7754437cfce52fe0c47e5f890fb1830000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000fa00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001550f7dca700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000613a09af00000000000000000000000000000000000000000000000000000000000000009b10f6948e4ee3e346be766de489e25b43d0e52025c59350fd5ad70f5a497d7900000000000000000000000000000000000000000000000000000000000000fa00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001550f7dca700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000613a08e300000000000000000000000000000000000000000000000000000000000000008efd1
0aa1e620120adc5284569a80f5e2a58de8f7d4acd89bbc932aeb07547c40000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006a0000000000000000000000000000000000000000000000000000000000000074000000000000000000000000000000000000000000000000000000000000007e0000000000000000000000000000000000000000000000000000000000000088000000000000000000000000000000000000000000000000000000000000009200000000000000000000000000000000000000000000000000000000000000940000000000000000000000000000000000000000000000000000000000000001b000000000000000000000000000000000000000000000000000000000000001b2b701b34eaf05a2adc84f6c281fedfe6f3170c7bf9fa6fda7f313806bf050e2160a1849a6933219e5fceefbbaf38c0e4525856005b5a869bd432e5408348a1c92b701b34eaf05a2adc84f6c281fedfe6f3170c7bf9fa6fda7f313806bf050e2160a1849a6933219e5fceefbbaf38c0e4525856005b5a869bd432e5408348a1c929b2f895343cadfb3f5101bef6484b1f01c83dc9000000000000000000000000000000000000000000000000000000000000000000000000000000000000006423b872dd00000000000000000000000000000000000000000000000000000000000000000000000000000000000000004091243e3fb5e637d06c265c6eae1be7fb8460ce00000000000000000000000000000000000000000000000000000000000008df00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006423b872dd0000000000000000000000002c4965962223405a7b3520b4cdcf2a401620d28d000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008df0000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006400000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000064000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
),
"to": "0x7Be8076f4EA4A4AD08075C2508e481d6C946D12b",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"error": "Reverted",
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xbe99757628bfc3d5c7ee4e42c2629ddd13ac52354e6abb189efe5e277dce05b3",
"transactionPosition": 1,
"type": "call",
},
{
"action": {
"from": "0x4091243E3fB5E637D06c265C6EAe1Be7fb8460Ce",
"gas": 276407,
"value": 99000000000000000,
"callType": "call",
"input": HexBytes(
"0xab834bab0000000000000000000000007be8076f4ea4a4ad08075c2508e481d6c946d12b0000000000000000000000004091243e3fb5e637d06c265c6eae1be7fb8460ce0000000000000000000000008bdbf4b19cb840e9ac9b1effc2bfad47591b5bf20000000000000000000000000000000000000000000000000000000000000000000000000000000000000000300ef850ca7754437cfce52fe0c47e5f890fb183000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007be8076f4ea4a4ad08075c2508e481d6c946d12b0000000000000000000000008bdbf4b19cb840e9ac9b1effc2bfad47591b5bf200000000000000000000000000000000000000000000000000000000000000000000000000000000000000005b3256965e7c3cf26e11fcaf296dfc8807c01073000000000000000000000000300ef850ca7754437cfce52fe0c47e5f890fb1830000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000fa000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000015fb7f9b8c38000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000613a0a15000000000000000000000000000000000000000000000000000000000000000067f00b2bff164f875757b4e04c91d204178ea3a194bac0ad2664b89e9db0ec2f00000000000000000000000000000000000000000000000000000000000000fa000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000015fb7f9b8c38000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000613a088900000000000000000000000000000000000000000000000000000000000000009a6ae
9e029e0337dfbb4b063a35c4884bbd52effd759ccf202f028f7c7a88dd50000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006a0000000000000000000000000000000000000000000000000000000000000074000000000000000000000000000000000000000000000000000000000000007e0000000000000000000000000000000000000000000000000000000000000088000000000000000000000000000000000000000000000000000000000000009200000000000000000000000000000000000000000000000000000000000000940000000000000000000000000000000000000000000000000000000000000001b000000000000000000000000000000000000000000000000000000000000001b6ea4d9de0a1eda9a6107324f9560b731cbb2a27c2b2fd3d736836f7b00ab88b52b1f5898f794406b993e625218e8d442c58a41ffecca83ef439b7c3d0707b5dc6ea4d9de0a1eda9a6107324f9560b731cbb2a27c2b2fd3d736836f7b00ab88b52b1f5898f794406b993e625218e8d442c58a41ffecca83ef439b7c3d0707b5dc29b2f895343cadfb3f5101bef6484b1f01c83dc9000000000000000000000000000000000000000000000000000000000000000000000000000000000000006423b872dd00000000000000000000000000000000000000000000000000000000000000000000000000000000000000004091243e3fb5e637d06c265c6eae1be7fb8460ce0000000000000000000000000000000000000000000000000000000000000b6200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006423b872dd0000000000000000000000008bdbf4b19cb840e9ac9b1effc2bfad47591b5bf200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000b620000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006400000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000064000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
),
"to": "0x7Be8076f4EA4A4AD08075C2508e481d6C946D12b",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 201933, "output": HexBytes("0x")},
"subtraces": 6,
"traceAddress": [],
"transactionHash": "0x7798b6ef9f281c87d45bdeabda70e4b9f24085c15aaac8e75a9f4480fb2feeef",
"transactionPosition": 2,
"type": "call",
},
{
"action": {
"from": "0x7Be8076f4EA4A4AD08075C2508e481d6C946D12b",
"gas": 227471,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xc45527910000000000000000000000008bdbf4b19cb840e9ac9b1effc2bfad47591b5bf2"
),
"to": "0xa5409ec958C83C3f309868babACA7c86DCB077c1",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 2782,
"output": HexBytes(
"0x000000000000000000000000892c0feffe706b811a8437ab9e2293fa5f7b907a"
),
},
"subtraces": 0,
"traceAddress": [0],
"transactionHash": "0x7798b6ef9f281c87d45bdeabda70e4b9f24085c15aaac8e75a9f4480fb2feeef",
"transactionPosition": 2,
"type": "call",
},
{
"action": {
"from": "0x7Be8076f4EA4A4AD08075C2508e481d6C946D12b",
"gas": 223931,
"value": 0,
"callType": "call",
"input": HexBytes("0x97204d8e"),
"to": "0xa5409ec958C83C3f309868babACA7c86DCB077c1",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 2613,
"output": HexBytes(
"0x000000000000000000000000f9e266af4bca5890e2781812cc6a6e89495a79f2"
),
},
"subtraces": 0,
"traceAddress": [1],
"transactionHash": "0x7798b6ef9f281c87d45bdeabda70e4b9f24085c15aaac8e75a9f4480fb2feeef",
"transactionPosition": 2,
"type": "call",
},
{
"action": {
"from": "0x7Be8076f4EA4A4AD08075C2508e481d6C946D12b",
"gas": 218434,
"value": 0,
"callType": "call",
"input": HexBytes("0x5c60da1b"),
"to": "0x892C0FEfFE706b811a8437aB9e2293FA5F7b907A",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 2525,
"output": HexBytes(
"0x000000000000000000000000f9e266af4bca5890e2781812cc6a6e89495a79f2"
),
},
"subtraces": 0,
"traceAddress": [2],
"transactionHash": "0x7798b6ef9f281c87d45bdeabda70e4b9f24085c15aaac8e75a9f4480fb2feeef",
"transactionPosition": 2,
"type": "call",
},
{
"action": {
"from": "0x7Be8076f4EA4A4AD08075C2508e481d6C946D12b",
"gas": 2300,
"value": 2475000000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x5b3256965e7C3cF26E11FCAf296DfC8807C01073",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [3],
"transactionHash": "0x7798b6ef9f281c87d45bdeabda70e4b9f24085c15aaac8e75a9f4480fb2feeef",
"transactionPosition": 2,
"type": "call",
},
{
"action": {
"from": "0x7Be8076f4EA4A4AD08075C2508e481d6C946D12b",
"gas": 2300,
"value": 96525000000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x8bdBF4B19cb840e9Ac9B1eFFc2BfAd47591B5bF2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [4],
"transactionHash": "0x7798b6ef9f281c87d45bdeabda70e4b9f24085c15aaac8e75a9f4480fb2feeef",
"transactionPosition": 2,
"type": "call",
},
{
"action": {
"from": "0x7Be8076f4EA4A4AD08075C2508e481d6C946D12b",
"gas": 173586,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x1b0f7ba9000000000000000000000000300ef850ca7754437cfce52fe0c47e5f890fb18300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000006423b872dd0000000000000000000000008bdbf4b19cb840e9ac9b1effc2bfad47591b5bf20000000000000000000000004091243e3fb5e637d06c265c6eae1be7fb8460ce0000000000000000000000000000000000000000000000000000000000000b6200000000000000000000000000000000000000000000000000000000"
),
"to": "0x892C0FEfFE706b811a8437aB9e2293FA5F7b907A",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 98215,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 1,
"traceAddress": [5],
"transactionHash": "0x7798b6ef9f281c87d45bdeabda70e4b9f24085c15aaac8e75a9f4480fb2feeef",
"transactionPosition": 2,
"type": "call",
},
{
"action": {
"from": "0x892C0FEfFE706b811a8437aB9e2293FA5F7b907A",
"gas": 167709,
"value": 0,
"callType": "delegatecall",
"input": HexBytes(
"0x1b0f7ba9000000000000000000000000300ef850ca7754437cfce52fe0c47e5f890fb18300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000006423b872dd0000000000000000000000008bdbf4b19cb840e9ac9b1effc2bfad47591b5bf20000000000000000000000004091243e3fb5e637d06c265c6eae1be7fb8460ce0000000000000000000000000000000000000000000000000000000000000b6200000000000000000000000000000000000000000000000000000000"
),
"to": "0xF9e266af4BcA5890e2781812cc6a6E89495a79f2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 94955,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 2,
"traceAddress": [5, 0],
"transactionHash": "0x7798b6ef9f281c87d45bdeabda70e4b9f24085c15aaac8e75a9f4480fb2feeef",
"transactionPosition": 2,
"type": "call",
},
{
"action": {
"from": "0x892C0FEfFE706b811a8437aB9e2293FA5F7b907A",
"gas": 159793,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x69dc9ff30000000000000000000000007be8076f4ea4a4ad08075c2508e481d6c946d12b"
),
"to": "0xa5409ec958C83C3f309868babACA7c86DCB077c1",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 2553,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [5, 0, 0],
"transactionHash": "0x7798b6ef9f281c87d45bdeabda70e4b9f24085c15aaac8e75a9f4480fb2feeef",
"transactionPosition": 2,
"type": "call",
},
{
"action": {
"from": "0x892C0FEfFE706b811a8437aB9e2293FA5F7b907A",
"gas": 156423,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x23b872dd0000000000000000000000008bdbf4b19cb840e9ac9b1effc2bfad47591b5bf20000000000000000000000004091243e3fb5e637d06c265c6eae1be7fb8460ce0000000000000000000000000000000000000000000000000000000000000b6200000000000000000000000000000000000000000000000000000000"
),
"to": "0x300Ef850CA7754437cFcE52Fe0C47e5f890FB183",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 86058, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [5, 0, 1],
"transactionHash": "0x7798b6ef9f281c87d45bdeabda70e4b9f24085c15aaac8e75a9f4480fb2feeef",
"transactionPosition": 2,
"type": "call",
},
{
"action": {
"from": "0xF598b81Ef8c7b52a7F2a89253436e72ec6DC871f",
"gas": 84000,
"value": 140949999999999984,
"callType": "call",
"input": HexBytes("0x"),
"to": "0xb0BE4D6159d6480980bCCe0f8b4F0d487e8450BD",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xe0cf30aaa01f5cd786db4b14daf61aac44143778926d5e33ce416a2709d3713b",
"transactionPosition": 3,
"type": "call",
},
{
"action": {
"from": "0xf60c2Ea62EDBfE808163751DD0d8693DCb30019c",
"gas": 186128,
"value": 109720000000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0xe74B4E405768BcC2B6deda7710f659ba7924A245",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xf092fb03ceca9ff7e01e5b6483bfac47a7db6d8bf0a32ac44b45f8a8f0b0b665",
"transactionPosition": 4,
"type": "call",
},
{
"action": {
"from": "0x9E5D17e8E34d2568200C154895ba63523b3560C8",
"gas": 129000,
"value": 0,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x9E5D17e8E34d2568200C154895ba63523b3560C8",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x565d69c19ae671197d53591caff0d945b72f4ac0a1a6d917bda7e2179906055d",
"transactionPosition": 5,
"type": "call",
},
{
"action": {
"from": "0xE851D0A60f038a8B2FF25649cFF4Aa4209c993CE",
"gas": 40631,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb000000000000000000000000f5c48fb2f53b8c2d43a4d4a8eca7abfed364e1e90000000000000000000000000000000000000000000000f0fd3f0144b363e800"
),
"to": "0xE41d2489571d322189246DaFA5ebDe1F4699F498",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 30250,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x13a3a8911e7314c8c38bdb634abab5c503aca77bab0755c36fb3d1a8e61e7820",
"transactionPosition": 6,
"type": "call",
},
{
"action": {
"from": "0xd5351b44102aaBB21022440e29B4295B56016ddF",
"gas": 0,
"value": 135417422759250043,
"callType": "call",
"input": HexBytes("0x"),
"to": "0xa6eeFBd51A818DCBc77d6e8eF6Bd59ab61c403dF",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xc3f27af4f5b8a3063b6dbd0a08019100867afb00f47227cc6e03a9c647777483",
"transactionPosition": 7,
"type": "call",
},
{
"action": {
"from": "0x5041ed759Dd4aFc3a72b8192C143F72f4724081A",
"gas": 398392,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb000000000000000000000000296897c5b419c2217719dc699d244e595d675d0700000000000000000000000000000000000000000000000000000000971a1930"
),
"to": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 26917,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 1,
"traceAddress": [],
"transactionHash": "0x7011ea340523e70f4a18f178c20367816cab4e45aa7d31e705bdb5b51265548f",
"transactionPosition": 8,
"type": "call",
},
{
"action": {
"from": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
"gas": 385036,
"value": 0,
"callType": "delegatecall",
"input": HexBytes(
"0xa9059cbb000000000000000000000000296897c5b419c2217719dc699d244e595d675d0700000000000000000000000000000000000000000000000000000000971a1930"
),
"to": "0xa2327a938Febf5FEC13baCFb16Ae10EcBc4cbDCF",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 19628,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [0],
"transactionHash": "0x7011ea340523e70f4a18f178c20367816cab4e45aa7d31e705bdb5b51265548f",
"transactionPosition": 8,
"type": "call",
},
{
"action": {
"from": "0x9AA65464b4cFbe3Dc2BDB3dF412AeE2B3De86687",
"gas": 228392,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb00000000000000000000000086ff7ab903e3c557aee37b383c0ceabf9ee5690700000000000000000000000000000000000000000000000000000000e880f062"
),
"to": "0xdAC17F958D2ee523a2206206994597C13D831ec7",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 41601, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x58d69c294aaa1503e06c4a69c895cd4fbc47e03f0a341a26c7255609d6b58c79",
"transactionPosition": 9,
"type": "call",
},
{
"action": {
"from": "0x3B794929566e3Ba0f25e4263e1987828b5c87161",
"gas": 29000,
"value": 13239660934222264,
"callType": "call",
"input": HexBytes("0x"),
"to": "0xcAE2c1225481162eFF4c0807c7607724E5c29c9D",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x7b5e1ecaffd9bab57d8b59bf476d031bb2517192139b84362db866b88d40a295",
"transactionPosition": 10,
"type": "call",
},
{
"action": {
"from": "0x1f8F16a29251fA399D89e1005E3f95427Bf5B1dE",
"gas": 0,
"value": 4676123280340692,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x043aD94aeC8f88a62B6b0f130ccC61aC39f77A3c",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x2542cee5c144aada422aa08df4b458c594baae89061d0ae41d8b9678c527f51d",
"transactionPosition": 11,
"type": "call",
},
{
"action": {
"from": "0x307082e6926E4c004F3c821cb1Af08b8A2D80242",
"gas": 53404,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb0000000000000000000000007726f93410e15e64113e3303deb73721f36a01ef00000000000000000000000000000000000000000000000000000000b2d05e00"
),
"to": "0xdAC17F958D2ee523a2206206994597C13D831ec7",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 41601, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x6e90803bf18163a7797d9b35fd5403c485784bf8c9480b2e1fc9b5d81194d8d2",
"transactionPosition": 12,
"type": "call",
},
{
"action": {
"from": "0xC55EdDadEeB47fcDE0B3B6f25BD47D745BA7E7fa",
"gas": 0,
"value": 118400000000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x1645521a6df217605d9949AafB84927018868cDf",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x973702b743a2e3b8caae347f74f2729b5a9cd20a5c8472b94c6758daac4407ee",
"transactionPosition": 13,
"type": "call",
},
{
"action": {
"from": "0xc9f5296Eb3ac266c94568D790b6e91ebA7D76a11",
"gas": 228404,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb0000000000000000000000009f71bcf338be16007ea0502e60aa5b527677a0fc0000000000000000000000000000000000000000000000000000000002faf080"
),
"to": "0xdAC17F958D2ee523a2206206994597C13D831ec7",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 41601, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xd80a9b37571313117f0d34fe3152f1ad4fee03f1a6c629ff7d2bf966a37ed14f",
"transactionPosition": 14,
"type": "call",
},
{
"action": {
"from": "0xc9f5296Eb3ac266c94568D790b6e91ebA7D76a11",
"gas": 228356,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb000000000000000000000000274de18b0e2c24864cfdbd0db156de962b48045e000000000000000000000000000000000000000000000035fe46d2f741100000"
),
"to": "0xa117000000f279D81A1D3cc75430fAA017FA5A2e",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 29842,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x5cf28c6818d072ea14ac6f5923431202a3af58e73c833d525d75af61078ef37d",
"transactionPosition": 15,
"type": "call",
},
{
"action": {
"from": "0xc9f5296Eb3ac266c94568D790b6e91ebA7D76a11",
"gas": 40,
"value": 7088000000000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x5EE7BC4c49c653778597900143702691E8AFCE48",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xe8385591d43060730b568d5762656d72c4597f3c63c17e03143abb4cd2ce2549",
"transactionPosition": 16,
"type": "call",
},
{
"action": {
"from": "0xc9f5296Eb3ac266c94568D790b6e91ebA7D76a11",
"gas": 228024,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x23b872dd00000000000000000000000053ab688f20fdc725325b06c788226794acc47628000000000000000000000000c9f5296eb3ac266c94568d790b6e91eba7d76a11000000000000000000000000000000000000000000000000000000746a528800"
),
"to": "0xdAC17F958D2ee523a2206206994597C13D831ec7",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 26530, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x072f7a1f91c6281907b0b0a001bb2a58cb0e199bcf72f439b816ee7e83d1a978",
"transactionPosition": 17,
"type": "call",
},
{
"action": {
"from": "0x18db8B99c1d6E439fa44Fd87Bfb1109e345e98Da",
"gas": 193674,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x38ed17390000000000000000000000000000000000000000033b2e3c9fd0803ce80000000000000000000000000000000000000000000000000000000000000281c5b6a100000000000000000000000000000000000000000000000000000000000000a000000000000000000000000018db8b99c1d6e439fa44fd87bfb1109e345e98da00000000000000000000000000000000000000000000000000000000613a117b000000000000000000000000000000000000000000000000000000000000000300000000000000000000000085f17cf997934a597031b2e18a9ab6ebd4b9f6a4000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7"
),
"to": "0xd9e1cE17f2641f24aE83637ab66a2cca9C378B9F",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"error": "Reverted",
"subtraces": 2,
"traceAddress": [],
"transactionHash": "0xfac7403428a8213f3fc296412eb3f259086d80dd83be2d819b574b145b8d4855",
"transactionPosition": 18,
"type": "call",
},
{
"action": {
"from": "0xd9e1cE17f2641f24aE83637ab66a2cca9C378B9F",
"gas": 185741,
"value": 0,
"callType": "staticcall",
"input": HexBytes("0x0902f1ac"),
"to": "0x6469B34a2a4723163C4902dbBdEa728D20693C12",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 2517,
"output": HexBytes(
"0x0000000000000000000000000000000000000002c158b1afbb13485d76c9cfe00000000000000000000000000000000000000000000000238fbe7ba3db69d46700000000000000000000000000000000000000000000000000000000613a0ab4"
),
},
"subtraces": 0,
"traceAddress": [0],
"transactionHash": "0xfac7403428a8213f3fc296412eb3f259086d80dd83be2d819b574b145b8d4855",
"transactionPosition": 18,
"type": "call",
},
{
"action": {
"from": "0xd9e1cE17f2641f24aE83637ab66a2cca9C378B9F",
"gas": 178382,
"value": 0,
"callType": "staticcall",
"input": HexBytes("0x0902f1ac"),
"to": "0x06da0fd433C1A5d7a4faa01111c044910A184553",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 2517,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000005d91462602980322d2f000000000000000000000000000000000000000000000000000058ee2871cbb700000000000000000000000000000000000000000000000000000000613a0a97"
),
},
"subtraces": 0,
"traceAddress": [1],
"transactionHash": "0xfac7403428a8213f3fc296412eb3f259086d80dd83be2d819b574b145b8d4855",
"transactionPosition": 18,
"type": "call",
},
{
"action": {
"from": "0xC098B2a3Aa256D2140208C3de6543aAEf5cd3A94",
"gas": 42000,
"value": 903856630000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0xAcF288a55C9e807e6B5d7DD4cB4f314eBe1E14FA",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x6b460cc1afa1311f9d698fb3b45a68ad537a30630b20db06a394e1d7de3c5a02",
"transactionPosition": 19,
"type": "call",
},
{
"action": {
"from": "0xEE022C4a3A8855356E78a3960A34842dC868B754",
"gas": 48380,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb0000000000000000000000008abdfb25f4d46e59c4a85e3da026679999f00291000000000000000000000000000000000000000000000000000000069275d6c0"
),
"to": "0xdAC17F958D2ee523a2206206994597C13D831ec7",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 24501, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x9665b3325bd3a57d61c7d185f1eb428b61ef244561fa15fbb0237a4e67f593ea",
"transactionPosition": 20,
"type": "call",
},
{
"action": {
"from": "0x6871EaCd33fbcfE585009Ab64F0795d7152dc5a0",
"gas": 34024,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x23b872dd00000000000000000000000041335ee132cdde75acac48f1489161128440444c0000000000000000000000006871eacd33fbcfe585009ab64f0795d7152dc5a00000000000000000000000000000000000000000000000000000000a0b9329c9"
),
"to": "0xdAC17F958D2ee523a2206206994597C13D831ec7",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 26530, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x3204a5a8a2c34a8f8224375968a440858aee459ca50278e33073939d9d2f0a79",
"transactionPosition": 21,
"type": "call",
},
{
"action": {
"from": "0x84ee5a99a08D98e2966B1a889fDCaB1CF3F7C589",
"gas": 0,
"value": 17790313368673531,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x1AB18ac546Cf48509D4cd41d48B41cc859A269A5",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x62dacd69eab666668088561b6d87e3f703973992b0f00666e09585af3264ea4d",
"transactionPosition": 22,
"type": "call",
},
{
"action": {
"from": "0xfa35113163bFD33c18A01d1A62d4D14a1Ed30a42",
"gas": 126069,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x2e95b6c80000000000000000000000006286a9e6f7e745a6d884561d88f94542d6715698000000000000000000000000000000000000000000000b5a0ebbfe5a15da000000000000000000000000000000000000000000000000000015b1e91b911a8aeb0000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000140000000000000003b6d034084d1f4bbd0fb53b9a09e95e051f2fe1bf3e01e6a"
),
"to": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"error": "Reverted",
"subtraces": 5,
"traceAddress": [],
"transactionHash": "0xd1118a18e43777636ccef0cafa5de58c3b0c6800454606342ba46a662828a8c6",
"transactionPosition": 23,
"type": "call",
},
{
"action": {
"from": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
"gas": 120771,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x23b872dd000000000000000000000000fa35113163bfd33c18a01d1a62d4d14a1ed30a4200000000000000000000000084d1f4bbd0fb53b9a09e95e051f2fe1bf3e01e6a000000000000000000000000000000000000000000000b5a0ebbfe5a15da0000"
),
"to": "0x6286A9e6f7e745A6D884561D88F94542d6715698",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 20711,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [0],
"transactionHash": "0xd1118a18e43777636ccef0cafa5de58c3b0c6800454606342ba46a662828a8c6",
"transactionPosition": 23,
"type": "call",
},
{
"action": {
"from": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
"gas": 97641,
"value": 0,
"callType": "staticcall",
"input": HexBytes("0x0902f1ac"),
"to": "0x84d1f4BBD0FB53b9a09e95E051f2fe1bF3e01e6A",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 2504,
"output": HexBytes(
"0x00000000000000000000000000000000000000000003cac63f6efb193364d5d200000000000000000000000000000000000000000000000755b352de71f2c1a600000000000000000000000000000000000000000000000000000000613a0acf"
),
},
"subtraces": 0,
"traceAddress": [1],
"transactionHash": "0xd1118a18e43777636ccef0cafa5de58c3b0c6800454606342ba46a662828a8c6",
"transactionPosition": 23,
"type": "call",
},
{
"action": {
"from": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
"gas": 94841,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x022c0d9f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000015a3896d5772879700000000000000000000000011111112542d85b3ef69ae05771c2dccff4faa2600000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000"
),
"to": "0x84d1f4BBD0FB53b9a09e95E051f2fe1bF3e01e6A",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 47804, "output": HexBytes("0x")},
"subtraces": 3,
"traceAddress": [2],
"transactionHash": "0xd1118a18e43777636ccef0cafa5de58c3b0c6800454606342ba46a662828a8c6",
"transactionPosition": 23,
"type": "call",
},
{
"action": {
"from": "0x84d1f4BBD0FB53b9a09e95E051f2fe1bF3e01e6A",
"gas": 80163,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb00000000000000000000000011111112542d85b3ef69ae05771c2dccff4faa2600000000000000000000000000000000000000000000000015a3896d57728797"
),
"to": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 12862,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [2, 0],
"transactionHash": "0xd1118a18e43777636ccef0cafa5de58c3b0c6800454606342ba46a662828a8c6",
"transactionPosition": 23,
"type": "call",
},
{
"action": {
"from": "0x84d1f4BBD0FB53b9a09e95E051f2fe1bF3e01e6A",
"gas": 66901,
"value": 0,
"callType": "staticcall",
"input": HexBytes(
"0x70a0823100000000000000000000000084d1f4bbd0fb53b9a09e95e051f2fe1bf3e01e6a"
),
"to": "0x6286A9e6f7e745A6D884561D88F94542d6715698",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 585,
"output": HexBytes(
"0x00000000000000000000000000000000000000000003d6204e2af973493ed5d2"
),
},
"subtraces": 0,
"traceAddress": [2, 1],
"transactionHash": "0xd1118a18e43777636ccef0cafa5de58c3b0c6800454606342ba46a662828a8c6",
"transactionPosition": 23,
"type": "call",
},
{
"action": {
"from": "0x84d1f4BBD0FB53b9a09e95E051f2fe1bF3e01e6A",
"gas": 65919,
"value": 0,
"callType": "staticcall",
"input": HexBytes(
"0x70a0823100000000000000000000000084d1f4bbd0fb53b9a09e95e051f2fe1bf3e01e6a"
),
"to": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 534,
"output": HexBytes(
"0x000000000000000000000000000000000000000000000007400fc9711a803a0f"
),
},
"subtraces": 0,
"traceAddress": [2, 2],
"transactionHash": "0xd1118a18e43777636ccef0cafa5de58c3b0c6800454606342ba46a662828a8c6",
"transactionPosition": 23,
"type": "call",
},
{
"action": {
"from": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
"gas": 47597,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x2e1a7d4d00000000000000000000000000000000000000000000000015a3896d57728797"
),
"to": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 9219, "output": HexBytes("0x")},
"subtraces": 1,
"traceAddress": [3],
"transactionHash": "0xd1118a18e43777636ccef0cafa5de58c3b0c6800454606342ba46a662828a8c6",
"transactionPosition": 23,
"type": "call",
},
{
"action": {
"from": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
"gas": 2300,
"value": 1559240998711887767,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 79, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [3, 0],
"transactionHash": "0xd1118a18e43777636ccef0cafa5de58c3b0c6800454606342ba46a662828a8c6",
"transactionPosition": 23,
"type": "call",
},
{
"action": {
"from": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
"gas": 31832,
"value": 1559240998711887767,
"callType": "call",
"input": HexBytes("0x"),
"to": "0xfa35113163bFD33c18A01d1A62d4D14a1Ed30a42",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [4],
"transactionHash": "0xd1118a18e43777636ccef0cafa5de58c3b0c6800454606342ba46a662828a8c6",
"transactionPosition": 23,
"type": "call",
},
{
"action": {
"from": "0xACc300998060e519d10977e25f8ef2455f5330f7",
"gas": 28478,
"value": 10000000000000000,
"callType": "call",
"input": HexBytes("0xd0e30db0"),
"to": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 23974, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x1ea951dcaf2bda8a8fb8251b2592561f35af0c879d78f34cd4ef9110f1b1b4c7",
"transactionPosition": 24,
"type": "call",
},
{
"action": {
"from": "0xc4F9F5F9910a59A58dB92a5ceEd80Ce1725C8855",
"gas": 80836,
"value": 80000000000000000,
"callType": "call",
"input": HexBytes(
"0xba93c39c000000000000000000000000fc7b1dad07111c77c5d619043d75ac9a19680760000000000000000000000000c4f9f5f9910a59a58db92a5ceed80ce1725c88550000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48000000000000000000000000000000000000000000000000011c37937e080000000000000000000000000000000000000000000000000000011c37937e0800000000000000000000000000000000000000000000000000000000000010ea46140000000000000000000000000000000000000000000000000000000000c94a6e000000000000000000000000000000000000000000000000000000000000003f00000000000000000000000000000000000000000000000000000000000000000024cf29ee0de9b595a6dbddea0e2896d74de8ecbff141582259bb6b1513730200000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000041f8a8837c1fa4ec249ce079090fede86bfbf2273d2523186e0621e44af760eaf13d0c18a82851be01f690eee7cb71e8e2c470cf6552a5063deec2dc96ab6c193d1b00000000000000000000000000000000000000000000000000000000000000"
),
"to": "0xa18607cA4A3804CC3Cd5730eafeFcC47a7641643",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 78872, "output": HexBytes("0x")},
"subtraces": 1,
"traceAddress": [],
"transactionHash": "0x28790ea8588f1c24cbc3dacb95bad48556df979755563626652a9b3aa55799bf",
"transactionPosition": 25,
"type": "call",
},
{
"action": {
"from": "0xa18607cA4A3804CC3Cd5730eafeFcC47a7641643",
"gas": 68089,
"value": 80000000000000000,
"callType": "call",
"input": HexBytes(
"0xecc0661a000000000000000000000000c4f9f5f9910a59a58db92a5ceed80ce1725c88550000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48000000000000000000000000000000000000000000000000011c37937e080000000000000000000000000000000000000000000000000000011c37937e0800000000000000000000000000000000000000000000000000000000000010ea4614000000000000000000000000000000000000000000000000000000000000003f00000000000000000000000000000000000000000000000000000000000000000024cf29ee0de9b595a6dbddea0e2896d74de8ecbff141582259bb6b1513730200000000000000000000000000000000000000000000000000000000000001400000000000000000000000000000000000000000000000000000000000000041f8a8837c1fa4ec249ce079090fede86bfbf2273d2523186e0621e44af760eaf13d0c18a82851be01f690eee7cb71e8e2c470cf6552a5063deec2dc96ab6c193d1b00000000000000000000000000000000000000000000000000000000000000"
),
"to": "0xfc7b1daD07111c77c5d619043D75aC9A19680760",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 67101, "output": HexBytes("0x")},
"subtraces": 1,
"traceAddress": [0],
"transactionHash": "0x28790ea8588f1c24cbc3dacb95bad48556df979755563626652a9b3aa55799bf",
"transactionPosition": 25,
"type": "call",
},
{
"action": {
"from": "0xfc7b1daD07111c77c5d619043D75aC9A19680760",
"gas": 64354,
"value": 80000000000000000,
"callType": "delegatecall",
"input": HexBytes(
"0xecc0661a000000000000000000000000c4f9f5f9910a59a58db92a5ceed80ce1725c88550000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48000000000000000000000000000000000000000000000000011c37937e080000000000000000000000000000000000000000000000000000011c37937e0800000000000000000000000000000000000000000000000000000000000010ea4614000000000000000000000000000000000000000000000000000000000000003f00000000000000000000000000000000000000000000000000000000000000000024cf29ee0de9b595a6dbddea0e2896d74de8ecbff141582259bb6b1513730200000000000000000000000000000000000000000000000000000000000001400000000000000000000000000000000000000000000000000000000000000041f8a8837c1fa4ec249ce079090fede86bfbf2273d2523186e0621e44af760eaf13d0c18a82851be01f690eee7cb71e8e2c470cf6552a5063deec2dc96ab6c193d1b00000000000000000000000000000000000000000000000000000000000000"
),
"to": "0x20EF25713c37855fbB8ED483eFDDFF9407442650",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 64354, "output": HexBytes("0x")},
"subtraces": 1,
"traceAddress": [0, 0],
"transactionHash": "0x28790ea8588f1c24cbc3dacb95bad48556df979755563626652a9b3aa55799bf",
"transactionPosition": 25,
"type": "call",
},
{
"action": {
"from": "0xfc7b1daD07111c77c5d619043D75aC9A19680760",
"gas": 46228,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb000000000000000000000000c4f9f5f9910a59a58db92a5ceed80ce1725c88550000000000000000000000000000000000000000000000000000000010ea4614"
),
"to": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 44017,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 1,
"traceAddress": [0, 0, 0],
"transactionHash": "0x28790ea8588f1c24cbc3dacb95bad48556df979755563626652a9b3aa55799bf",
"transactionPosition": 25,
"type": "call",
},
{
"action": {
"from": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
"gas": 38374,
"value": 0,
"callType": "delegatecall",
"input": HexBytes(
"0xa9059cbb000000000000000000000000c4f9f5f9910a59a58db92a5ceed80ce1725c88550000000000000000000000000000000000000000000000000000000010ea4614"
),
"to": "0xa2327a938Febf5FEC13baCFb16Ae10EcBc4cbDCF",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 36728,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [0, 0, 0, 0],
"transactionHash": "0x28790ea8588f1c24cbc3dacb95bad48556df979755563626652a9b3aa55799bf",
"transactionPosition": 25,
"type": "call",
},
{
"action": {
"from": "0x0211677061fB97872dD015d23fe44F3A0066Ccc3",
"gas": 25197,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa22cb465000000000000000000000000e987cbec33f573b020e7c5672f8008d847ab8b420000000000000000000000000000000000000000000000000000000000000001"
),
"to": "0x9640C1a69eadD073D273D75028a1D233CD63016C",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 25197, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xaaae290a863c7a364941fa047c5c18d0ea0e30f1cb3401537e274576a227b1b0",
"transactionPosition": 26,
"type": "call",
},
{
"action": {
"from": "0xAe45a8240147E6179ec7c9f92c5A18F9a97B3fCA",
"gas": 0,
"value": 5975748000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x61296A581598F58fA5841B44904D376eDA01127A",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xc1338a392bbb9271c0de47705148127ed3685159918424a2431e085d67b0ef26",
"transactionPosition": 27,
"type": "call",
},
{
"action": {
"from": "0x1E9bABdb4743DbaAf287352BC9D3d8c31B0ff327",
"gas": 162307,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x875b4f6300000000000000000000000000000000000000000000000000000000000000600000000000000000000000002f8a0eecb02a2aa17e3db6de777ea0d941984cfa0000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000fa00000000000000000000000000000000000000000000000000000000000002b0000000000000000000000000000000000000000000000000000000000000019d000000000000000000000000000000000000000000000000000000000000055a00000000000000000000000000000000000000000000000000000000000001bd00000000000000000000000000000000000000000000000000000000000000414fbe56bde7330228ad9399e9513544bcee5376331067d3ef651d2b41fb14df10039175fd80a5131928082c52e1f9fd497c3dc7b7d09334d6e5663b09728579721b00000000000000000000000000000000000000000000000000000000000000"
),
"to": "0x00D07C53E70338c376cF6ab2A5218d8643115084",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 160190, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x662483ec1961250e0f24c9adffcf8d96d43418f573448867ecbdf65a31dd331c",
"transactionPosition": 28,
"type": "call",
},
{
"action": {
"from": "0x7F9Bb16Bb280D93e6e465420fD02d28b3A8fbc5f",
"gas": 211566,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x5f5755290000000000000000000000000000000000000000000000000000000000000080000000000000000000000000217ddead61a42369a266f1fb754eb5d3ebadc88a00000000000000000000000000000000000000000000004e184ccf4dc2aa7a8800000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000136f6e65496e6368563346656544796e616d6963000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000217ddead61a42369a266f1fb754eb5d3ebadc88a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004e184ccf4dc2aa7a8800000000000000000000000000000000000000000000000004476f38446e3da000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000009f7fe35818eae00000000000000000000000011ededebf63bef0ea2d2d071bdf88f71543ec6fb000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000c42e95b6c8000000000000000000000000217ddead61a42369a266f1fb754eb5d3ebadc88a00000000000000000000000000000000000000000000004e184ccf4dc2aa7a8800000000000000000000000000000000000000000000000004511aa72b8812440000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000140000000000000003b6d0340643b47d668f7bd78e0eeaa574b0d185c46ef079c00000000000000000000000000000000000000000000000000000000ab"
),
"to": "0x881D40237659C251811CEC9c364ef91dC08D300C",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 179647, "output": HexBytes("0x")},
"subtraces": 2,
"traceAddress": [],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x881D40237659C251811CEC9c364ef91dC08D300C",
"gas": 196872,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x23b872dd0000000000000000000000007f9bb16bb280d93e6e465420fd02d28b3a8fbc5f00000000000000000000000074de5d4fcbf63e00296fd95d33236b979401663100000000000000000000000000000000000000000000004e184ccf4dc2aa7a88"
),
"to": "0x217ddEad61a42369A266F1Fb754EB5d3EBadc88a",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 38792,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [0],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x881D40237659C251811CEC9c364ef91dC08D300C",
"gas": 149180,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xe35473350000000000000000000000004fed27eac9c2477b8c14ee8bada444bd4654f8330000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000022492f5f0370000000000000000000000007f9bb16bb280d93e6e465420fd02d28b3a8fbc5f000000000000000000000000217ddead61a42369a266f1fb754eb5d3ebadc88a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004e184ccf4dc2aa7a8800000000000000000000000000000000000000000000000004476f38446e3da000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000009f7fe35818eae00000000000000000000000011ededebf63bef0ea2d2d071bdf88f71543ec6fb000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000c42e95b6c8000000000000000000000000217ddead61a42369a266f1fb754eb5d3ebadc88a00000000000000000000000000000000000000000000004e184ccf4dc2aa7a8800000000000000000000000000000000000000000000000004511aa72b8812440000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000140000000000000003b6d0340643b47d668f7bd78e0eeaa574b0d185c46ef079c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
),
"to": "0x74de5d4FCbf63E00296fd95d33236B9794016631",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 117818, "output": HexBytes("0x")},
"subtraces": 1,
"traceAddress": [1],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x74de5d4FCbf63E00296fd95d33236B9794016631",
"gas": 141939,
"value": 0,
"callType": "delegatecall",
"input": HexBytes(
"0x92f5f0370000000000000000000000007f9bb16bb280d93e6e465420fd02d28b3a8fbc5f000000000000000000000000217ddead61a42369a266f1fb754eb5d3ebadc88a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004e184ccf4dc2aa7a8800000000000000000000000000000000000000000000000004476f38446e3da000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000009f7fe35818eae00000000000000000000000011ededebf63bef0ea2d2d071bdf88f71543ec6fb000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000c42e95b6c8000000000000000000000000217ddead61a42369a266f1fb754eb5d3ebadc88a00000000000000000000000000000000000000000000004e184ccf4dc2aa7a8800000000000000000000000000000000000000000000000004511aa72b8812440000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000140000000000000003b6d0340643b47d668f7bd78e0eeaa574b0d185c46ef079c00000000000000000000000000000000000000000000000000000000"
),
"to": "0x4fEd27Eac9C2477B8c14Ee8baDA444BD4654F833",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 112694, "output": HexBytes("0x")},
"subtraces": 5,
"traceAddress": [1, 0],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x74de5d4FCbf63E00296fd95d33236B9794016631",
"gas": 138838,
"value": 0,
"callType": "staticcall",
"input": HexBytes(
"0xdd62ed3e00000000000000000000000074de5d4fcbf63e00296fd95d33236b979401663100000000000000000000000011111112542d85b3ef69ae05771c2dccff4faa26"
),
"to": "0x217ddEad61a42369A266F1Fb754EB5d3EBadc88a",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 3241,
"output": HexBytes(
"0xffffffffffffffffffffffffffffffffffffffffffff650eadd360798758cea6"
),
},
"subtraces": 0,
"traceAddress": [1, 0, 0],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x74de5d4FCbf63E00296fd95d33236B9794016631",
"gas": 131716,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x2e95b6c8000000000000000000000000217ddead61a42369a266f1fb754eb5d3ebadc88a00000000000000000000000000000000000000000000004e184ccf4dc2aa7a8800000000000000000000000000000000000000000000000004511aa72b8812440000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000140000000000000003b6d0340643b47d668f7bd78e0eeaa574b0d185c46ef079c"
),
"to": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 85953,
"output": HexBytes(
"0x00000000000000000000000000000000000000000000000004734857e2fb2007"
),
},
"subtraces": 5,
"traceAddress": [1, 0, 1],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
"gas": 128791,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x23b872dd00000000000000000000000074de5d4fcbf63e00296fd95d33236b9794016631000000000000000000000000643b47d668f7bd78e0eeaa574b0d185c46ef079c00000000000000000000000000000000000000000000004e184ccf4dc2aa7a88"
),
"to": "0x217ddEad61a42369A266F1Fb754EB5d3EBadc88a",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 14892,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [1, 0, 1, 0],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
"gas": 111388,
"value": 0,
"callType": "staticcall",
"input": HexBytes("0x0902f1ac"),
"to": "0x643b47D668f7BD78E0EeaA574b0d185c46Ef079C",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 2504,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000003f3c270741545064e830000000000000000000000000000000000000000000000003e47aebd48870924000000000000000000000000000000000000000000000000000000006139fc25"
),
},
"subtraces": 0,
"traceAddress": [1, 0, 1, 1],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
"gas": 108589,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x022c0d9f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004734857e2fb200700000000000000000000000011111112542d85b3ef69ae05771c2dccff4faa2600000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000"
),
"to": "0x643b47D668f7BD78E0EeaA574b0d185c46Ef079C",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 48105, "output": HexBytes("0x")},
"subtraces": 3,
"traceAddress": [1, 0, 1, 2],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x643b47D668f7BD78E0EeaA574b0d185c46Ef079C",
"gas": 93696,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb00000000000000000000000011111112542d85b3ef69ae05771c2dccff4faa2600000000000000000000000000000000000000000000000004734857e2fb2007"
),
"to": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 12862,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [1, 0, 1, 2, 0],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x643b47D668f7BD78E0EeaA574b0d185c46Ef079C",
"gas": 80434,
"value": 0,
"callType": "staticcall",
"input": HexBytes(
"0x70a08231000000000000000000000000643b47d668f7bd78e0eeaa574b0d185c46ef079c"
),
"to": "0x217ddEad61a42369A266F1Fb754EB5d3EBadc88a",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 886,
"output": HexBytes(
"0x000000000000000000000000000000000000000000000441dabd436307b0c90b"
),
},
"subtraces": 0,
"traceAddress": [1, 0, 1, 2, 1],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x643b47D668f7BD78E0EeaA574b0d185c46Ef079C",
"gas": 79156,
"value": 0,
"callType": "staticcall",
"input": HexBytes(
"0x70a08231000000000000000000000000643b47d668f7bd78e0eeaa574b0d185c46ef079c"
),
"to": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 534,
"output": HexBytes(
"0x00000000000000000000000000000000000000000000000039d46665658be91d"
),
},
"subtraces": 0,
"traceAddress": [1, 0, 1, 2, 2],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
"gas": 61048,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x2e1a7d4d00000000000000000000000000000000000000000000000004734857e2fb2007"
),
"to": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 9219, "output": HexBytes("0x")},
"subtraces": 1,
"traceAddress": [1, 0, 1, 3],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
"gas": 2300,
"value": 320679540780900359,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 79, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [1, 0, 1, 3, 0],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x11111112542D85B3EF69AE05771c2dCCff4fAa26",
"gas": 45283,
"value": 320679540780900359,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x74de5d4FCbf63E00296fd95d33236B9794016631",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 40, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [1, 0, 1, 4],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x74de5d4FCbf63E00296fd95d33236B9794016631",
"gas": 37538,
"value": 2805945981832878,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x11eDedebF63bef0ea2d2D071bdF88F71543ec6fB",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [1, 0, 2],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x74de5d4FCbf63E00296fd95d33236B9794016631",
"gas": 37015,
"value": 0,
"callType": "staticcall",
"input": HexBytes(
"0x70a0823100000000000000000000000074de5d4fcbf63e00296fd95d33236b9794016631"
),
"to": "0x217ddEad61a42369A266F1Fb754EB5d3EBadc88a",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 886,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000000"
),
},
"subtraces": 0,
"traceAddress": [1, 0, 3],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0x74de5d4FCbf63E00296fd95d33236B9794016631",
"gas": 28967,
"value": 317873594799067481,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x7F9Bb16Bb280D93e6e465420fD02d28b3A8fbc5f",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [1, 0, 4],
"transactionHash": "0x7cd9fdf8ba8f3378d428e1cbb353a28142b28946840353df07b1140b5849b850",
"transactionPosition": 29,
"type": "call",
},
{
"action": {
"from": "0xb5d85CBf7cB3EE0D56b3bB207D5Fc4B82f43F511",
"gas": 0,
"value": 67660500000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x59b6E0185a290aC466A6c4B60093e33afeC7169b",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xfc4763a159754d4839c1ca85382ba5f66be2cbfc33df28005e33c4928b541f97",
"transactionPosition": 30,
"type": "call",
},
{
"action": {
"from": "0x3cD751E6b0078Be393132286c442345e5DC49699",
"gas": 0,
"value": 70391500000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x1521A41240C40Cf441cc68dD7E0EED06e3dC72fF",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x709198fe8195723ab00c2edf265ded5267dff4a7dcda49098a4783b4473b115e",
"transactionPosition": 31,
"type": "call",
},
{
"action": {
"from": "0xddfAbCdc4D8FfC6d5beaf154f18B778f892A0740",
"gas": 0,
"value": 119404330000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0xA837149C978776B322fC7A6245a46AE89a4c5385",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xaa0a0d4f8bf778e909c48dcd13397140bfb791d86acf35a14f5271e44cfeac8f",
"transactionPosition": 32,
"type": "call",
},
{
"action": {
"from": "0x71660c4005BA85c37ccec55d0C4493E66Fe775d3",
"gas": 228404,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb000000000000000000000000f068633504bf13523ed3c976c33bd842502b377b0000000000000000000000000000000000000000000000000000000037131f00"
),
"to": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 44017,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 1,
"traceAddress": [],
"transactionHash": "0xb3d846d3937d048de62e482bc19d5fc8447a74fdd3b709b64b52b33da0ffcdab",
"transactionPosition": 33,
"type": "call",
},
{
"action": {
"from": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
"gas": 217704,
"value": 0,
"callType": "delegatecall",
"input": HexBytes(
"0xa9059cbb000000000000000000000000f068633504bf13523ed3c976c33bd842502b377b0000000000000000000000000000000000000000000000000000000037131f00"
),
"to": "0xa2327a938Febf5FEC13baCFb16Ae10EcBc4cbDCF",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 36728,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [0],
"transactionHash": "0xb3d846d3937d048de62e482bc19d5fc8447a74fdd3b709b64b52b33da0ffcdab",
"transactionPosition": 33,
"type": "call",
},
{
"action": {
"from": "0x538CD83410D14d615590fb370E008F839CEA6024",
"gas": 0,
"value": 45600000000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0xCF5A1c6E4e157d63883f3aE9E62cAD2729838580",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x1ef77ab2e12ad5808999b5ee99ee93aa24a559849c7e2a9096ffbe7d56353b9b",
"transactionPosition": 34,
"type": "call",
},
{
"action": {
"from": "0x0C86284199fB87A0b391b02883b6613816393bFE",
"gas": 50968,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x095ea7b30000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488dffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
),
"to": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 38367,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 1,
"traceAddress": [],
"transactionHash": "0xc7c7d796f48e336a5d895e064116ef21f1ba73826ab7b4cba208df47c03ce4ac",
"transactionPosition": 35,
"type": "call",
},
{
"action": {
"from": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
"gas": 43040,
"value": 0,
"callType": "delegatecall",
"input": HexBytes(
"0x095ea7b30000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488dffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
),
"to": "0xa2327a938Febf5FEC13baCFb16Ae10EcBc4cbDCF",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 31078,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [0],
"transactionHash": "0xc7c7d796f48e336a5d895e064116ef21f1ba73826ab7b4cba208df47c03ce4ac",
"transactionPosition": 35,
"type": "call",
},
{
"action": {
"from": "0x02736d5c8dcea65539993d143A3DE90ceBcA9c3c",
"gas": 160566,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xac9650d800000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000104414bf389000000000000000000000000ba7970f10d9f0531941dced1dda7ef3016b24e5b000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000000000000000000000000000000000000000002710000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000613a11bc0000000000000000000000000000000000000000000000375c21cee45ab3852000000000000000000000000000000000000000000000000000eab1df8814dbac000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004449404b7c00000000000000000000000000000000000000000000000000eab1df8814dbac00000000000000000000000002736d5c8dcea65539993d143a3de90cebca9c3c00000000000000000000000000000000000000000000000000000000"
),
"to": "0xE592427A0AEce92De3Edee1F18E0157C05861564",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 128244,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000f66df78215e6a80000000000000000000000000000000000000000000000000000000000000000"
),
},
"subtraces": 2,
"traceAddress": [],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0xE592427A0AEce92De3Edee1F18E0157C05861564",
"gas": 156855,
"value": 0,
"callType": "delegatecall",
"input": HexBytes(
"0x414bf389000000000000000000000000ba7970f10d9f0531941dced1dda7ef3016b24e5b000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000000000000000000000000000000000000000002710000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000613a11bc0000000000000000000000000000000000000000000000375c21cee45ab3852000000000000000000000000000000000000000000000000000eab1df8814dbac0000000000000000000000000000000000000000000000000000000000000000"
),
"to": "0xE592427A0AEce92De3Edee1F18E0157C05861564",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 107077,
"output": HexBytes(
"0x00000000000000000000000000000000000000000000000000f66df78215e6a8"
),
},
"subtraces": 1,
"traceAddress": [0],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0xE592427A0AEce92De3Edee1F18E0157C05861564",
"gas": 147471,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x128acb08000000000000000000000000e592427a0aece92de3edee1f18e0157c0586156400000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000375c21cee45ab3852000000000000000000000000000000000000000000000000000000001000276a400000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000c00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000004000000000000000000000000002736d5c8dcea65539993d143a3de90cebca9c3c000000000000000000000000000000000000000000000000000000000000002bba7970f10d9f0531941dced1dda7ef3016b24e5b002710c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000000000000000000000"
),
"to": "0xC00C5977395664267c118d71569DCCF4BC37bF5F",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 99638,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000375c21cee45ab38520ffffffffffffffffffffffffffffffffffffffffffffffffff0992087dea1958"
),
},
"subtraces": 4,
"traceAddress": [0, 0],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0xC00C5977395664267c118d71569DCCF4BC37bF5F",
"gas": 110017,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa9059cbb000000000000000000000000e592427a0aece92de3edee1f18e0157c0586156400000000000000000000000000000000000000000000000000f66df78215e6a8"
),
"to": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 29962,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [0, 0, 0],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0xC00C5977395664267c118d71569DCCF4BC37bF5F",
"gas": 77191,
"value": 0,
"callType": "staticcall",
"input": HexBytes(
"0x70a08231000000000000000000000000c00c5977395664267c118d71569dccf4bc37bf5f"
),
"to": "0xbA7970f10D9f0531941DcEd1dda7ef3016B24e5b",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 2577,
"output": HexBytes(
"0x000000000000000000000000000000000000000000037a542567ad1af50e9ee7"
),
},
"subtraces": 0,
"traceAddress": [0, 0, 1],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0xC00C5977395664267c118d71569DCCF4BC37bF5F",
"gas": 73864,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xfa461e330000000000000000000000000000000000000000000000375c21cee45ab38520ffffffffffffffffffffffffffffffffffffffffffffffffff0992087dea1958000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000c00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000004000000000000000000000000002736d5c8dcea65539993d143a3de90cebca9c3c000000000000000000000000000000000000000000000000000000000000002bba7970f10d9f0531941dced1dda7ef3016b24e5b002710c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000000000000000000000"
),
"to": "0xE592427A0AEce92De3Edee1F18E0157C05861564",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 22278, "output": HexBytes("0x")},
"subtraces": 1,
"traceAddress": [0, 0, 2],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0xE592427A0AEce92De3Edee1F18E0157C05861564",
"gas": 69034,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x23b872dd00000000000000000000000002736d5c8dcea65539993d143a3de90cebca9c3c000000000000000000000000c00c5977395664267c118d71569dccf4bc37bf5f0000000000000000000000000000000000000000000000375c21cee45ab38520"
),
"to": "0xbA7970f10D9f0531941DcEd1dda7ef3016B24e5b",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 18222,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [0, 0, 2, 0],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0xC00C5977395664267c118d71569DCCF4BC37bF5F",
"gas": 51301,
"value": 0,
"callType": "staticcall",
"input": HexBytes(
"0x70a08231000000000000000000000000c00c5977395664267c118d71569dccf4bc37bf5f"
),
"to": "0xbA7970f10D9f0531941DcEd1dda7ef3016B24e5b",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 577,
"output": HexBytes(
"0x000000000000000000000000000000000000000000037a8b81897bff4fc22407"
),
},
"subtraces": 0,
"traceAddress": [0, 0, 3],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0xE592427A0AEce92De3Edee1F18E0157C05861564",
"gas": 50754,
"value": 0,
"callType": "delegatecall",
"input": HexBytes(
"0x49404b7c00000000000000000000000000000000000000000000000000eab1df8814dbac00000000000000000000000002736d5c8dcea65539993d143a3de90cebca9c3c"
),
"to": "0xE592427A0AEce92De3Edee1F18E0157C05861564",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 18173, "output": HexBytes("0x")},
"subtraces": 3,
"traceAddress": [1],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0xE592427A0AEce92De3Edee1F18E0157C05861564",
"gas": 49257,
"value": 0,
"callType": "staticcall",
"input": HexBytes(
"0x70a08231000000000000000000000000e592427a0aece92de3edee1f18e0157c05861564"
),
"to": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 534,
"output": HexBytes(
"0x00000000000000000000000000000000000000000000000000f66df78215e6a8"
),
},
"subtraces": 0,
"traceAddress": [1, 0],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0xE592427A0AEce92De3Edee1F18E0157C05861564",
"gas": 48288,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x2e1a7d4d00000000000000000000000000000000000000000000000000f66df78215e6a8"
),
"to": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 9223, "output": HexBytes("0x")},
"subtraces": 1,
"traceAddress": [1, 1],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
"gas": 2300,
"value": 69363754077644456,
"callType": "call",
"input": HexBytes("0x"),
"to": "0xE592427A0AEce92De3Edee1F18E0157C05861564",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 83, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [1, 1, 0],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0xE592427A0AEce92De3Edee1F18E0157C05861564",
"gas": 32209,
"value": 69363754077644456,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x02736d5c8dcea65539993d143A3DE90ceBcA9c3c",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [1, 2],
"transactionHash": "0x2407e6b8a5be763a5f8c280f1ae10ff000ebaf75e1530a0f3651987ba311b2ba",
"transactionPosition": 36,
"type": "call",
},
{
"action": {
"from": "0x73800459807528072A3B2eD217c1De72F28514f3",
"gas": 734311,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xcb133b0f0000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000d8800000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000005000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000001"
),
"to": "0xC0981Df196dc6c6fb8673B912B07956256D7e9fF",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 734311, "output": HexBytes("0x")},
"subtraces": 4,
"traceAddress": [],
"transactionHash": "0xf5e2ebf727d16274508ca3bd5f26929327e3a1fec8b6aafae19d42972547c153",
"transactionPosition": 37,
"type": "call",
},
{
"action": {
"from": "0xC0981Df196dc6c6fb8673B912B07956256D7e9fF",
"gas": 716086,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x23b872dd00000000000000000000000073800459807528072a3b2ed217c1de72f28514f3000000000000000000000000c0981df196dc6c6fb8673b912b07956256d7e9ff0000000000000000000000000000000000000000000000000000000000000d88"
),
"to": "0xdEcC60000ba66700a009b8F9F7D82676B5cfA88A",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 76963, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [0],
"transactionHash": "0xf5e2ebf727d16274508ca3bd5f26929327e3a1fec8b6aafae19d42972547c153",
"transactionPosition": 37,
"type": "call",
},
{
"action": {
"from": "0xC0981Df196dc6c6fb8673B912B07956256D7e9fF",
"gas": 453672,
"value": 0,
"callType": "staticcall",
"input": HexBytes("0x95d89b41"),
"to": "0xa6233451039230fAe712371dD7526f6Df7625E1f",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 3294,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000449524f4e00000000000000000000000000000000000000000000000000000000"
),
},
"subtraces": 0,
"traceAddress": [1],
"transactionHash": "0xf5e2ebf727d16274508ca3bd5f26929327e3a1fec8b6aafae19d42972547c153",
"transactionPosition": 37,
"type": "call",
},
{
"action": {
"from": "0xC0981Df196dc6c6fb8673B912B07956256D7e9fF",
"gas": 444267,
"value": 0,
"callType": "staticcall",
"input": HexBytes(
"0x0f14d01a00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000034000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000d8800000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000001a000000000000000000000000000000000000000000000000000000000000001e000000000000000000000000000000000000000000000000000000000000002200000000000000000000000000000000000000000000000000000000000000260000000000000000000000000000000000000000000000000000000000000000743617069746f6c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000641737472616c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000084d6564696576616c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000055761746572000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757617272696e6700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c4469637461746f72736869700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000856616c68616c6c61000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000449524f4e00000000000000000000000000000000000000000000000000000000"
),
"to": "0x787D1B8bFe2142af127e62dcc15D63D6D708f85F",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 191993,
"output": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000006cd646174613a6170706c69636174696f6e2f6a736f6e3b6261736536342c65794a755957316c496a6f67496c4e6c644852735a57316c626e5167497a4d304e6a51694c4341695a47567a59334a7063485270623234694f694169553256306447786c6257567564484d6759584a6c49474567644856796269426959584e6c5a43426a61585a7062476c7a595852706232346763326c74645778686447397949484e3062334a6c5a43426c626e5270636d5673655342766269426a61474670626977675a3238675a6d3979644767675957356b49474e76626e46315a58497549697767496d6c745957646c496a6f67496d526864474536615731685a32557663335a6e4b3368746244746959584e6c4e6a51735545684f4d6c7035516a52695633683159336f7761574649556a426a5247393254444e6b4d3252354e544e4e655456325932316a646b31715158644e517a6c365a47316a61556c49516e6c61574535735932356162464659546e646156303477565731474d4746584f446c4a626d684f59566331576c52586248564a527a4673576c685261556c49576e426157475244596a4e6e4f556c715157644e51304636546c52425a3031365658644a616a5134597a4e534e574a485653744d626c49305a454e434e306c48576e4269523363325355644b63316c58546e4a5065554a74596a49314d457858576d68695632787a5a5652765a324a584f5856694d3035335756644f62453935516d31694d6a55775446684f6347567456545a4a524556355930686e4e325a5564335a6a4d314931596b64564b314249536d785a4d31466e5a444a73613252485a7a6c4a616b563354554e5661556c4861477868563252765a455177615531555158644b55306c6e576d317363324a454d476c6b4d6d68775a45645661556c444f43745153464a735a5568525a3256454d476c4e56454670535568724f556c715358644a61554a71596b6447656d4e364d476c6b5347677753576f3152466c59516e426b527a6c7a55454d354d46705961444251616e6777576c686f4d456c495a7a6c4a616b563353576c434e5642545354424e51306c6e57544a3461474d7a54546c4a626c49305a454e4a4b314659546a426a62555a7a55454d354d46705961444251616e6777576c686f4d456c495a7a6c4a616b563353576c434e56425453544a4e51306c6e57544a3461474d7a54546c4a626c49305a454e4a4b315258566d746856315979575664334f45777a556d786c5346457255456853624756495557646c5244427054565242615
56c49617a6c4a616d643353576c43616d4a48526e706a656a42705a45686f4d456c714e56685a57464a7359327033646d5248566a526b524451345a4564574e475244516a525155306c3454554e4a5a3256554d476c4e5645463353576c43616d4a48526e706a656a42705a45686f4d456c714e56685a5745703559566331626c42444f54426157476777554770344d4670596144424a534763355357704664306c70516a565155306c345457704261556c48546e4e5a5745353655464e4b4d47564955576c5161314a7757544e53614752484f586c6a4d6d687759305233646d5248566a526b524451345a4564574e475244516a525155306c3454554e4a5a3256554d476c4e5646463353576c43616d4a48526e706a656a42705a45686f4d456c714e56645a563368765756643463316c5564335a6b523159305a4551304f45777a546a4a61656a51394969776959585230636d6c696458526c6379493657337367496e527959576c3058335235634755694f69416955326c365a53497349434a32595778315a53493649434a445958427064473973496942394c43423749434a30636d4670644639306558426c496a6f67496c4e7761584a706443497349434a32595778315a53493649434a4263335279595777694948307349487367496e527959576c3058335235634755694f6941695157646c49697767496e5a686248566c496a6f67496b316c5a476c6c646d4673496942394c43423749434a30636d4670644639306558426c496a6f67496c4a6c63323931636d4e6c49697767496e5a686248566c496a6f67496c646864475679496942394c43423749434a30636d4670644639306558426c496a6f67496b3176636d46735a53497349434a32595778315a53493649434a5859584a796157356e496942394c43423749434a30636d4670644639306558426c496a6f67496b6476646d5679626d316c626e51694c434169646d4673645755694f69416952476c6a6447463062334a7a61476c77496942394c43423749434a30636d4670644639306558426c496a6f67496c4a6c5957787449697767496e5a686248566c496a6f67496c5a686247686862477868496942395858303d00000000000000000000000000000000000000"
),
},
"subtraces": 0,
"traceAddress": [2],
"transactionHash": "0xf5e2ebf727d16274508ca3bd5f26929327e3a1fec8b6aafae19d42972547c153",
"transactionPosition": 37,
"type": "call",
},
{
"action": {
"from": "0xC0981Df196dc6c6fb8673B912B07956256D7e9fF",
"gas": 245691,
"value": 0,
"callType": "staticcall",
"input": HexBytes(
"0xc87b56dd0000000000000000000000000000000000000000000000000000000000000d88"
),
"to": "0xdEcC60000ba66700a009b8F9F7D82676B5cfA88A",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 217795,
"output": HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000006cd646174613a6170706c69636174696f6e2f6a736f6e3b6261736536342c65794a755957316c496a6f67496c4e6c644852735a57316c626e5167497a4d304e6a51694c4341695a47567a59334a7063485270623234694f694169553256306447786c6257567564484d6759584a6c49474567644856796269426959584e6c5a43426a61585a7062476c7a595852706232346763326c74645778686447397949484e3062334a6c5a43426c626e5270636d5673655342766269426a61474670626977675a3238675a6d3979644767675957356b49474e76626e46315a58497549697767496d6c745957646c496a6f67496d526864474536615731685a32557663335a6e4b3368746244746959584e6c4e6a51735545684f4d6c7035516a52695633683159336f7761574649556a426a5247393254444e6b4d3252354e544e4e655456325932316a646b31715158644e517a6c365a47316a61556c49516e6c61574535735932356162464659546e646156303477565731474d4746584f446c4a626d684f59566331576c52586248564a527a4673576c685261556c49576e426157475244596a4e6e4f556c715157644e51304636546c52425a3031365658644a616a5134597a4e534e574a485653744d626c49305a454e434e306c48576e4269523363325355644b63316c58546e4a5065554a74596a49314d457858576d68695632787a5a5652765a324a584f5856694d3035335756644f62453935516d31694d6a55775446684f6347567456545a4a524556355930686e4e325a5564335a6a4d314931596b64564b314249536d785a4d31466e5a444a73613252485a7a6c4a616b563354554e5661556c4861477868563252765a455177615531555158644b55306c6e576d317363324a454d476c6b4d6d68775a45645661556c444f43745153464a735a5568525a3256454d476c4e56454670535568724f556c715358644a61554a71596b6447656d4e364d476c6b5347677753576f3152466c59516e426b527a6c7a55454d354d46705961444251616e6777576c686f4d456c495a7a6c4a616b563353576c434e5642545354424e51306c6e57544a3461474d7a54546c4a626c49305a454e4a4b314659546a426a62555a7a55454d354d46705961444251616e6777576c686f4d456c495a7a6c4a616b563353576c434e56425453544a4e51306c6e57544a3461474d7a54546c4a626c49305a454e4a4b315258566d746856315979575664334f45777a556d786c5346457255456853624756495557646c5244427054565242615
56c49617a6c4a616d643353576c43616d4a48526e706a656a42705a45686f4d456c714e56685a57464a7359327033646d5248566a526b524451345a4564574e475244516a525155306c3454554e4a5a3256554d476c4e5645463353576c43616d4a48526e706a656a42705a45686f4d456c714e56685a5745703559566331626c42444f54426157476777554770344d4670596144424a534763355357704664306c70516a565155306c345457704261556c48546e4e5a5745353655464e4b4d47564955576c5161314a7757544e53614752484f586c6a4d6d687759305233646d5248566a526b524451345a4564574e475244516a525155306c3454554e4a5a3256554d476c4e5646463353576c43616d4a48526e706a656a42705a45686f4d456c714e56645a563368765756643463316c5564335a6b523159305a4551304f45777a546a4a61656a51394969776959585230636d6c696458526c6379493657337367496e527959576c3058335235634755694f69416955326c365a53497349434a32595778315a53493649434a445958427064473973496942394c43423749434a30636d4670644639306558426c496a6f67496c4e7761584a706443497349434a32595778315a53493649434a4263335279595777694948307349487367496e527959576c3058335235634755694f6941695157646c49697767496e5a686248566c496a6f67496b316c5a476c6c646d4673496942394c43423749434a30636d4670644639306558426c496a6f67496c4a6c63323931636d4e6c49697767496e5a686248566c496a6f67496c646864475679496942394c43423749434a30636d4670644639306558426c496a6f67496b3176636d46735a53497349434a32595778315a53493649434a5859584a796157356e496942394c43423749434a30636d4670644639306558426c496a6f67496b6476646d5679626d316c626e51694c434169646d4673645755694f69416952476c6a6447463062334a7a61476c77496942394c43423749434a30636d4670644639306558426c496a6f67496c4a6c5957787449697767496e5a686248566c496a6f67496c5a686247686862477868496942395858303d00000000000000000000000000000000000000"
),
},
"subtraces": 0,
"traceAddress": [3],
"transactionHash": "0xf5e2ebf727d16274508ca3bd5f26929327e3a1fec8b6aafae19d42972547c153",
"transactionPosition": 37,
"type": "call",
},
{
"action": {
"from": "0x3907f6bC753b6A0B0ff1C68cdd3595A940a4C16A",
"gas": 0,
"value": 35400000000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x9e8b2990f80ce4bAEF5cD6b7049e8cCF02813eB1",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xc883289f103aa8ce65340eb880b4e99598707090af3f35ac38d983d59b3c272c",
"transactionPosition": 38,
"type": "call",
},
{
"action": {
"from": "0x3A49309413793b32F6A308769220147feDbFfa5f",
"gas": 24984,
"value": 0,
"callType": "call",
"input": HexBytes(
"0xa22cb465000000000000000000000000e1f3bdd68f24934fe154fcf2c885b58d7cb0eaf60000000000000000000000000000000000000000000000000000000000000001"
),
"to": "0xdDA32aabBBB6c44eFC567baC5F7C35f185338456",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 24984, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xc05e8ce528fd257471f3961ee467d56f80a17f75be6a0b8d11a2bfe25fdb8763",
"transactionPosition": 39,
"type": "call",
},
{
"action": {
"from": "0xd34AE229C5E8493bFC25FA17a7a04A3d72d0a455",
"gas": 0,
"value": 200000000000000000,
"callType": "call",
"input": HexBytes("0x"),
"to": "0x77ACC06250552c8A96e9560670328974386D632F",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 0, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xaae3e3d36a7d210372e22920a103a5cbe695d6c273bfa46eb7bc7bf903c669c4",
"transactionPosition": 40,
"type": "call",
},
{
"action": {
"from": "0xecbeCd7369D708B2fb6489220dd045144F168328",
"gas": 24659,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x095ea7b30000000000000000000000008692e782ea478623f3342e0fb3936f6530c5d54f00000000000000000000000000000000000000000000000000000005b5d429bc"
),
"to": "0x3C4B6E6e1eA3D4863700D7F76b36B7f3D3f13E3d",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 24659,
"output": HexBytes(
"0x0000000000000000000000000000000000000000000000000000000000000001"
),
},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x31aecc7e3c6c9062caf6532ec45a9239b85490fa85396d8c00cfc39f55e9138b",
"transactionPosition": 41,
"type": "call",
},
{
"action": {
"from": "0x229D6a31d0CF2225837DB8C82A6c78De5cDe114d",
"gas": 20840,
"value": 0,
"callType": "call",
"input": HexBytes(
"0x23b872dd000000000000000000000000229d6a31d0cf2225837db8c82a6c78de5cde114d000000000000000000000000bc1eb4359ab755af079f6ef77e3faac465e53eda0000000000000000000000000000000000000000000000000000000000010cdd"
),
"to": "0x50f5474724e0Ee42D9a4e711ccFB275809Fd6d4a",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 20840, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0xb26b3e31995258eecbd92e61f1d222bd16369c83eae85a4760776cb5adef26cf",
"transactionPosition": 42,
"type": "call",
},
{
"action": {
"from": "0x1B320348DcF5Fe741161c87BD321f4170Bf5FE45",
"gas": 495819,
"value": 0,
"callType": "call",
"input": HexBytes("0xddd81f82"),
"to": "0xa5409ec958C83C3f309868babACA7c86DCB077c1",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 376538,
"output": HexBytes(
"0x0000000000000000000000001f4e3e948830f342c9e575155f7929b3512d0788"
),
},
"subtraces": 1,
"traceAddress": [],
"transactionHash": "0x87a0b62f5d2350e2161abfc9071c68a964272ec288c1e00d564437a01d48ef53",
"transactionPosition": 43,
"type": "call",
},
{
"action": {
"from": "0xa5409ec958C83C3f309868babACA7c86DCB077c1",
"gas": 450467,
"value": 0,
"init": HexBytes(
"0x608060405234801561001057600080fd5b506040516105d03803806105d08339810160409081528151602083015191830151909201610046836401000000006100e0810204565b61005882640100000000610102810204565b81600160a060020a03168160405180828051906020019080838360005b8381101561008d578181015183820152602001610075565b50505050905090810190601f1680156100ba5780820380516001836020036101000a031916815260200191505b50915050600060405180830381855af491505015156100d857600080fd5b505050610165565b60018054600160a060020a031916600160a060020a0392909216919091179055565b600054600160a060020a038281169116141561011d57600080fd5b60008054600160a060020a031916600160a060020a038316908117825560405190917fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b91a250565b61045c806101746000396000f3006080604052600436106100825763ffffffff7c0100000000000000000000000000000000000000000000000000000000600035041663025313a281146100c85780633659cfe6146100f95780634555d5c91461011c5780634f1ef286146101435780635c60da1b1461019d5780636fde8202146101b2578063f1739cae146101c7575b600061008c6101e8565b9050600160a060020a03811615156100a357600080fd5b60405136600082376000803683855af43d806000843e8180156100c4578184f35b8184fd5b3480156100d457600080fd5b506100dd6101f7565b60408051600160a060020a039092168252519081900360200190f35b34801561010557600080fd5b5061011a600160a060020a0360043516610206565b005b34801561012857600080fd5b50610131610239565b60408051918252519081900360200190f35b60408051602060046024803582810135601f810185900485028601850190965285855261011a958335600160a060020a031695369560449491939091019190819084018382808284375094975061023e9650505050505050565b3480156101a957600080fd5b506100dd6101e8565b3480156101be57600080fd5b506100dd6102f2565b3480156101d357600080fd5b5061011a600160a060020a0360043516610301565b600054600160a060020a031690565b60006102016102f2565b905090565b61020e6101f7565b600160a060020a031633600160a060020a031614151561022d57600080fd5b61023681610391565b50565b600290565b6102466101f7565b600160a060020a031633600160a060020a031614151561026557600080fd5b61026e82610206565b3060016
0a060020a03168160405180828051906020019080838360005b838110156102a357818101518382015260200161028b565b50505050905090810190601f1680156102d05780820380516001836020036101000a031916815260200191505b50915050600060405180830381855af491505015156102ee57600080fd5b5050565b600154600160a060020a031690565b6103096101f7565b600160a060020a031633600160a060020a031614151561032857600080fd5b600160a060020a038116151561033d57600080fd5b7f5a3e66efaa1e445ebd894728a69d6959842ea1e97bd79b892797106e270efcd96103666101f7565b60408051600160a060020a03928316815291841660208301528051918290030190a161023681610401565b600054600160a060020a03828116911614156103ac57600080fd5b6000805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a038316908117825560405190917fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b91a250565b6001805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03929092169190911790555600a165627a7a723058205f26049bbc794226b505f589b2ee1130db54310d79dd8a635c6f6c61e305a77700290000000000000000000000001b320348dcf5fe741161c87bd321f4170bf5fe45000000000000000000000000f9e266af4bca5890e2781812cc6a6e89495a79f200000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000044485cc9550000000000000000000000001b320348dcf5fe741161c87bd321f4170bf5fe45000000000000000000000000a5409ec958c83c3f309868babaca7c86dcb077c100000000000000000000000000000000000000000000000000000000"
),
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 317883,
"code": HexBytes(
"0x6080604052600436106100825763ffffffff7c0100000000000000000000000000000000000000000000000000000000600035041663025313a281146100c85780633659cfe6146100f95780634555d5c91461011c5780634f1ef286146101435780635c60da1b1461019d5780636fde8202146101b2578063f1739cae146101c7575b600061008c6101e8565b9050600160a060020a03811615156100a357600080fd5b60405136600082376000803683855af43d806000843e8180156100c4578184f35b8184fd5b3480156100d457600080fd5b506100dd6101f7565b60408051600160a060020a039092168252519081900360200190f35b34801561010557600080fd5b5061011a600160a060020a0360043516610206565b005b34801561012857600080fd5b50610131610239565b60408051918252519081900360200190f35b60408051602060046024803582810135601f810185900485028601850190965285855261011a958335600160a060020a031695369560449491939091019190819084018382808284375094975061023e9650505050505050565b3480156101a957600080fd5b506100dd6101e8565b3480156101be57600080fd5b506100dd6102f2565b3480156101d357600080fd5b5061011a600160a060020a0360043516610301565b600054600160a060020a031690565b60006102016102f2565b905090565b61020e6101f7565b600160a060020a031633600160a060020a031614151561022d57600080fd5b61023681610391565b50565b600290565b6102466101f7565b600160a060020a031633600160a060020a031614151561026557600080fd5b61026e82610206565b30600160a060020a03168160405180828051906020019080838360005b838110156102a357818101518382015260200161028b565b50505050905090810190601f1680156102d05780820380516001836020036101000a031916815260200191505b50915050600060405180830381855af491505015156102ee57600080fd5b5050565b600154600160a060020a031690565b6103096101f7565b600160a060020a031633600160a060020a031614151561032857600080fd5b600160a060020a038116151561033d57600080fd5b7f5a3e66efaa1e445ebd894728a69d6959842ea1e97bd79b892797106e270efcd96103666101f7565b60408051600160a060020a03928316815291841660208301528051918290030190a161023681610401565b600054600160a060020a03828116911614156103ac57600080fd5b6000805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a038316908117825560405190917fbc7cd75a20ee27fd9ad
ebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b91a250565b6001805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03929092169190911790555600a165627a7a723058205f26049bbc794226b505f589b2ee1130db54310d79dd8a635c6f6c61e305a7770029"
),
"address": "0x1f4E3e948830F342c9E575155f7929b3512D0788",
},
"subtraces": 1,
"traceAddress": [0],
"transactionHash": "0x87a0b62f5d2350e2161abfc9071c68a964272ec288c1e00d564437a01d48ef53",
"transactionPosition": 43,
"type": "create",
},
{
"action": {
"from": "0x1f4E3e948830F342c9E575155f7929b3512D0788",
"gas": 394870,
"value": 0,
"callType": "delegatecall",
"input": HexBytes(
"0x485cc9550000000000000000000000001b320348dcf5fe741161c87bd321f4170bf5fe45000000000000000000000000a5409ec958c83c3f309868babaca7c86dcb077c100000000000000000000000000000000000000000000000000000000"
),
"to": "0xF9e266af4BcA5890e2781812cc6a6E89495a79f2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 45120, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [0, 0],
"transactionHash": "0x87a0b62f5d2350e2161abfc9071c68a964272ec288c1e00d564437a01d48ef53",
"transactionPosition": 43,
"type": "call",
},
{
"action": {
"from": "0x26B675Fc79EA35805b6594857c429CFe2D5f1509",
"gas": 495819,
"value": 0,
"callType": "call",
"input": HexBytes("0xddd81f82"),
"to": "0xa5409ec958C83C3f309868babACA7c86DCB077c1",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 376538,
"output": HexBytes(
"0x00000000000000000000000050198a0c9de7d342fdbb24f57242dddf25b2d1b6"
),
},
"subtraces": 1,
"traceAddress": [],
"transactionHash": "0x73c429f04ca56cd06ec53cfdaf845faf6f284e368eeeb310b1b7b825fa70a6db",
"transactionPosition": 44,
"type": "call",
},
{
"action": {
"from": "0xa5409ec958C83C3f309868babACA7c86DCB077c1",
"gas": 450467,
"value": 0,
"init": HexBytes(
"0x608060405234801561001057600080fd5b506040516105d03803806105d08339810160409081528151602083015191830151909201610046836401000000006100e0810204565b61005882640100000000610102810204565b81600160a060020a03168160405180828051906020019080838360005b8381101561008d578181015183820152602001610075565b50505050905090810190601f1680156100ba5780820380516001836020036101000a031916815260200191505b50915050600060405180830381855af491505015156100d857600080fd5b505050610165565b60018054600160a060020a031916600160a060020a0392909216919091179055565b600054600160a060020a038281169116141561011d57600080fd5b60008054600160a060020a031916600160a060020a038316908117825560405190917fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b91a250565b61045c806101746000396000f3006080604052600436106100825763ffffffff7c0100000000000000000000000000000000000000000000000000000000600035041663025313a281146100c85780633659cfe6146100f95780634555d5c91461011c5780634f1ef286146101435780635c60da1b1461019d5780636fde8202146101b2578063f1739cae146101c7575b600061008c6101e8565b9050600160a060020a03811615156100a357600080fd5b60405136600082376000803683855af43d806000843e8180156100c4578184f35b8184fd5b3480156100d457600080fd5b506100dd6101f7565b60408051600160a060020a039092168252519081900360200190f35b34801561010557600080fd5b5061011a600160a060020a0360043516610206565b005b34801561012857600080fd5b50610131610239565b60408051918252519081900360200190f35b60408051602060046024803582810135601f810185900485028601850190965285855261011a958335600160a060020a031695369560449491939091019190819084018382808284375094975061023e9650505050505050565b3480156101a957600080fd5b506100dd6101e8565b3480156101be57600080fd5b506100dd6102f2565b3480156101d357600080fd5b5061011a600160a060020a0360043516610301565b600054600160a060020a031690565b60006102016102f2565b905090565b61020e6101f7565b600160a060020a031633600160a060020a031614151561022d57600080fd5b61023681610391565b50565b600290565b6102466101f7565b600160a060020a031633600160a060020a031614151561026557600080fd5b61026e82610206565b3060016
0a060020a03168160405180828051906020019080838360005b838110156102a357818101518382015260200161028b565b50505050905090810190601f1680156102d05780820380516001836020036101000a031916815260200191505b50915050600060405180830381855af491505015156102ee57600080fd5b5050565b600154600160a060020a031690565b6103096101f7565b600160a060020a031633600160a060020a031614151561032857600080fd5b600160a060020a038116151561033d57600080fd5b7f5a3e66efaa1e445ebd894728a69d6959842ea1e97bd79b892797106e270efcd96103666101f7565b60408051600160a060020a03928316815291841660208301528051918290030190a161023681610401565b600054600160a060020a03828116911614156103ac57600080fd5b6000805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a038316908117825560405190917fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b91a250565b6001805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03929092169190911790555600a165627a7a723058205f26049bbc794226b505f589b2ee1130db54310d79dd8a635c6f6c61e305a777002900000000000000000000000026b675fc79ea35805b6594857c429cfe2d5f1509000000000000000000000000f9e266af4bca5890e2781812cc6a6e89495a79f200000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000044485cc95500000000000000000000000026b675fc79ea35805b6594857c429cfe2d5f1509000000000000000000000000a5409ec958c83c3f309868babaca7c86dcb077c100000000000000000000000000000000000000000000000000000000"
),
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {
"gasUsed": 317883,
"code": HexBytes(
"0x6080604052600436106100825763ffffffff7c0100000000000000000000000000000000000000000000000000000000600035041663025313a281146100c85780633659cfe6146100f95780634555d5c91461011c5780634f1ef286146101435780635c60da1b1461019d5780636fde8202146101b2578063f1739cae146101c7575b600061008c6101e8565b9050600160a060020a03811615156100a357600080fd5b60405136600082376000803683855af43d806000843e8180156100c4578184f35b8184fd5b3480156100d457600080fd5b506100dd6101f7565b60408051600160a060020a039092168252519081900360200190f35b34801561010557600080fd5b5061011a600160a060020a0360043516610206565b005b34801561012857600080fd5b50610131610239565b60408051918252519081900360200190f35b60408051602060046024803582810135601f810185900485028601850190965285855261011a958335600160a060020a031695369560449491939091019190819084018382808284375094975061023e9650505050505050565b3480156101a957600080fd5b506100dd6101e8565b3480156101be57600080fd5b506100dd6102f2565b3480156101d357600080fd5b5061011a600160a060020a0360043516610301565b600054600160a060020a031690565b60006102016102f2565b905090565b61020e6101f7565b600160a060020a031633600160a060020a031614151561022d57600080fd5b61023681610391565b50565b600290565b6102466101f7565b600160a060020a031633600160a060020a031614151561026557600080fd5b61026e82610206565b30600160a060020a03168160405180828051906020019080838360005b838110156102a357818101518382015260200161028b565b50505050905090810190601f1680156102d05780820380516001836020036101000a031916815260200191505b50915050600060405180830381855af491505015156102ee57600080fd5b5050565b600154600160a060020a031690565b6103096101f7565b600160a060020a031633600160a060020a031614151561032857600080fd5b600160a060020a038116151561033d57600080fd5b7f5a3e66efaa1e445ebd894728a69d6959842ea1e97bd79b892797106e270efcd96103666101f7565b60408051600160a060020a03928316815291841660208301528051918290030190a161023681610401565b600054600160a060020a03828116911614156103ac57600080fd5b6000805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a038316908117825560405190917fbc7cd75a20ee27fd9ad
ebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b91a250565b6001805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03929092169190911790555600a165627a7a723058205f26049bbc794226b505f589b2ee1130db54310d79dd8a635c6f6c61e305a7770029"
),
"address": "0x50198a0C9De7d342FDbb24F57242dDDf25B2d1b6",
},
"subtraces": 1,
"traceAddress": [0],
"transactionHash": "0x73c429f04ca56cd06ec53cfdaf845faf6f284e368eeeb310b1b7b825fa70a6db",
"transactionPosition": 44,
"type": "create",
},
{
"action": {
"from": "0x50198a0C9De7d342FDbb24F57242dDDf25B2d1b6",
"gas": 394870,
"value": 0,
"callType": "delegatecall",
"input": HexBytes(
"0x485cc95500000000000000000000000026b675fc79ea35805b6594857c429cfe2d5f1509000000000000000000000000a5409ec958c83c3f309868babaca7c86dcb077c100000000000000000000000000000000000000000000000000000000"
),
"to": "0xF9e266af4BcA5890e2781812cc6a6E89495a79f2",
},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": {"gasUsed": 45120, "output": HexBytes("0x")},
"subtraces": 0,
"traceAddress": [0, 0],
"transactionHash": "0x73c429f04ca56cd06ec53cfdaf845faf6f284e368eeeb310b1b7b825fa70a6db",
"transactionPosition": 44,
"type": "call",
},
{
"action": {"value": 2000000000000000000},
"blockHash": "0x8f9809f6012f85803956a419e2e54914dfdebba33e4f7a0d1574b12e92499c0e",
"blockNumber": 13191781,
"result": None,
"subtraces": 0,
"traceAddress": [],
"transactionHash": None,
"transactionPosition": None,
"type": "reward",
},
]
| 56.613328
| 4,828
| 0.721074
| 3,970
| 141,873
| 25.767003
| 0.13073
| 0.011242
| 0.015465
| 0.019532
| 0.498666
| 0.493367
| 0.355335
| 0.338101
| 0.258849
| 0.245388
| 0
| 0.597366
| 0.199777
| 141,873
| 2,505
| 4,829
| 56.635928
| 0.303673
| 0
| 0
| 0.663604
| 0
| 0
| 0.685451
| 0.568184
| 0
| 1
| 0.56896
| 0
| 0
| 1
| 0
| false
| 0
| 0.0004
| 0
| 0.0004
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8080836038417051027ce4d9642f78e280dd24df
| 147
|
py
|
Python
|
unittest_reinvent/sample_model_tests/__init__.py
|
fujirock/Reinvent
|
9c57636f9d32b4ce5b75670f43906a70d5daf886
|
[
"MIT"
] | 4
|
2021-05-11T05:34:01.000Z
|
2022-03-30T10:04:21.000Z
|
unittest_reinvent/sample_model_tests/__init__.py
|
prasannavd/Reinvent
|
ca02ebee8d8ed83223c55f4a1dd1b3fbc2359616
|
[
"MIT"
] | null | null | null |
unittest_reinvent/sample_model_tests/__init__.py
|
prasannavd/Reinvent
|
ca02ebee8d8ed83223c55f4a1dd1b3fbc2359616
|
[
"MIT"
] | 2
|
2021-06-01T11:56:10.000Z
|
2021-10-05T04:33:56.000Z
|
from unittest_reinvent.sample_model_tests.test_sample_from_model import *
from unittest_reinvent.sample_model_tests.test_sample_logger_tbx import *
| 73.5
| 73
| 0.911565
| 22
| 147
| 5.545455
| 0.454545
| 0.196721
| 0.327869
| 0.42623
| 0.754098
| 0.754098
| 0.754098
| 0.754098
| 0
| 0
| 0
| 0
| 0.047619
| 147
| 2
| 74
| 73.5
| 0.871429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
80c35c2732b639a1a0facd33ff4897fdca547cef
| 1,662
|
py
|
Python
|
ex21_bsearch/test_bsearch.py
|
techieguy007/learn-more-python-the-hard-way-solutions
|
7886c860f69d69739a41d6490b8dc3fa777f227b
|
[
"Zed",
"Unlicense"
] | 466
|
2016-11-01T19:40:59.000Z
|
2022-03-23T16:34:13.000Z
|
ex21_bsearch/test_bsearch.py
|
Desperaaado/learn-more-python-the-hard-way-solutions
|
7886c860f69d69739a41d6490b8dc3fa777f227b
|
[
"Zed",
"Unlicense"
] | 2
|
2017-09-20T09:01:53.000Z
|
2017-09-21T15:03:56.000Z
|
ex21_bsearch/test_bsearch.py
|
Desperaaado/learn-more-python-the-hard-way-solutions
|
7886c860f69d69739a41d6490b8dc3fa777f227b
|
[
"Zed",
"Unlicense"
] | 241
|
2017-06-17T08:02:26.000Z
|
2022-03-30T09:09:39.000Z
|
import bsearch
def test_bsearch_list():
data = sorted([5,3,7,1,9,20])
assert bsearch.search_list(data, 5) == (5, 2)
assert bsearch.search_list(data, 1) == (1, 0)
assert bsearch.search_list(data, 20) == (20, len(data) - 1)
assert bsearch.search_list(data, 9) == (9, 4)
assert bsearch.search_list(data, 100) == (None, -1)
assert bsearch.search_list(data, -1) == (None, -1)
def test_bsearch_list_iter():
data = sorted([5,3,7,1,9,20])
assert bsearch.search_list_iter(data, 5) == (5, 2)
assert bsearch.search_list_iter(data, 1) == (1, 0)
assert bsearch.search_list_iter(data, 20) == (20, len(data) - 1)
assert bsearch.search_list_iter(data, 9) == (9, 4)
assert bsearch.search_list_iter(data, 100) == (None, -1)
assert bsearch.search_list_iter(data, -1) == (None, -1)
def test_bsearch_dllist():
data = sorted([5,3,7,1,9,20])
assert bsearch.search_dllist(data, 5) == (5, 2)
assert bsearch.search_dllist(data, 1) == (1, 0)
assert bsearch.search_dllist(data, 20) == (20, len(data) - 1)
assert bsearch.search_dllist(data, 9) == (9, 4)
assert bsearch.search_dllist(data, 100) == (None, -1)
assert bsearch.search_dllist(data, -1) == (None, -1)
def test_btree_search():
# for btree, adding sorted data just makes it a list
data = [5,3,7,1,9,20]
assert bsearch.search_btree(data, 5) == (5, 0)
assert bsearch.search_btree(data, 1) == (1, 3)
assert bsearch.search_btree(data, 20) == (20, len(data) - 1)
assert bsearch.search_btree(data, 9) == (9, 4)
assert bsearch.search_btree(data, 100) == (None, -1)
assert bsearch.search_btree(data, -1) == (None, -1)
| 39.571429
| 68
| 0.638989
| 266
| 1,662
| 3.845865
| 0.112782
| 0.304985
| 0.445748
| 0.269795
| 0.892473
| 0.837732
| 0.748778
| 0.527859
| 0.28348
| 0.189638
| 0
| 0.079764
| 0.185319
| 1,662
| 41
| 69
| 40.536585
| 0.675775
| 0.030084
| 0
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.727273
| 1
| 0.121212
| false
| 0
| 0.030303
| 0
| 0.151515
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
80d71540452058a70a45dfeeae37cb00c5bf8474
| 20,392
|
py
|
Python
|
pyro/dynamic/massspringdamper.py
|
SherbyRobotics/pyro
|
43dcb112427978ff237ea14018bcf7f8a8d89997
|
[
"MIT"
] | 14
|
2019-05-03T15:22:38.000Z
|
2022-03-14T15:31:54.000Z
|
pyro/dynamic/massspringdamper.py
|
Scientist265/pyro
|
43dcb112427978ff237ea14018bcf7f8a8d89997
|
[
"MIT"
] | 9
|
2019-08-01T14:22:13.000Z
|
2021-06-12T01:44:50.000Z
|
pyro/dynamic/massspringdamper.py
|
Scientist265/pyro
|
43dcb112427978ff237ea14018bcf7f8a8d89997
|
[
"MIT"
] | 9
|
2019-05-21T12:38:36.000Z
|
2022-03-29T16:28:45.000Z
|
# -*- coding: utf-8 -*-
"""
Created on 4 Jun 2021
@author: Alexandre
"""
import numpy as np
from pyro.dynamic import statespace
###############################################################################
class SingleMass( statespace.StateSpaceSystem ):
"""Single Mass with linear spring and damper
Attributes
----------
"""
############################
def __init__(self, m=1, k=2, b=0):
""" """
# params
self.m = m
self.k = k
self.b = b
self.l1 = 2
self.l2 = 1
# Matrix ABCD
self.compute_ABCD()
# initialize standard params
super().__init__(self.A, self.B, self.C, self.D)
# Name and labels
self.name = 'Linear-Spring-Damper'
self.input_label = [ 'Force']
self.input_units = [ '[N]']
self.output_label = ['Position']
self.output_units = ['[m]']
self.state_label = [ 'Position','Velocity']
self.state_units = [ '[m]', '[m/s]']
self.linestyle = '-'
###########################################################################
def compute_ABCD(self):
"""
"""
self.A = np.array([ [ 0 , 1 ],
[ -self.k/self.m , -self.b/self.m ] ])
self.B = np.array([ [ 0 ],
[ 1 /self.m ]])
self.C = np.array([ [ 1 , 0 ]])
self.D = np.array([ [ 0 ]])
###########################################################################
# Graphical output
###########################################################################
#############################
def xut2q( self, x , u , t ):
""" Compute configuration variables ( q vector ) """
q = np.array([ x[0], u[0] ]) # Hack to illustrate force vector
return q
###########################################################################
def forward_kinematic_domain(self, q ):
"""
"""
l = self.l1 * 2
domain = [ (-l,l) , (-l,l) , (-l,l) ]#
return domain
###########################################################################
def forward_kinematic_lines(self, q ):
"""
Compute points p = [x;y;z] positions given config q
----------------------------------------------------
- points of interest for ploting
Outpus:
lines_pts = [] : a list of array (n_pts x 3) for each lines
"""
lines_pts = [] # list of array (n_pts x 3) for each lines
# ground line
pts = np.zeros(( 2 , 3 ))
pts[0,:] = np.array([-self.l1,-self.l2,0])
pts[1,:] = np.array([-self.l1,+self.l2,0])
lines_pts.append( pts )
# mass
pts = np.zeros(( 5 , 3 ))
pts[0,:] = np.array([q[0] - self.l2/2,+self.l2/2,0])
pts[1,:] = np.array([q[0] + self.l2/2,+self.l2/2,0])
pts[2,:] = np.array([q[0] + self.l2/2,-self.l2/2,0])
pts[3,:] = np.array([q[0] - self.l2/2,-self.l2/2,0])
pts[4,:] = pts[0,:]
lines_pts.append( pts )
# spring
pts = np.zeros(( 15 , 3 ))
d = q[0] + self.l1 - self.l2/2
h = self.l2 / 3
pts[0,:] = np.array([d*0.00 - self.l1,0,0])
pts[1,:] = np.array([d*0.20 - self.l1,0,0])
pts[2,:] = np.array([d*0.25 - self.l1,+h,0])
pts[3,:] = np.array([d*0.30 - self.l1,-h,0])
pts[4,:] = np.array([d*0.35 - self.l1,+h,0])
pts[5,:] = np.array([d*0.40 - self.l1,-h,0])
pts[6,:] = np.array([d*0.45 - self.l1,+h,0])
pts[7,:] = np.array([d*0.50 - self.l1,-h,0])
pts[8,:] = np.array([d*0.55 - self.l1,+h,0])
pts[9,:] = np.array([d*0.60 - self.l1,-h,0])
pts[10,:] = np.array([d*0.65 - self.l1,+h,0])
pts[11,:] = np.array([d*0.70 - self.l1,-h,0])
pts[12,:] = np.array([d*0.75 - self.l1,+h,0])
pts[13,:] = np.array([d*0.80 - self.l1,0,0])
pts[14,:] = np.array([d*1.00 - self.l1,0,0])
lines_pts.append( pts )
# force arrow
pts = np.zeros(( 5 , 3 ))
pts[0,:] = np.array([q[0] + self.l2/2,0,0])
pts[1,:] = np.array([q[0] + self.l2/2 + q[1],0,0])
pts[2,:] = np.array([q[0] + self.l2/2 + q[1] - self.l2/4*q[1],+self.l2/4*q[1],0])
pts[3,:] = np.array([q[0] + self.l2/2 + q[1],0,0])
pts[4,:] = np.array([q[0] + self.l2/2 + q[1] - self.l2/4*q[1],-self.l2/4*q[1],0])
lines_pts.append( pts )
return lines_pts
class TwoMass( statespace.StateSpaceSystem ):
    """Two masses in series coupled by linear springs and dampers.

    Spring k1 ties mass 1 to the wall and spring k2 couples masses 1-2
    (see the stiffness pattern in A); each damper bi acts on its own mass
    velocity.  The input is a force applied on mass 2 and the measured
    output is the position x2 of mass 2.
    """

    ############################
    def __init__(self, m=1, k=2, b=0.2):
        """Build the model; both stages share the same m, k, b values."""
        # physical parameters (identical for both stages)
        self.m1, self.m2 = m, m
        self.k1, self.k2 = k, k
        self.b1, self.b2 = b, b
        # geometric constants, used only by the drawing routines
        self.l1, self.l2 = 2, 1

        # assemble A, B, C, D then hand them to the base state-space class
        self.compute_ABCD()
        super().__init__(self.A, self.B, self.C, self.D)

        # name and labels
        self.name = 'Two mass with linear spring-dampers'
        self.input_label = ['Force']
        self.input_units = ['[N]']
        self.output_label = ['x2']
        self.output_units = ['[m]']
        self.state_label = ['x1', 'x2', 'dx1', 'dx2']
        self.state_units = ['[m]', '[m]', '[m/s]', '[m/s]']
        self.linestyle = '-'

    ###########################################################################
    def compute_ABCD(self):
        """Assemble the linear matrices for the state x = [x1 x2 dx1 dx2]."""
        m1, m2 = self.m1, self.m2
        k1, k2 = self.k1, self.k2
        b1, b2 = self.b1, self.b2
        # top half: kinematic integrators; bottom half: Newton's law per mass
        self.A = np.array([[0, 0, 1, 0],
                           [0, 0, 0, 1],
                           [-(k1 + k2) / m1, +k2 / m1, -b1 / m1, 0],
                           [+k2 / m2, -k2 / m2, 0, -b2 / m2]])
        # the force input enters the dx2 equation only
        self.B = np.array([[0],
                           [0],
                           [0],
                           [1 / m2]])
        # measured output: x2
        self.C = np.array([[0, 1, 0, 0]])
        self.D = np.array([[0]])

    ###########################################################################
    # Graphical output
    ###########################################################################
    #############################
    def xut2q(self, x, u, t):
        """Configuration for drawing: positions x1, x2 plus the input force."""
        return np.array([x[0], x[1], u[0]])

    ###########################################################################
    def forward_kinematic_domain(self, q):
        """Cubic plotting domain sized from the drawing length scale l1."""
        span = self.l1 * 3
        return [(-span, span)] * 3

    ###########################################################################
    def forward_kinematic_lines(self, q):
        """Return the list of poly-lines (arrays of n_pts x 3) drawing the
        system at configuration q = [x1, x2, u].

        Lines, in order: wall, mass 1 box, mass 2 box, spring 1 (wall to
        mass 1), spring 2 (mass 1 to mass 2), force arrow on mass 2.
        """
        half = self.l2 / 2      # half-width of a mass box
        amp = self.l2 / 3       # spring zig-zag amplitude
        wall = -self.l1 * 2     # x position of the fixed wall
        # drawn mass centers; mass 1 is offset left so q = 0 spreads them out
        x1 = q[0] - self.l1
        x2 = q[1]

        # fractions of the spring length where the zig-zag vertices sit, and
        # the matching lateral direction of each vertex (0 = on the axis)
        fracs = (0.00, 0.20, 0.25, 0.30, 0.35, 0.40, 0.45, 0.50,
                 0.55, 0.60, 0.65, 0.70, 0.75, 0.80, 1.00)
        sides = (0, 0, +1, -1, +1, -1, +1, -1, +1, -1, +1, -1, +1, 0, 0)

        def _box(xc):
            # closed square outline centered on (xc, 0)
            p = np.zeros((5, 3))
            corners = ((-half, +half), (+half, +half), (+half, -half),
                       (-half, -half), (-half, +half))
            for i, (dx, dy) in enumerate(corners):
                p[i, 0] = xc + dx
                p[i, 1] = dy
            return p

        def _spring(d, xref, shift):
            # zig-zag spring of stretched length d anchored at xref + shift
            p = np.zeros((len(fracs), 3))
            for i, (f, s) in enumerate(zip(fracs, sides)):
                p[i, 0] = d * f + xref + shift
                p[i, 1] = s * amp
            return p

        lines_pts = []

        # wall (vertical ground line)
        p = np.zeros((2, 3))
        p[0, :] = np.array([wall, -self.l2, 0])
        p[1, :] = np.array([wall, +self.l2, 0])
        lines_pts.append(p)

        # the two mass boxes
        lines_pts.append(_box(x1))
        lines_pts.append(_box(x2))

        # spring 1: wall -> left face of mass 1
        lines_pts.append(_spring(q[0] + self.l1 - self.l2 / 2, wall, 0.0))
        # spring 2: right face of mass 1 -> left face of mass 2
        lines_pts.append(_spring(q[1] - q[0] + self.l1 - self.l2, x1, half))

        # force arrow on mass 2; length and barbs scale with the input q[2]
        u = q[2]
        base = q[1] + half
        tip = base + u
        barb = self.l2 / 4 * u
        p = np.zeros((5, 3))
        p[0, :] = np.array([base, 0, 0])
        p[1, :] = np.array([tip, 0, 0])
        p[2, :] = np.array([tip - barb, +barb, 0])
        p[3, :] = np.array([tip, 0, 0])
        p[4, :] = np.array([tip - barb, -barb, 0])
        lines_pts.append(p)

        return lines_pts
class ThreeMass( statespace.StateSpaceSystem ):
    """Three masses in series coupled by linear springs and dampers.

    Spring k1 ties mass 1 to the wall, k2 couples masses 1-2 and k3 couples
    masses 2-3 (see the stiffness pattern in A); each damper bi acts on its
    own mass velocity.  The input is a force applied on mass 3 and the
    measured output is the position x3 of mass 3.
    """

    ############################
    def __init__(self, m=1, k=2, b=0.2):
        """Build the model; the three stages share the same m, k, b values."""
        # physical parameters (identical for the three stages)
        self.m1 = m
        self.k1 = k
        self.b1 = b
        self.m2 = m
        self.k2 = k
        self.b2 = b
        self.m3 = m
        self.k3 = k
        self.b3 = b
        # geometric constants, used only by the drawing routines
        self.l1 = 2
        self.l2 = 1

        # assemble A, B, C, D then hand them to the base state-space class
        self.compute_ABCD()
        super().__init__(self.A, self.B, self.C, self.D)

        # name and labels
        self.name = 'Three mass with linear spring-dampers'
        self.input_label = ['Force']
        self.input_units = ['[N]']
        self.output_label = ['x3']
        self.output_units = ['[m]']
        self.state_label = ['x1', 'x2', 'x3', 'dx1', 'dx2', 'dx3']
        self.state_units = ['[m]', '[m]', '[m]', '[m/s]', '[m/s]', '[m/s]']
        self.linestyle = '-'

    ###########################################################################
    def compute_ABCD(self):
        """Assemble A, B, C, D for the state x = [x1 x2 x3 dx1 dx2 dx3].

        BUGFIX: the input gain was ``1/m2``; the force enters the dx3
        equation (it acts on mass 3 — the force arrow is drawn on mass 3
        and the output is x3), so the gain must be ``1/m3``.  Results are
        unchanged for the default parameters where m1 = m2 = m3.
        """
        k1 = self.k1
        k2 = self.k2
        k3 = self.k3
        m1 = self.m1
        m2 = self.m2
        m3 = self.m3
        b1 = self.b1
        b2 = self.b2
        b3 = self.b3
        # top half: kinematic integrators; bottom half: Newton's law per mass
        self.A = np.array([[0, 0, 0, 1, 0, 0],
                           [0, 0, 0, 0, 1, 0],
                           [0, 0, 0, 0, 0, 1],
                           [-(k1 + k2) / m1, +k2 / m1, 0, -b1 / m1, 0, 0],
                           [+k2 / m2, -(k2 + k3) / m2, +k3 / m2, 0, -b2 / m2, 0],
                           [0, +k3 / m3, -k3 / m3, 0, 0, -b3 / m3]])
        # the force input enters the dx3 equation only (mass 3, hence 1/m3)
        self.B = np.array([[0],
                           [0],
                           [0],
                           [0],
                           [0],
                           [1 / m3]])
        # measured output: x3
        self.C = np.array([[0, 0, 1, 0, 0, 0]])
        self.D = np.array([[0]])

    ###########################################################################
    # Graphical output
    ###########################################################################
    #############################
    def xut2q(self, x, u, t):
        """Configuration for drawing: positions x1..x3 plus the input force."""
        return np.array([x[0], x[1], x[2], u[0]])

    ###########################################################################
    def forward_kinematic_domain(self, q):
        """Cubic plotting domain sized from the drawing length scale l1."""
        span = self.l1 * 3
        return [(-span, span)] * 3

    ###########################################################################
    def forward_kinematic_lines(self, q):
        """Return the list of poly-lines (arrays of n_pts x 3) drawing the
        system at configuration q = [x1, x2, x3, u].

        Lines, in order: wall, mass boxes 1-3, springs 1-3 (wall->1, 1->2,
        2->3), force arrow on mass 3.
        """
        half = self.l2 / 2      # half-width of a mass box
        amp = self.l2 / 3       # spring zig-zag amplitude
        wall = -self.l1 * 2     # x position of the fixed wall
        # drawn mass centers; masses 1 and 3 are offset outward so the
        # equilibrium configuration q = 0 is spread out visually
        x1 = q[0] - self.l1
        x2 = q[1]
        x3 = q[2] + self.l1

        # fractions of the spring length where the zig-zag vertices sit, and
        # the matching lateral direction of each vertex (0 = on the axis)
        fracs = (0.00, 0.20, 0.25, 0.30, 0.35, 0.40, 0.45, 0.50,
                 0.55, 0.60, 0.65, 0.70, 0.75, 0.80, 1.00)
        sides = (0, 0, +1, -1, +1, -1, +1, -1, +1, -1, +1, -1, +1, 0, 0)

        def _box(xc):
            # closed square outline centered on (xc, 0)
            p = np.zeros((5, 3))
            corners = ((-half, +half), (+half, +half), (+half, -half),
                       (-half, -half), (-half, +half))
            for i, (dx, dy) in enumerate(corners):
                p[i, 0] = xc + dx
                p[i, 1] = dy
            return p

        def _spring(d, xref, shift):
            # zig-zag spring of stretched length d anchored at xref + shift
            p = np.zeros((len(fracs), 3))
            for i, (f, s) in enumerate(zip(fracs, sides)):
                p[i, 0] = d * f + xref + shift
                p[i, 1] = s * amp
            return p

        lines_pts = []

        # wall (vertical ground line)
        p = np.zeros((2, 3))
        p[0, :] = np.array([wall, -self.l2, 0])
        p[1, :] = np.array([wall, +self.l2, 0])
        lines_pts.append(p)

        # the three mass boxes
        lines_pts.append(_box(x1))
        lines_pts.append(_box(x2))
        lines_pts.append(_box(x3))

        # spring 1: wall -> left face of mass 1
        lines_pts.append(_spring(q[0] + self.l1 - self.l2 / 2, wall, 0.0))
        # spring 2: right face of mass 1 -> left face of mass 2
        lines_pts.append(_spring(q[1] - q[0] + self.l1 - self.l2, x1, half))
        # spring 3: right face of mass 2 -> left face of mass 3
        lines_pts.append(_spring(q[2] - q[1] + self.l1 - self.l2, x2, half))

        # force arrow on mass 3; length and barbs scale with the input q[3]
        u = q[3]
        base = x3 + half
        tip = base + u
        barb = self.l2 / 4 * u
        p = np.zeros((5, 3))
        p[0, :] = np.array([base, 0, 0])
        p[1, :] = np.array([tip, 0, 0])
        p[2, :] = np.array([tip - barb, +barb, 0])
        p[3, :] = np.array([tip, 0, 0])
        p[4, :] = np.array([tip - barb, -barb, 0])
        lines_pts.append(p)

        return lines_pts
'''
#################################################################
################## Main ########
#################################################################
'''
if __name__ == "__main__":
    """ MAIN TEST """
    # Quick visual smoke-test: simulate the single-mass model with a ramp
    # force input u(t) = t, starting from position 1 m at rest, then show
    # the standard analysis plots and the animation.
    sys = SingleMass()
    #sys = TwoMass()

    def t2u(t):
        # open-loop input trajectory: force grows linearly with time
        return np.array([t])

    sys.t2u = t2u
    sys.x0 = np.array([1, 0])

    sys.plot_phase_plane()
    sys.plot_linearized_bode()
    sys.plot_linearized_pz_map()
    sys.plot_trajectory('xu')
    sys.animate_simulation()
| 33.211726
| 98
| 0.371812
| 2,894
| 20,392
| 2.581894
| 0.057015
| 0.142398
| 0.103988
| 0.101178
| 0.901365
| 0.88758
| 0.876874
| 0.861617
| 0.849973
| 0.825616
| 0
| 0.102446
| 0.350432
| 20,392
| 614
| 99
| 33.211726
| 0.461649
| 0.081159
| 0
| 0.635015
| 0
| 0
| 0.014281
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047478
| false
| 0
| 0.005935
| 0.002967
| 0.091988
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
03f535e1ded1c2363b3c0080f1b5b60cdeb7ac23
| 18,296
|
py
|
Python
|
climateeconomics/tests/l1_test_gradient_forest_v2_discipline.py
|
os-climate/witness-core
|
3ef9a44d86804c5ad57deec3c9916348cb3bfbb8
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 1
|
2022-01-14T06:37:42.000Z
|
2022-01-14T06:37:42.000Z
|
climateeconomics/tests/l1_test_gradient_forest_v2_discipline.py
|
os-climate/witness-core
|
3ef9a44d86804c5ad57deec3c9916348cb3bfbb8
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
climateeconomics/tests/l1_test_gradient_forest_v2_discipline.py
|
os-climate/witness-core
|
3ef9a44d86804c5ad57deec3c9916348cb3bfbb8
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
'''
Copyright 2022 Airbus SAS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import numpy as np
import pandas as pd
from os.path import join, dirname
from climateeconomics.core.core_forest.forest_v2 import Forest
from sos_trades_core.execution_engine.execution_engine import ExecutionEngine
from sos_trades_core.tests.core.abstract_jacobian_unit_test import AbstractJacobianUnittest
class ForestJacobianDiscTest(AbstractJacobianUnittest):
    """Analytic-gradient (jacobian) tests for the Forest v2 discipline.

    Each test builds a standalone execution engine holding only the
    ForestDiscipline, feeds it a fully specified input dictionary, then
    calls ``check_jacobian`` to compare the analytic gradients against a
    complex-step finite-difference reference.
    """

    # NOTE(review): this is set on the base class, so it affects every
    # AbstractJacobianUnittest subclass in the session — presumably it
    # forces (re)dumping the reference jacobian pickles; confirm.
    AbstractJacobianUnittest.DUMP_JACOBIAN = True
    # np.set_printoptions(threshold=np.inf)

    def setUp(self):
        """Create a fresh execution engine for each test."""
        self.name = 'Test'
        self.ee = ExecutionEngine(self.name)

    def analytic_grad_entry(self):
        """List of gradient test entry points for the base-class runner.

        NOTE(review): this references ``self.test_forest_analytic_grad``,
        but the method below is named ``_test_forest_analytic_grad``
        (leading underscore, i.e. disabled); calling this entry would raise
        AttributeError — confirm whether the tests were deliberately
        disabled and this list left stale.
        """
        return [
            self.test_forest_analytic_grad
        ]

    def _test_forest_analytic_grad(self):
        """Jacobian check with moderate, decreasing deforestation investment.

        Disabled (leading underscore).  Checks the gradients of the forest
        surface and CO2 land emission outputs w.r.t. the three investment
        inputs.
        """
        model_name = 'Forest'
        # all namespaces point at this test's scope / the discipline node
        ns_dict = {'ns_public': f'{self.name}',
                   'ns_witness': f'{self.name}',
                   'ns_functions': f'{self.name}.{model_name}',
                   'ns_forest': f'{self.name}.{model_name}',
                   'ns_agriculture': f'{self.name}.{model_name}',
                   'ns_invest': f'{self.name}.{model_name}'}
        self.ee.ns_manager.add_ns_def(ns_dict)

        # build the process tree with the single Forest discipline
        mod_path = 'climateeconomics.sos_wrapping.sos_wrapping_agriculture.forest.forest_disc.ForestDiscipline'
        builder = self.ee.factory.get_builder_from_module(self.name, mod_path)
        self.ee.factory.set_builders_to_coupling_builder(builder)
        self.ee.configure()
        self.ee.display_treeview_nodes()

        # study horizon
        self.year_start = 2020
        self.year_end = 2035
        self.time_step = 1
        years = np.arange(self.year_start, self.year_end + 1, 1)
        year_range = self.year_end - self.year_start + 1

        self.CO2_per_ha = 4000
        self.limit_deforestation_surface = 1000
        # GtCO2
        self.initial_emissions = 3.21
        # reforestation investment ramps up, deforestation ramps down
        forest_invest = np.linspace(2, 10, year_range)
        self.forest_invest_df = pd.DataFrame(
            {"years": years, "forest_investment": forest_invest})
        deforest_invest = np.linspace(10, 1, year_range)
        self.deforest_invest_df = pd.DataFrame(
            {"years": years, "investment": deforest_invest})
        self.reforestation_cost_per_ha = 13800

        # managed-wood techno parameters
        wood_density = 600.0  # kg/m3
        residues_density = 200.0  # kg/m3
        residue_density_m3_per_ha = 46.5
        # average of 360 and 600 divided by 5
        wood_density_m3_per_ha = 96
        construction_delay = 3
        wood_residue_price_percent_dif = 0.34
        wood_percentage_for_energy = 0.48
        residue_percentage_for_energy = 0.48
        density_per_ha = residue_density_m3_per_ha + \
            wood_density_m3_per_ha
        wood_percentage = wood_density_m3_per_ha / density_per_ha
        residue_percentage = residue_density_m3_per_ha / density_per_ha
        # density-weighted mean of wood and residues
        mean_density = wood_percentage * wood_density + \
            residue_percentage * residues_density
        years_between_harvest = 20
        recycle_part = 0.52  # 52%
        self.managed_wood_techno_dict = {'maturity': 5,
                                         'wood_residues_moisture': 0.35,  # 35% moisture content
                                         'wood_residue_colorific_value': 4.356,
                                         'Opex_percentage': 0.045,
                                         'managed_wood_price_per_ha': 15000,  # 13047,
                                         'unmanaged_wood_price_per_ha': 11000,  # 10483,
                                         'Price_per_ha_unit': 'euro/ha',
                                         'full_load_hours': 8760.0,
                                         'euro_dollar': 1.1447,  # in 2019, date of the paper
                                         'percentage_production': 0.52,
                                         'residue_density_percentage': residue_percentage,
                                         'non_residue_density_percentage': wood_percentage,
                                         'density_per_ha': density_per_ha,
                                         'wood_percentage_for_energy': wood_percentage_for_energy,
                                         'residue_percentage_for_energy': residue_percentage_for_energy,
                                         'density': mean_density,
                                         'wood_density': wood_density,
                                         'residues_density': residues_density,
                                         'density_per_ha_unit': 'm^3/ha',
                                         'techno_evo_eff': 'no',  # yes or no
                                         'years_between_harvest': years_between_harvest,
                                         'wood_residue_price_percent_dif': wood_residue_price_percent_dif,
                                         'recycle_part': recycle_part,
                                         'construction_delay': construction_delay,
                                         'WACC': 0.07
                                         }
        # one investment entry per construction-delay year before year_start
        self.invest_before_year_start = pd.DataFrame(
            {'past_years': np.arange(-construction_delay, 0), 'investment': [1.135081, 1.135081, 1.135081]})
        self.mw_initial_production = 1.25 * 0.92 * \
            density_per_ha * mean_density * 3.6 / \
            years_between_harvest / (1 - recycle_part)  # in Twh
        mw_invest = np.linspace(1, 10, year_range)
        self.mw_invest_df = pd.DataFrame(
            {"years": years, "investment": mw_invest})
        transport = np.linspace(7.6, 7.6, year_range)
        self.transport_df = pd.DataFrame(
            {"years": years, "transport": transport})
        self.margin = pd.DataFrame(
            {'years': years, 'margin': np.ones(len(years)) * 110.0})
        # surfaces in Gha: 21% of the 4 Gha of forest is protected
        self.initial_protected_forest_surface = 4 * 0.21
        self.initial_unmanaged_forest_surface = 4 - \
            1.25 - self.initial_protected_forest_surface

        inputs_dict = {f'{self.name}.year_start': self.year_start,
                       f'{self.name}.year_end': self.year_end,
                       f'{self.name}.time_step': 1,
                       f'{self.name}.{model_name}.{Forest.LIMIT_DEFORESTATION_SURFACE}': self.limit_deforestation_surface,
                       f'{self.name}.{model_name}.{Forest.DEFORESTATION_INVESTMENT}': self.deforest_invest_df,
                       f'{self.name}.{model_name}.{Forest.DEFORESTATION_COST_PER_HA}': 8000,
                       f'{self.name}.{model_name}.{Forest.CO2_PER_HA}': self.CO2_per_ha,
                       f'{self.name}.{model_name}.{Forest.INITIAL_CO2_EMISSIONS}': self.initial_emissions,
                       f'{self.name}.{model_name}.{Forest.REFORESTATION_INVESTMENT}': self.forest_invest_df,
                       f'{self.name}.{model_name}.{Forest.REFORESTATION_COST_PER_HA}': self.reforestation_cost_per_ha,
                       f'{self.name}.{model_name}.wood_techno_dict': self.managed_wood_techno_dict,
                       f'{self.name}.{model_name}.managed_wood_initial_prod': self.mw_initial_production,
                       f'{self.name}.{model_name}.managed_wood_initial_surface': 1.25 * 0.92,
                       f'{self.name}.{model_name}.managed_wood_invest_before_year_start': self.invest_before_year_start,
                       f'{self.name}.{model_name}.managed_wood_investment': self.mw_invest_df,
                       f'{self.name}.transport_cost': self.transport_df,
                       f'{self.name}.margin': self.margin,
                       f'{self.name}.{model_name}.initial_unmanaged_forest_surface': self.initial_unmanaged_forest_surface,
                       f'{self.name}.{model_name}.protected_forest_surface': self.initial_protected_forest_surface,
                       }
        self.ee.load_study_from_input_dict(inputs_dict)
        disc_techno = self.ee.root_process.sos_disciplines[0]

        # compare analytic gradients against a complex-step reference
        self.check_jacobian(location=dirname(__file__), filename=f'jacobian_forest_v2_discipline.pkl',
                            discipline=disc_techno, step=1e-15, derr_approx='complex_step',
                            inputs=[
                                f'{self.name}.{model_name}.{Forest.DEFORESTATION_INVESTMENT}',
                                f'{self.name}.{model_name}.{Forest.REFORESTATION_INVESTMENT}',
                                f'{self.name}.{model_name}.managed_wood_investment',
                            ],
                            outputs=[
                                f'{self.name}.{Forest.FOREST_SURFACE_DF}',
                                f'{self.name}.{model_name}.CO2_land_emission_df',
                                #f'{self.name}.Forest.techno_production',
                                #f'{self.name}.Forest.techno_prices',
                                #f'{self.name}.Forest.techno_consumption',
                                #f'{self.name}.Forest.techno_consumption_woratio',
                                #f'{self.name}.Forest.land_use_required',
                                # f'{self.name}.Forest.CO2_emissions',
                                #f'{self.name}.Forest.techno_capital',
                                #f'{self.name}.Forest.non_use_capital'
                            ]
                            )

    def _test_forest_analytic_grad_unmanaged_limit(self):
        """Jacobian check with very large deforestation investment.

        Disabled (leading underscore).  Same setup as the first test but
        with deforestation investment large enough to hit the unmanaged
        forest surface limit, a 5-year construction delay and 25 years
        between harvests.
        """
        model_name = 'Forest'
        ns_dict = {'ns_public': f'{self.name}',
                   'ns_witness': f'{self.name}',
                   'ns_functions': f'{self.name}.{model_name}',
                   'ns_forest': f'{self.name}.{model_name}',
                   'ns_agriculture': f'{self.name}.{model_name}',
                   'ns_invest': f'{self.name}.{model_name}'}
        self.ee.ns_manager.add_ns_def(ns_dict)

        mod_path = 'climateeconomics.sos_wrapping.sos_wrapping_agriculture.forest.forest_disc.ForestDiscipline'
        builder = self.ee.factory.get_builder_from_module(self.name, mod_path)
        self.ee.factory.set_builders_to_coupling_builder(builder)
        self.ee.configure()
        self.ee.display_treeview_nodes()

        self.year_start = 2020
        self.year_end = 2035
        self.time_step = 1
        years = np.arange(self.year_start, self.year_end + 1, 1)
        year_range = self.year_end - self.year_start + 1

        # NOTE(review): this dataframe is built but never used below —
        # possibly a leftover from an earlier input format; confirm.
        deforestation_surface = np.array(np.linspace(4, 4, year_range))
        self.deforestation_surface_df = pd.DataFrame(
            {"years": years, "deforested_surface": deforestation_surface})

        self.CO2_per_ha = 4000
        self.limit_deforestation_surface = 1000
        # GtCO2
        self.initial_emissions = 3.21
        forest_invest = np.linspace(2, 10, year_range)
        self.forest_invest_df = pd.DataFrame(
            {"years": years, "forest_investment": forest_invest})
        # large deforestation investment to drive the surface to its limit
        deforest_invest = np.linspace(1000, 5000, year_range)
        self.deforest_invest_df = pd.DataFrame(
            {"years": years, "investment": deforest_invest})
        self.reforestation_cost_per_ha = 13800

        wood_density = 600.0  # kg/m3
        residues_density = 200.0  # kg/m3
        residue_density_m3_per_ha = 46.5
        # average of 360 and 600 divided by 5
        wood_density_m3_per_ha = 96
        construction_delay = 5
        wood_residue_price_percent_dif = 0.34
        wood_percentage_for_energy = 0.48
        residue_percentage_for_energy = 0.48
        density_per_ha = residue_density_m3_per_ha + \
            wood_density_m3_per_ha
        wood_percentage = wood_density_m3_per_ha / density_per_ha
        residue_percentage = residue_density_m3_per_ha / density_per_ha
        mean_density = wood_percentage * wood_density + \
            residue_percentage * residues_density
        years_between_harvest = 25
        recycle_part = 0.52  # 52%
        self.managed_wood_techno_dict = {'maturity': 5,
                                         'wood_residues_moisture': 0.35,  # 35% moisture content
                                         'wood_residue_colorific_value': 4.356,
                                         'Opex_percentage': 0.045,
                                         'managed_wood_price_per_ha': 15000,  # 13047,
                                         'unmanaged_wood_price_per_ha': 11000,  # 10483,
                                         'Price_per_ha_unit': 'euro/ha',
                                         'full_load_hours': 8760.0,
                                         'euro_dollar': 1.1447,  # in 2019, date of the paper
                                         'percentage_production': 0.52,
                                         'residue_density_percentage': residue_percentage,
                                         'non_residue_density_percentage': wood_percentage,
                                         'density_per_ha': density_per_ha,
                                         'wood_percentage_for_energy': wood_percentage_for_energy,
                                         'residue_percentage_for_energy': residue_percentage_for_energy,
                                         'density': mean_density,
                                         'wood_density': wood_density,
                                         'residues_density': residues_density,
                                         'density_per_ha_unit': 'm^3/ha',
                                         'techno_evo_eff': 'no',  # yes or no
                                         'years_between_harvest': years_between_harvest,
                                         'wood_residue_price_percent_dif': wood_residue_price_percent_dif,
                                         'recycle_part': recycle_part,
                                         'construction_delay': construction_delay,
                                         'WACC': 0.07
                                         }
        self.invest_before_year_start = pd.DataFrame(
            {'past_years': np.arange(-construction_delay, 0), 'investment': np.array([1.135081] * construction_delay)})
        self.mw_initial_production = 1.25 * 0.92 * \
            density_per_ha * mean_density * 3.6 / \
            years_between_harvest / (1 - recycle_part)  # in Twh
        mw_invest = np.linspace(1, 10, year_range)
        self.mw_invest_df = pd.DataFrame(
            {"years": years, "investment": mw_invest})
        transport = np.linspace(7.6, 7.6, year_range)
        self.transport_df = pd.DataFrame(
            {"years": years, "transport": transport})
        self.margin = pd.DataFrame(
            {'years': years, 'margin': np.ones(len(years)) * 110.0})
        self.initial_protected_forest_surface = 4 * 0.21
        self.initial_unmanaged_forest_surface = 4 - \
            1.25 - self.initial_protected_forest_surface

        inputs_dict = {f'{self.name}.year_start': self.year_start,
                       f'{self.name}.year_end': self.year_end,
                       f'{self.name}.time_step': 1,
                       f'{self.name}.{model_name}.{Forest.LIMIT_DEFORESTATION_SURFACE}': self.limit_deforestation_surface,
                       f'{self.name}.{model_name}.{Forest.DEFORESTATION_INVESTMENT}': self.deforest_invest_df,
                       f'{self.name}.{model_name}.{Forest.DEFORESTATION_COST_PER_HA}': 8000,
                       f'{self.name}.{model_name}.{Forest.CO2_PER_HA}': self.CO2_per_ha,
                       f'{self.name}.{model_name}.{Forest.INITIAL_CO2_EMISSIONS}': self.initial_emissions,
                       f'{self.name}.{model_name}.{Forest.REFORESTATION_INVESTMENT}': self.forest_invest_df,
                       f'{self.name}.{model_name}.{Forest.REFORESTATION_COST_PER_HA}': self.reforestation_cost_per_ha,
                       f'{self.name}.{model_name}.wood_techno_dict': self.managed_wood_techno_dict,
                       f'{self.name}.{model_name}.managed_wood_initial_prod': self.mw_initial_production,
                       f'{self.name}.{model_name}.managed_wood_initial_surface': 1.25 * 0.92,
                       f'{self.name}.{model_name}.managed_wood_invest_before_year_start': self.invest_before_year_start,
                       f'{self.name}.{model_name}.managed_wood_investment': self.mw_invest_df,
                       f'{self.name}.transport_cost': self.transport_df,
                       f'{self.name}.margin': self.margin,
                       f'{self.name}.{model_name}.initial_unmanaged_forest_surface': self.initial_unmanaged_forest_surface,
                       f'{self.name}.{model_name}.protected_forest_surface': self.initial_protected_forest_surface,
                       }
        self.ee.load_study_from_input_dict(inputs_dict)
        disc_techno = self.ee.root_process.sos_disciplines[0]

        self.check_jacobian(location=dirname(__file__), filename=f'jacobian_forest_v2_discipline_2.pkl',
                            discipline=disc_techno, step=1e-15, derr_approx='complex_step',
                            inputs=[
                                f'{self.name}.{model_name}.{Forest.DEFORESTATION_INVESTMENT}',
                                f'{self.name}.{model_name}.{Forest.REFORESTATION_INVESTMENT}',
                                f'{self.name}.{model_name}.managed_wood_investment',
                            ],
                            outputs=[f'{self.name}.{Forest.FOREST_SURFACE_DF}',
                                     f'{self.name}.{model_name}.CO2_land_emission_df',
                                     #f'{self.name}.Forest.techno_production',
                                     #f'{self.name}.Forest.techno_prices',
                                     #f'{self.name}.Forest.techno_consumption',
                                     #f'{self.name}.Forest.techno_consumption_woratio',
                                     #f'{self.name}.Forest.land_use_required',
                                     #f'{self.name}.Forest.CO2_emissions',
                                     #f'{self.name}.Forest.techno_capital',
                                     #f'{self.name}.Forest.non_use_capital'
                                     ]
                            )
| 54.614925
| 123
| 0.570671
| 2,029
| 18,296
| 4.793001
| 0.134056
| 0.06581
| 0.070334
| 0.063342
| 0.884627
| 0.877121
| 0.877121
| 0.877121
| 0.877121
| 0.877121
| 0
| 0.034027
| 0.330181
| 18,296
| 334
| 124
| 54.778443
| 0.759527
| 0.081985
| 0
| 0.841699
| 0
| 0
| 0.241612
| 0.18806
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015444
| false
| 0
| 0.023166
| 0.003861
| 0.046332
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
20a8c1205f3b1228eafeef056004a91162f8a07f
| 26,940
|
py
|
Python
|
wharf_management/wharf_management/utils.py
|
staumoepeau/pms
|
67aa43fb27f75482275b50dff17f5919e6c394b1
|
[
"MIT"
] | 1
|
2021-02-19T11:45:24.000Z
|
2021-02-19T11:45:24.000Z
|
wharf_management/wharf_management/utils.py
|
staumoepeau/pms
|
67aa43fb27f75482275b50dff17f5919e6c394b1
|
[
"MIT"
] | 1
|
2021-08-12T08:29:50.000Z
|
2021-08-17T06:12:02.000Z
|
wharf_management/wharf_management/utils.py
|
staumoepeau/pms
|
67aa43fb27f75482275b50dff17f5919e6c394b1
|
[
"MIT"
] | 4
|
2020-07-22T20:30:00.000Z
|
2021-08-07T12:36:01.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Sione Taumoepeau and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import math
import frappe
from frappe import _
from frappe.utils import cstr, formatdate, cint, getdate, date_diff, add_days, time_diff_in_hours, rounded, now
from frappe.utils.user import get_user_fullname
def get_create_cargo_devan(doctype, cargo_ref, final_work_type, secondary_work_type, cargo_type, devan):
    """Create and submit a new Cargo document for a container devanning
    (emptying) event, copying its fields from a source document.

    The source (`Pre Advice` or `Cargo`, selected by *doctype*) is looked
    up by *cargo_ref*.  When the container has been discharged and is now
    EMPTY, all tracking statuses are closed out on the new Cargo.

    Parameters
    ----------
    doctype : str
        Source doctype: "Pre Advice" or "Cargo".
    cargo_ref : str
        Name (primary key) of the source document.
    final_work_type : str
        Work type written on the new Cargo (e.g. "Discharged").
    secondary_work_type : str
        Default secondary work type; replaced by the source document's
        value when the devanning close-out branch applies.
    cargo_type : str
        Cargo category (e.g. "Container").
    devan : str
        Devanning state flag; only the value "EMPTY" triggers the close-out.
    """
    # Status fields written on the new Cargo; empty string means "not set".
    movement, payment, gate, yard_status = "", "", "", ""
    inspection_status, yard_date, final_status = "", "", ""
    third_work_type = ""
    # BUGFIX: container_content was only assigned inside the conditional
    # branches below, so any other argument combination raised NameError at
    # doc.update(); default it to an empty string instead.
    container_content = ""

    # Fetch the shared field set from the source document.  The "Cargo"
    # variant additionally carries customs/delivery fields; for "Pre Advice"
    # the missing attributes read as None from the frappe dict.
    # NOTE(review): if doctype is neither value, `val` stays unbound and the
    # access below raises — confirm callers only ever pass these two.
    if doctype == "Pre Advice":
        val = frappe.db.get_value(doctype, {"name": cargo_ref},
                                  ["booking_ref", "pat_code", "net_weight", "cargo_type", "last_port",
                                   "qty", "container_no", "voyage_no", "bol", "work_type", "secondary_work_type",
                                   "pol", "agents", "commodity_code", "vessel", "pod",
                                   "temperature", "container_type", "mark", "final_dest_port", "volume",
                                   "container_size", "consignee", "container_content",
                                   "stowage", "hazardous", "hazardous_code", "status", "seal_1", "seal_2",
                                   "eta_date", "cargo_description", "etd_date",
                                   "chasis_no", "yard_slot", "inspection_status", "yard_status",
                                   "final_status", "third_work_type"], as_dict=True)
    elif doctype == "Cargo":
        val = frappe.db.get_value(doctype, {"name": cargo_ref},
                                  ["booking_ref", "pat_code", "net_weight", "cargo_type", "last_port",
                                   "qty", "container_no", "voyage_no", "custom_code", "bol", "work_type",
                                   "secondary_work_type", "pol", "agents", "commodity_code",
                                   "vessel", "pod", "temperature", "container_type", "mark",
                                   "final_dest_port", "volume", "custom_warrant", "container_size",
                                   "consignee", "container_content", "stowage", "hazardous",
                                   "hazardous_code", "status", "seal_1", "seal_2", "eta_date",
                                   "cargo_description", "etd_date", "delivery_code", "chasis_no",
                                   "yard_slot", "inspection_status", "yard_status", "final_status"],
                                  as_dict=True)

    # Close-out branch: a discharged container that is now empty.  The two
    # original branches were identical except for third_work_type
    # ("Loading" -> cleared, "Stock" -> kept), so they are merged here.
    # BUGFIX: the original branches contained `final_work_type ==
    # "Discharged"` as a bare statement — a no-op comparison (clearly meant
    # as an assignment, but guaranteed true by this guard) — it is dropped.
    if (final_work_type == "Discharged" and devan == "EMPTY"
            and cargo_type == "Container"
            and val.third_work_type in ("Loading", "Stock")):
        movement = "Inspection"
        inspection_status = "Closed"
        yard_status = "Closed"
        final_status = "Discharged"
        payment = "Closed"
        gate = "Closed"
        yard_date = None
        # "Loading" containers carry no further work; "Stock" keeps its flag
        third_work_type = None if val.third_work_type == "Loading" else "Stock"
        container_content = "EMPTY"
        secondary_work_type = val.secondary_work_type

    # Build the new Cargo as an already-submitted document (docstatus 1),
    # then insert and submit it bypassing permission checks.
    doc = frappe.new_doc("Cargo")
    doc.update({
        "docstatus": 1,
        "cargo_ref": cargo_ref,
        "booking_ref": val.booking_ref,
        "pat_code": val.pat_code,
        "net_weight": val.net_weight,
        "cargo_type": cargo_type,
        "qty": val.qty,
        "container_no": val.container_no,
        "voyage_no": val.voyage_no,
        "bol": val.bol,
        "work_type": final_work_type,
        "work_type_date": now(),
        "secondary_work_type": secondary_work_type,
        "additional_work": third_work_type,
        "pol": val.pol,
        "agents": val.agents,
        "commodity_code": val.commodity_code,
        "vessel": val.vessel,
        "pod": val.pod,
        "temperature": val.temperature,
        "container_type": val.container_type,
        "mark": val.mark,
        "final_dest_port": val.final_dest_port,
        "volume": val.volume,
        "container_size": val.container_size,
        "consignee": val.consignee,
        "container_content": container_content,
        "stowage": val.stowage,
        "hazardous": val.hazardous,
        "hazardous_code": val.hazardous_code,
        "status": movement,
        "seal_1": val.seal_1,
        "seal_2": val.seal_2,
        "eta_date": val.eta_date,
        "cargo_description": val.cargo_description,
        "etd_date": val.etd_date,
        "chasis_no": val.chasis_no,
        "inspection_status": inspection_status,
        "yard_status": yard_status,
        "yard_date": yard_date,
        "final_status": final_status,
        "payment_status": payment,
        "gate1_status": gate,
        "gate2_status": gate,
        "custom_warrant": val.custom_warrant,
        "custom_code": val.custom_code,
        "delivery_code": val.delivery_code,
        "inspection_date": now()
    })
    doc.insert(ignore_permissions=True)
    doc.submit()
@frappe.whitelist()
def get_create_cargo(doctype, cargo_ref, final_work_type, secondary_work_type, cargo_type):
    """Create and submit a new Cargo document from a source record.

    The source record named ``cargo_ref`` is read from ``doctype`` ("Pre Advice"
    or "Cargo"); the combination of work types and ``cargo_type`` then decides
    the movement, inspection, yard, payment and gate statuses stamped onto the
    new Cargo document, which is inserted (ignoring permissions) and submitted.

    NOTE(review): for the "Cargo" source the ``secondary_work_type`` argument is
    overwritten by the stored value, and ``third_work_type`` keeps its ""
    default because it is not fetched — both preserved from the original logic.
    Cleanups applied here, all behaviour-preserving (verified by tracing the
    branch-by-branch variable mutations):
      * duplicated condition blocks removed — each was either dead (an earlier
        identical branch had already mutated the tested variables) or identical
        except for assignments to the never-read, misspelled ``concontent``;
      * no-op statements removed (``x = x`` self-assignments and the bare
        comparison ``final_work_type == "Discharged"`` used as a statement).
    """
    last_work_type, third_work_type, movement, payment, gate, yard_status, delivery_code = "", "", "", "", "", "", ""
    container_content, custom_warrant, custom_code, inspection_status, yard_date, final_status = "", "", "", "", "", ""
    if doctype == "Pre Advice":
        booking_ref, pat_code, net_weight, last_port, qty, container_no, voyage_no, bol, work_type, secondary_work_type,\
        pol, agents, commodity_code, vessel, pod, temperature, container_type, mark, final_dest_port, volume, container_size, consignee,\
        container_content, stowage, hazardous, hazardous_code, status, seal_1, seal_2, eta_date, cargo_description, etd_date, chasis_no,\
        yard_slot, inspection_status, yard_status, final_status, third_work_type = frappe.db.get_value(doctype, {'name': cargo_ref},\
        ['booking_ref','pat_code','net_weight','last_port','qty','container_no','voyage_no','bol','work_type',\
        'secondary_work_type','pol','agents','commodity_code', 'vessel','pod','temperature', 'container_type','mark','final_dest_port',\
        'volume', 'container_size','consignee','container_content', 'stowage','hazardous','hazardous_code', 'status','seal_1','seal_2',\
        'eta_date','cargo_description','etd_date', 'chasis_no', 'yard_slot','inspection_status','yard_status','final_status','third_work_type'])
    if doctype == "Cargo":
        booking_ref, pat_code, net_weight, last_port, qty, container_no, voyage_no, custom_code, bol, work_type,\
        secondary_work_type, pol, agents, commodity_code, vessel, pod, temperature, mark, container_type, final_dest_port,\
        volume, custom_warrant, container_size, consignee, container_content, stowage, hazardous, hazardous_code, status,\
        seal_1, seal_2, eta_date, cargo_description, etd_date, delivery_code, chasis_no, yard_slot, inspection_status, yard_status,\
        final_status = frappe.db.get_value(doctype, {'name': cargo_ref}, ['booking_ref','pat_code','net_weight',\
        'last_port','qty','container_no','voyage_no','custom_code', 'bol','work_type','secondary_work_type','pol','agents',\
        'commodity_code','vessel','pod','temperature', 'mark','container_type','final_dest_port','volume','custom_warrant',\
        'container_size','consignee','container_content','stowage','hazardous','hazardous_code', 'status','seal_1','seal_2',\
        'eta_date','cargo_description','etd_date','delivery_code', 'chasis_no','yard_slot','inspection_status','yard_status','final_status'])
    # ---- Loading flows -------------------------------------------------
    if final_work_type == "Loading" and secondary_work_type == "Export":
        inspection_status = "Closed"
        movement = "Outbound"
        yard_status = "Closed"
        payment = "Closed"
        gate = "Closed"
        yard_date = now()
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    if final_work_type == "Loading" and secondary_work_type == "Stock":
        # Same as the Export flow, but the stored third_work_type is kept.
        inspection_status = "Closed"
        movement = "Outbound"
        yard_status = "Closed"
        payment = "Closed"
        gate = "Closed"
        yard_date = now()
        final_status = final_work_type
        last_work_type = None
    if final_work_type == "Loading" and secondary_work_type == "Transhipment":
        movement = "Transshipment"
        payment = "Closed"
        gate = "Closed"
        inspection_status = "Closed"
        yard_status = "Closed"
        yard_date = now()
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    if final_work_type == "Loading" and cargo_type == "Split Ports":
        # Evaluated after the secondary_work_type branches, so it overrides them.
        movement = "Split Ports"
        payment = "Closed"
        gate = "Open"
        inspection_status = "Open"
        yard_status = "Closed"
        yard_date = now()
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    # ---- Discharge flows -----------------------------------------------
    if final_work_type == "Discharged" and not secondary_work_type:
        # payment / gate intentionally keep their "" defaults in this branch.
        inspection_status = "Closed"
        movement = "Inspection"
        yard_status = "Open"
        yard_date = None
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    if final_work_type == "Discharged" and secondary_work_type == "Re-stowing":
        movement = "Re-stowing"
        payment = "Closed"
        gate = "Closed"
        inspection_status = "Closed"
        yard_status = "Open"
        yard_date = None
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    if final_work_type == "Discharged" and secondary_work_type == "Transhipment":
        movement = "Transshipment"
        payment = "Closed"
        gate = "Closed"
        inspection_status = "Closed"
        yard_status = "Closed"
        yard_date = now()
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    if final_work_type == "Discharged" and cargo_type == "Split Ports":
        movement = "Split Ports"
        payment = "Open"
        gate = "Open"
        inspection_status = "Closed"
        yard_status = "Closed"
        yard_date = now()
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    # ---- Devanning flows -----------------------------------------------
    if final_work_type == "Discharged" and secondary_work_type == "Devanning" and third_work_type == "Loading" and cargo_type == "Container":
        movement = "Devanning"
        inspection_status = "Closed"
        yard_status = "Closed"
        final_status = "Discharged"
        payment = "Closed"
        gate = "Closed"
        yard_date = None
        third_work_type = None
        last_work_type = None
    if final_work_type == "Discharged" and secondary_work_type == "Devanning" and third_work_type == "Loading" and cargo_type != "Container":
        secondary_work_type = None
        movement = "Inspection"
        inspection_status = "Closed"
        yard_status = "Open"
        final_status = "Devanning"
        payment = "Open"
        gate = "Open"
        yard_date = None
        third_work_type = None
        container_content = None
        last_work_type = None
    if final_work_type == "Discharged" and secondary_work_type == "Devanning" and third_work_type == "Stock" and cargo_type != "Container":
        movement = "Inspection"
        inspection_status = "Closed"
        yard_status = "Open"
        final_status = "Devanning"
        payment = "Open"
        gate = "Open"
        yard_date = None
        # The new Cargo records "Devanning" as its work type in this case.
        final_work_type = "Devanning"
        container_content = None
        last_work_type = None
    if final_work_type == "Discharged" and secondary_work_type == "Devanning" and third_work_type == "Stock" and cargo_type == "Container":
        inspection_status = "Closed"
        yard_status = "Open"
        final_status = "Discharged"
        payment = "Open"
        gate = "Open"
        yard_date = None
        final_work_type = "Devanning"
        container_content = "FULL"
        movement = "Devanning"
        last_work_type = None
    doc = frappe.new_doc("Cargo")
    doc.update({
        "docstatus" : 1,
        "cargo_ref": cargo_ref,
        "booking_ref" : booking_ref,
        "pat_code" : pat_code,
        "net_weight" : net_weight,
        "cargo_type" : cargo_type,
        "qty" : qty,
        "container_no" : container_no,
        "voyage_no" : voyage_no,
        "bol" : bol,
        "work_type" : final_work_type,
        "work_type_date": now(),
        "secondary_work_type" : secondary_work_type,
        "additional_work": third_work_type,
        "last_work" : last_work_type,
        "pol" : pol,
        "agents" : agents,
        "commodity_code" : commodity_code,
        "vessel" : vessel,
        "pod" : pod,
        "temperature" : temperature,
        "container_type" : container_type,
        "mark" : mark,
        "final_dest_port" : final_dest_port,
        "volume" : volume,
        "container_size" : container_size,
        "consignee" : consignee,
        "container_content" : container_content,
        "stowage" : stowage,
        "hazardous" : hazardous,
        "hazardous_code" : hazardous_code,
        "status" : movement,
        "seal_1" : seal_1,
        "seal_2" : seal_2,
        "cargo_description" : cargo_description,
        "eta_date" : eta_date,
        "etd_date" : etd_date,
        "chasis_no" : chasis_no,
        "inspection_status" : inspection_status,
        "yard_status" : yard_status,
        "yard_date" : yard_date,
        "final_status" : final_status,
        "payment_status" : payment,
        "gate1_status" : gate,
        "gate2_status" : gate,
        "custom_warrant" : custom_warrant,
        "custom_code" : custom_code,
        "delivery_code" : delivery_code,
        "inspection_date": now()
    })
    doc.insert(ignore_permissions=True)
    doc.submit()
@frappe.whitelist()
def create_cargo_movement(cargo_ref, work_type, gate_status, gate):
    """Record a submitted Cargo Movement for a gate event on a Cargo record.

    :param cargo_ref: name of the Cargo document the movement belongs to
    :param work_type: work type driving the branch ("Loading" or "Discharged")
    :param gate_status: status value recorded for the gate that fired
    :param gate: doctype holding the gate event ("Gate1" or "Gate2")
    """
    val = frappe.db.get_value("Cargo", {"name": cargo_ref}, ["pat_code","cargo_type","container_no","agents",\
    "container_type","container_size", "chasis_no", "mark", "qty", "consignee", "container_content","cargo_description",\
    "custom_warrant", "eta_date", "etd_date", "booking_ref"], as_dict=True)
    info = frappe.db.get_value(gate, {"cargo_ref": cargo_ref}, ['truck_licenses_plate','drivers_information','modified','name'], as_dict=True)
    # Defaults so an unexpected gate/work_type combination no longer raises
    # NameError (the original left every branch variable unassigned).
    gate_no = gate_date = gate_time = gate_content = None
    gate2_no = gate2_date = gate2_time = None
    reference = None
    if gate == "Gate2" and work_type == "Loading":
        gate2_no = gate_status
        gate2_date = now()
        gate2_time = now()
        gate_content = "FULL"
        reference = info.name
    if gate == "Gate1" and work_type == "Discharged":
        gate_no = gate_status
        gate_date = info.modified
        gate_time = info.modified
        # Bug fix: the original read ``val.name``, but "name" is not among the
        # fetched fields. The Cargo row was fetched by name, so its name is
        # exactly cargo_ref.
        reference = cargo_ref
    doc = frappe.new_doc("Cargo Movement")
    doc.update({
        "docstatus" : 1,
        "pat_code" : val.pat_code,
        "cargo_type" : val.cargo_type,
        "container_no" : val.container_no,
        "work_type" : work_type,
        "agents" : val.agents,
        "container_type" : val.container_type,
        "container_size" : val.container_size,
        "consignee" : val.consignee,
        "container_content" : val.container_content,
        "cargo_description" : val.cargo_description,
        "main_gate_status" : gate2_no,
        "main_gate_date" : gate2_date,
        "main_gate_time" : gate2_time,
        "gate_status" : gate_no,
        "movement_date" : gate_date,
        "gate1_time" : gate_time,
        "truck" : info.truck_licenses_plate,
        "truck_driver" : info.drivers_information,
        "refrence": reference,
        "chasis_no" : val.chasis_no,
        "main_gate_content" : gate_content,
        "mark" : val.mark,
        "qty" : val.qty,
        "warrant_number" : val.custom_warrant,
        "eta_date" : val.eta_date,
        "etd_date" : val.etd_date,
        "booking_ref" : val.booking_ref
    })
    doc.insert(ignore_permissions=True)
    doc.submit()
@frappe.whitelist()
def create_preadvise_history(cargo_ref):
    """Archive a Pre Advice record into Pre Advise History, then delete it.

    Copies every tracked field of the Pre Advice named ``cargo_ref`` onto a
    new, submitted "Pre Advise History" document (with status forced to
    "Yard"), then removes the original Pre Advice row.
    """
    fields = [
        "booking_ref", "pat_code", "net_weight", "cargo_type", "qty",
        "container_no", "voyage_no", "bol", "work_type", "secondary_work_type",
        "pol", "agents", "commodity_code", "vessel", "pod", "temperature",
        "container_type", "mark", "final_dest_port", "volume", "container_size",
        "consignee", "container_content", "stowage", "hazardous",
        "hazardous_code", "status", "seal_1", "seal_2", "eta_date",
        "cargo_description", "etd_date", "chasis_no", "yard_slot",
        "inspection_status", "yard_status", "final_status",
        "break_bulk_item_count", "security_item_count",
    ]
    source = frappe.db.get_value("Pre Advice", {"name": cargo_ref}, fields, as_dict=True)
    # Field names are identical on both doctypes, so copy them wholesale ...
    payload = {field: source.get(field) for field in fields}
    # ... then apply the two deliberate overrides: the history entry is
    # created already submitted, and its status is always "Yard".
    payload["docstatus"] = 1
    payload["status"] = "Yard"
    history = frappe.new_doc("Pre Advise History")
    history.update(payload)
    history.insert(ignore_permissions=True)
    history.submit()
    frappe.db.delete("Pre Advice", {"name": cargo_ref})
@frappe.whitelist()
def update_main_gate_status(name_ref, truck_licenses_plate, drivers_information):
    """Close the main gate on an Export record and log a Cargo Movement "IN" entry.

    Stamps the truck/driver details, audit user and timestamp onto the Export
    row, then creates a submitted Cargo Movement mirroring the event.

    :param name_ref: name of the Export document
    :param truck_licenses_plate: truck licence plate recorded at the gate
    :param drivers_information: driver details recorded at the gate
    """
    full_name = get_user_fullname(frappe.session['user'])
    frappe.db.sql("""UPDATE `tabExport` SET truck_licenses_plate=%s, drivers_information=%s, main_gate_status="Closed",
    main_gate_date =%s, status="Main Gate IN", main_gate_created_by=%s, main_gate_user_name=%s
    WHERE name=%s""", (truck_licenses_plate, drivers_information, now(), frappe.session.user, full_name, name_ref))
    # Bug fix: "consignee" was not fetched in the original, so the movement's
    # consignee below was always None.
    val = frappe.db.get_value("Export", {"name": name_ref}, ["name","cargo_type","container_no","agents","container_type","container_size","consignee","container_content","cargo_description"], as_dict=True)
    if not val.cargo_type:
        if val.container_content == "EMPTY" or val.container_content == "FULL":
            # Bug fix: the original used '==' here (a no-op comparison), so the
            # inferred "Container" cargo type was silently discarded.
            val.cargo_type = "Container"
    doc = frappe.new_doc("Cargo Movement")
    doc.update({
        "docstatus" : 1,
        "cargo_type" : val.cargo_type,
        "container_no" : val.container_no,
        "agents" : val.agents,
        "container_type" : val.container_type,
        "container_size" : val.container_size,
        "consignee" : val.consignee,
        "main_gate_content" : val.container_content,
        "cargo_description" : val.cargo_description,
        "main_gate_status" : "IN",
        "main_gate_date" : now(),
        "main_gate_time" : now(),
        "truck" : truck_licenses_plate,
        "truck_driver" : drivers_information,
        "refrence": val.name
    })
    # NOTE(review): sibling creators insert with ignore_permissions=True;
    # confirm whether enforcing permissions here is intentional.
    doc.insert()
    doc.submit()
@frappe.whitelist()
def update_gate1_status(name_ref):
    """Close gate 1 on an Export record and mirror the event onto its Cargo Movement row."""
    # Full name of the acting user, stored in the Export audit columns.
    full_name = get_user_fullname(frappe.session['user'])
    frappe.db.sql("""UPDATE `tabExport` SET export_gate1_status="Closed", export_gate1_date =%s, status="Gate1 IN",
    gate1_created_by=%s, gate1_user_name=%s
    WHERE name=%s""", (now(), frappe.session.user, full_name, name_ref))
    # Copy the container content onto the linked movement record. "refrence"
    # is the doctype's (misspelled) column name for the Export reference.
    val = frappe.db.get_value("Export", {"name": name_ref}, ["name","container_content"], as_dict=True)
    frappe.db.sql("""Update `tabCargo Movement` set gate_status='IN', container_content=%s, movement_date=%s, gate1_time=%s where refrence=%s""",
    (val.container_content, now(), now(), name_ref))
| 43.804878
| 194
| 0.605308
| 2,961
| 26,940
| 5.142857
| 0.057751
| 0.096664
| 0.059167
| 0.036249
| 0.824796
| 0.805687
| 0.788482
| 0.776202
| 0.769963
| 0.760376
| 0
| 0.003042
| 0.279955
| 26,940
| 615
| 195
| 43.804878
| 0.781988
| 0.009651
| 0
| 0.724199
| 0
| 0.001779
| 0.241687
| 0.00791
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010676
| false
| 0
| 0.010676
| 0
| 0.021352
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
20b2ffa9c9922ec5e76c99dca9c065538352266b
| 152
|
py
|
Python
|
source/tests/metacall_configuration_exec_path_test/data/scripts/main.py
|
Zedonboy/core
|
79a4d959659a0f96b940b28d44476943de120d95
|
[
"Apache-2.0"
] | null | null | null |
source/tests/metacall_configuration_exec_path_test/data/scripts/main.py
|
Zedonboy/core
|
79a4d959659a0f96b940b28d44476943de120d95
|
[
"Apache-2.0"
] | null | null | null |
source/tests/metacall_configuration_exec_path_test/data/scripts/main.py
|
Zedonboy/core
|
79a4d959659a0f96b940b28d44476943de120d95
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3.5
import metacall_configuration_exec_path_test
def main():
    """Exercise the configuration exec-path test module's hello_world entry point."""
    greeting = metacall_configuration_exec_path_test.hello_world('test')
    return greeting
| 21.714286
| 66
| 0.802632
| 21
| 152
| 5.380952
| 0.714286
| 0.371681
| 0.442478
| 0.513274
| 0.584071
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014706
| 0.105263
| 152
| 6
| 67
| 25.333333
| 0.816176
| 0.125
| 0
| 0
| 0
| 0
| 0.031746
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 10
|
20dab5ab4111a24b5eed2612d58a7c0c855c8666
| 130,150
|
py
|
Python
|
com/vmware/nsx/fabric_client.py
|
vishal-12/vsphere-automation-sdk-python
|
9cf363971db77ea5a12928eecd5cf5170a7fcd8a
|
[
"MIT"
] | null | null | null |
com/vmware/nsx/fabric_client.py
|
vishal-12/vsphere-automation-sdk-python
|
9cf363971db77ea5a12928eecd5cf5170a7fcd8a
|
[
"MIT"
] | null | null | null |
com/vmware/nsx/fabric_client.py
|
vishal-12/vsphere-automation-sdk-python
|
9cf363971db77ea5a12928eecd5cf5170a7fcd8a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2019 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.nsx.fabric.
#---------------------------------------------------------------------------
"""
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class ComputeCollectionFabricTemplates(VapiInterface):
    """
    Auto-generated vAPI stub for the
    ``com.vmware.nsx.fabric.compute_collection_fabric_templates`` service,
    exposing create/delete/get/list/update operations. Do not edit by hand --
    see the auto-generation notice in this file's header.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.compute_collection_fabric_templates'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ComputeCollectionFabricTemplatesStub)
    def create(self,
               compute_collection_fabric_template,
               ):
        """
        Fabric templates are fabric configurations applied at the compute
        collection level. This configurations is used to decide what automated
        operations should be a run when a host membership changes.
        :type compute_collection_fabric_template: :class:`com.vmware.nsx.model_client.ComputeCollectionFabricTemplate`
        :param compute_collection_fabric_template: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionFabricTemplate`
        :return: com.vmware.nsx.model.ComputeCollectionFabricTemplate
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create',
                            {
                            'compute_collection_fabric_template': compute_collection_fabric_template,
                            })
    def delete(self,
               fabric_template_id,
               ):
        """
        Deletes compute collection fabric template for the given id
        :type fabric_template_id: :class:`str`
        :param fabric_template_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'fabric_template_id': fabric_template_id,
                            })
    def get(self,
            fabric_template_id,
            ):
        """
        Get compute collection fabric template for the given id
        :type fabric_template_id: :class:`str`
        :param fabric_template_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionFabricTemplate`
        :return: com.vmware.nsx.model.ComputeCollectionFabricTemplate
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'fabric_template_id': fabric_template_id,
                            })
    def list(self,
             compute_collection_id=None,
             ):
        """
        Returns compute collection fabric templates
        :type compute_collection_id: :class:`str` or ``None``
        :param compute_collection_id: Compute collection id (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionFabricTemplateListResult`
        :return: com.vmware.nsx.model.ComputeCollectionFabricTemplateListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'compute_collection_id': compute_collection_id,
                            })
    def update(self,
               fabric_template_id,
               compute_collection_fabric_template,
               ):
        """
        Updates compute collection fabric template for the given id
        :type fabric_template_id: :class:`str`
        :param fabric_template_id: (required)
        :type compute_collection_fabric_template: :class:`com.vmware.nsx.model_client.ComputeCollectionFabricTemplate`
        :param compute_collection_fabric_template: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionFabricTemplate`
        :return: com.vmware.nsx.model.ComputeCollectionFabricTemplate
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'fabric_template_id': fabric_template_id,
                            'compute_collection_fabric_template': compute_collection_fabric_template,
                            })
class ComputeCollections(VapiInterface):
    """
    Auto-generated vAPI stub for the
    ``com.vmware.nsx.fabric.compute_collections`` service, exposing
    create (action), get and list operations on compute collections. Do not
    edit by hand -- see the auto-generation notice in this file's header.
    """
    CREATE_ACTION_NSX = "remove_nsx"
    """
    Possible value for ``action`` of method :func:`ComputeCollections.create`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.compute_collections'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ComputeCollectionsStub)
    def create(self,
               cc_ext_id,
               action=None,
               ):
        """
        Perform action specific to NSX on the compute-collection
        :type cc_ext_id: :class:`str`
        :param cc_ext_id: (required)
        :type action: :class:`str` or ``None``
        :param action: Supported actions on compute-collection (optional)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create',
                            {
                            'cc_ext_id': cc_ext_id,
                            'action': action,
                            })
    def get(self,
            cc_ext_id,
            ):
        """
        Returns information about a specific compute collection.
        :type cc_ext_id: :class:`str`
        :param cc_ext_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeCollection`
        :return: com.vmware.nsx.model.ComputeCollection
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'cc_ext_id': cc_ext_id,
                            })
    def list(self,
             cm_local_id=None,
             cursor=None,
             discovered_node_id=None,
             display_name=None,
             external_id=None,
             included_fields=None,
             node_id=None,
             origin_id=None,
             origin_type=None,
             owner_id=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Returns information about all compute collections.
        :type cm_local_id: :class:`str` or ``None``
        :param cm_local_id: Local Id of the compute collection in the Compute Manager
            (optional)
        :type cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type discovered_node_id: :class:`str` or ``None``
        :param discovered_node_id: Id of the discovered node which belongs to this Compute Collection
            (optional)
        :type display_name: :class:`str` or ``None``
        :param display_name: Name of the ComputeCollection in source compute manager (optional)
        :type external_id: :class:`str` or ``None``
        :param external_id: External ID of the ComputeCollection in the source Compute manager,
            e.g. mo-ref in VC (optional)
        :type included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type node_id: :class:`str` or ``None``
        :param node_id: Id of the fabric node created from a discovered node belonging to
            this Compute Collection (optional)
        :type origin_id: :class:`str` or ``None``
        :param origin_id: Id of the compute manager from where this Compute Collection was
            discovered (optional)
        :type origin_type: :class:`str` or ``None``
        :param origin_type: ComputeCollection type like VC_Cluster. Here the Compute Manager
            type prefix would help in differentiating similar named Compute
            Collection types from different Compute Managers (optional)
        :type owner_id: :class:`str` or ``None``
        :param owner_id: Id of the owner of compute collection in the Compute Manager
            (optional)
        :type page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionListResult`
        :return: com.vmware.nsx.model.ComputeCollectionListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'cm_local_id': cm_local_id,
                            'cursor': cursor,
                            'discovered_node_id': discovered_node_id,
                            'display_name': display_name,
                            'external_id': external_id,
                            'included_fields': included_fields,
                            'node_id': node_id,
                            'origin_id': origin_id,
                            'origin_type': origin_type,
                            'owner_id': owner_id,
                            'page_size': page_size,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            })
class ComputeManagers(VapiInterface):
    """Client stub for the ``com.vmware.nsx.fabric.compute_managers``
    service: CRUD operations on compute manager (e.g. vCenter)
    registrations.
    """
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.compute_managers'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ComputeManagersStub)

    def create(self, compute_manager):
        """
        Registers a compute manager with NSX. The inventory service will
        collect data from the registered compute manager.

        :type  compute_manager: :class:`com.vmware.nsx.model_client.ComputeManager`
        :param compute_manager: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeManager`
        :return: com.vmware.nsx.model.ComputeManager
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {'compute_manager': compute_manager}
        return self._invoke('create', payload)

    def delete(self, compute_manager_id):
        """
        Unregisters a specified compute manager.

        :type  compute_manager_id: :class:`str`
        :param compute_manager_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {'compute_manager_id': compute_manager_id}
        return self._invoke('delete', payload)

    def get(self, compute_manager_id):
        """
        Returns information about a specific compute manager.

        :type  compute_manager_id: :class:`str`
        :param compute_manager_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeManager`
        :return: com.vmware.nsx.model.ComputeManager
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {'compute_manager_id': compute_manager_id}
        return self._invoke('get', payload)

    def list(self,
             cursor=None,
             included_fields=None,
             origin_type=None,
             page_size=None,
             server=None,
             sort_ascending=None,
             sort_by=None):
        """
        Returns information about all compute managers.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  origin_type: :class:`str` or ``None``
        :param origin_type: Compute manager type like vCenter (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  server: :class:`str` or ``None``
        :param server: IP address or hostname of compute manager (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeManagerListResult`
        :return: com.vmware.nsx.model.ComputeManagerListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'cursor': cursor,
            'included_fields': included_fields,
            'origin_type': origin_type,
            'page_size': page_size,
            'server': server,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def update(self, compute_manager_id, compute_manager):
        """
        Updates a specified compute manager.

        :type  compute_manager_id: :class:`str`
        :param compute_manager_id: (required)
        :type  compute_manager: :class:`com.vmware.nsx.model_client.ComputeManager`
        :param compute_manager: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ComputeManager`
        :return: com.vmware.nsx.model.ComputeManager
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'compute_manager_id': compute_manager_id,
            'compute_manager': compute_manager,
        }
        return self._invoke('update', payload)
class DiscoveredNodes(VapiInterface):
    """Client stub for the ``com.vmware.nsx.fabric.discovered_nodes``
    service: read and prepare nodes discovered through a compute manager.
    """
    # Possible value for ``hasParent`` of method :func:`DiscoveredNodes.list`.
    LIST_HAS_PARENT_TRUE = "true"
    # Possible value for ``hasParent`` of method :func:`DiscoveredNodes.list`.
    LIST_HAS_PARENT_FALSE = "false"
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.discovered_nodes'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _DiscoveredNodesStub)

    def get(self, node_ext_id):
        """
        Returns information about a specific discovered node.

        :type  node_ext_id: :class:`str`
        :param node_ext_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.DiscoveredNode`
        :return: com.vmware.nsx.model.DiscoveredNode
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {'node_ext_id': node_ext_id}
        return self._invoke('get', payload)

    def hostprep(self, node_ext_id):
        """
        Prepares (hostprep) a discovered node for NSX: NSX LCP bundles are
        installed on this discovered node.

        :type  node_ext_id: :class:`str`
        :param node_ext_id: (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.Node; the return value will contain all
            the attributes defined in :class:`com.vmware.nsx.model_client.Node`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {'node_ext_id': node_ext_id}
        return self._invoke('hostprep', payload)

    def list(self,
             cm_local_id=None,
             cursor=None,
             display_name=None,
             external_id=None,
             has_parent=None,
             included_fields=None,
             ip_address=None,
             node_id=None,
             node_type=None,
             origin_id=None,
             page_size=None,
             parent_compute_collection=None,
             sort_ascending=None,
             sort_by=None):
        """
        Returns information about all discovered nodes.

        :type  cm_local_id: :class:`str` or ``None``
        :param cm_local_id: Local Id of the discovered node in the Compute
            Manager (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page (optional)
        :type  display_name: :class:`str` or ``None``
        :param display_name: Display name of discovered node (optional)
        :type  external_id: :class:`str` or ``None``
        :param external_id: External id of the discovered node, ex. a mo-ref
            from VC (optional)
        :type  has_parent: :class:`str` or ``None``
        :param has_parent: Discovered node has a parent compute collection or
            is a standalone host (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  ip_address: :class:`str` or ``None``
        :param ip_address: IP address of the discovered node (optional)
        :type  node_id: :class:`str` or ``None``
        :param node_id: Id of the fabric node created from the discovered
            node (optional)
        :type  node_type: :class:`str` or ``None``
        :param node_type: Discovered Node type like HostNode (optional)
        :type  origin_id: :class:`str` or ``None``
        :param origin_id: Id of the compute manager from where this node was
            discovered (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  parent_compute_collection: :class:`str` or ``None``
        :param parent_compute_collection: External id of the compute
            collection to which this node belongs (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.DiscoveredNodeListResult`
        :return: com.vmware.nsx.model.DiscoveredNodeListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'cm_local_id': cm_local_id,
            'cursor': cursor,
            'display_name': display_name,
            'external_id': external_id,
            'has_parent': has_parent,
            'included_fields': included_fields,
            'ip_address': ip_address,
            'node_id': node_id,
            'node_type': node_type,
            'origin_id': origin_id,
            'page_size': page_size,
            'parent_compute_collection': parent_compute_collection,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)
class Nodes(VapiInterface):
    """Client stub for the ``com.vmware.nsx.fabric.nodes`` service: manage
    fabric nodes (hosts and edges), proxy HTTP requests to them, and perform
    node-level actions.  Methods suffixed ``_0`` are generated overloads that
    proxy a request to a target fabric node.
    """
    # Possible values for ``hypervisorOsType`` of method :func:`Nodes.list`.
    LIST_HYPERVISOR_OS_TYPE_ESXI = "ESXI"
    LIST_HYPERVISOR_OS_TYPE_RHELKVM = "RHELKVM"
    LIST_HYPERVISOR_OS_TYPE_UBUNTUKVM = "UBUNTUKVM"
    LIST_HYPERVISOR_OS_TYPE_HYPERV = "HYPERV"
    LIST_HYPERVISOR_OS_TYPE_RHELCONTAINER = "RHELCONTAINER"
    LIST_HYPERVISOR_OS_TYPE_RHELSERVER = "RHELSERVER"
    LIST_HYPERVISOR_OS_TYPE_UBUNTUSERVER = "UBUNTUSERVER"
    LIST_HYPERVISOR_OS_TYPE_CENTOSSERVER = "CENTOSSERVER"
    LIST_HYPERVISOR_OS_TYPE_CENTOSKVM = "CENTOSKVM"
    # Possible values for ``resourceType`` of method :func:`Nodes.list`.
    LIST_RESOURCE_TYPE_HOSTNODE = "HostNode"
    LIST_RESOURCE_TYPE_EDGENODE = "EdgeNode"
    LIST_RESOURCE_TYPE_PUBLICCLOUDGATEWAYNODE = "PublicCloudGatewayNode"
    # Possible values for ``action`` of method :func:`Nodes.performaction`.
    PERFORMACTION_ACTION_ENTER_MAINTENANCE_MODE = "enter_maintenance_mode"
    PERFORMACTION_ACTION_EXIT_MAINTENANCE_MODE = "exit_maintenance_mode"
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.nodes'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _NodesStub)

    def create(self, node):
        """
        Creates a host node (hypervisor) or edge node (router) in the
        transport network. When run for a host, NSX Manager attempts to
        install the NSX kernel modules (packaged as VIB, RPM, or DEB files);
        the host login credentials and host thumbprint must be supplied. To
        get the ESXi host thumbprint, SSH to the host and run **openssl x509
        -in /etc/vmware/ssl/rui.crt -fingerprint -sha256 -noout**. To
        generate a host key thumbprint with SHA-256: log in to the host over
        a connection that is not vulnerable to a man-in-the-middle attack,
        check for an existing public key (generally at
        '/etc/ssh/ssh_host_rsa_key.pub'), generating one with
        **ssh-keygen -t rsa** if absent, then hash it with **awk '{print
        $2}' id_rsa.pub | base64 -d | sha256sum -b | sed 's/ .\*$//' | xxd
        -r -p | base64** (substituting the file name if the key is not the
        default 'id_rsa.pub').

        :type  node: :class:`vmware.vapi.struct.VapiStruct`
        :param node: (required) Must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.Node`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.Node; the return value will contain all
            the attributes defined in :class:`com.vmware.nsx.model_client.Node`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {'node': node}
        return self._invoke('create', payload)

    def create_0(self, target_node_id, target_uri):
        """
        Invoke POST request on target fabric node.

        :type  target_node_id: :class:`str`
        :param target_node_id: Target node UUID (required)
        :type  target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`,
            :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'target_node_id': target_node_id,
            'target_uri': target_uri,
        }
        return self._invoke('create_0', payload)

    def delete(self, node_id, unprepare_host=None):
        """
        Removes a specified fabric node (host or edge). A fabric node may
        only be deleted when it is no longer referenced by a Transport Node.
        If unprepare_host is set to false, the host is deleted without
        uninstalling the NSX components from it.

        :type  node_id: :class:`str`
        :param node_id: (required)
        :type  unprepare_host: :class:`bool` or ``None``
        :param unprepare_host: Delete a host and uninstall NSX components
            (optional, default to true)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'node_id': node_id,
            'unprepare_host': unprepare_host,
        }
        return self._invoke('delete', payload)

    def delete_0(self, target_node_id, target_uri):
        """
        Invoke DELETE request on target fabric node.

        :type  target_node_id: :class:`str`
        :param target_node_id: Target node UUID (required)
        :type  target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`,
            :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'target_node_id': target_node_id,
            'target_uri': target_uri,
        }
        return self._invoke('delete_0', payload)

    def get(self, node_id):
        """
        Returns information about a specific fabric node (host or edge).

        :type  node_id: :class:`str`
        :param node_id: (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.Node; the return value will contain all
            the attributes defined in :class:`com.vmware.nsx.model_client.Node`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {'node_id': node_id}
        return self._invoke('get', payload)

    def get_0(self, target_node_id, target_uri):
        """
        Invoke GET request on target fabric node.

        :type  target_node_id: :class:`str`
        :param target_node_id: Target node UUID (required)
        :type  target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`,
            :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'target_node_id': target_node_id,
            'target_uri': target_uri,
        }
        return self._invoke('get_0', payload)

    def list(self,
             cursor=None,
             discovered_node_id=None,
             display_name=None,
             external_id=None,
             hardware_id=None,
             hypervisor_os_type=None,
             included_fields=None,
             ip_address=None,
             page_size=None,
             resource_type=None,
             sort_ascending=None,
             sort_by=None):
        """
        Returns information about all fabric nodes (hosts and edges).

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page (optional)
        :type  discovered_node_id: :class:`str` or ``None``
        :param discovered_node_id: Id of the discovered node which was
            converted to create this node (optional)
        :type  display_name: :class:`str` or ``None``
        :param display_name: HostNode display name (optional)
        :type  external_id: :class:`str` or ``None``
        :param external_id: HostNode external id (optional)
        :type  hardware_id: :class:`str` or ``None``
        :param hardware_id: Hardware Id of the host (optional)
        :type  hypervisor_os_type: :class:`str` or ``None``
        :param hypervisor_os_type: HostNode's Hypervisor type, for example
            ESXi, RHEL KVM or UBUNTU KVM. (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  ip_address: :class:`str` or ``None``
        :param ip_address: Management IP address of the node (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  resource_type: :class:`str` or ``None``
        :param resource_type: Node type from 'HostNode', 'EdgeNode',
            'PublicCloudGatewayNode' (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.NodeListResult`
        :return: com.vmware.nsx.model.NodeListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'cursor': cursor,
            'discovered_node_id': discovered_node_id,
            'display_name': display_name,
            'external_id': external_id,
            'hardware_id': hardware_id,
            'hypervisor_os_type': hypervisor_os_type,
            'included_fields': included_fields,
            'ip_address': ip_address,
            'page_size': page_size,
            'resource_type': resource_type,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def performaction(self, node_id, action=None):
        """
        The supported fabric node actions are enter_maintenance_mode,
        exit_maintenance_mode for EdgeNode. This API is deprecated; call the
        TransportNode maintenance mode API instead (refer to \"Update
        transport node maintenance mode\").

        :type  node_id: :class:`str`
        :param node_id: (required)
        :type  action: :class:`str` or ``None``
        :param action: Supported fabric node actions (optional)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.Node; the return value will contain all
            the attributes defined in :class:`com.vmware.nsx.model_client.Node`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'node_id': node_id,
            'action': action,
        }
        return self._invoke('performaction', payload)

    def restartinventorysync(self, node_id):
        """
        Restart the inventory sync for the node if it is currently
        internally paused. After this action the next inventory sync coming
        from the node is processed.

        :type  node_id: :class:`str`
        :param node_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {'node_id': node_id}
        return self._invoke('restartinventorysync', payload)

    def update(self, node_id, node):
        """
        Modifies attributes of a fabric node (host or edge).

        :type  node_id: :class:`str`
        :param node_id: (required)
        :type  node: :class:`vmware.vapi.struct.VapiStruct`
        :param node: (required) Must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.Node`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.Node; the return value will contain all
            the attributes defined in :class:`com.vmware.nsx.model_client.Node`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'node_id': node_id,
            'node': node,
        }
        return self._invoke('update', payload)

    def update_0(self, target_node_id, target_uri):
        """
        Invoke PUT request on target fabric node.

        :type  target_node_id: :class:`str`
        :param target_node_id: Target node UUID (required)
        :type  target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`,
            :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'target_node_id': target_node_id,
            'target_uri': target_uri,
        }
        return self._invoke('update_0', payload)

    def upgradeinfra(self, node_id, disable_vm_migration=None):
        """
        Perform a service deployment upgrade on a host node.

        :type  node_id: :class:`str`
        :param node_id: (required)
        :type  disable_vm_migration: :class:`bool` or ``None``
        :param disable_vm_migration: Should VM migration be disabled during
            upgrade (optional, default to false)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.Node; the return value will contain all
            the attributes defined in :class:`com.vmware.nsx.model_client.Node`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'node_id': node_id,
            'disable_vm_migration': disable_vm_migration,
        }
        return self._invoke('upgradeinfra', payload)
class Vifs(VapiInterface):
    """Client stub for the ``com.vmware.nsx.fabric.vifs`` service: query
    virtual network interfaces (VIFs).
    """
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.vifs'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _VifsStub)

    def list(self,
             cursor=None,
             host_id=None,
             included_fields=None,
             lport_attachment_id=None,
             owner_vm_id=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             vm_id=None):
        """
        Returns information about all VIFs. A virtual network interface
        aggregates network interfaces into a logical interface unit that is
        indistinuishable from a physical network interface.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page (optional)
        :type  host_id: :class:`str` or ``None``
        :param host_id: Id of the host where this vif is located. (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  lport_attachment_id: :class:`str` or ``None``
        :param lport_attachment_id: LPort Attachment Id of the virtual
            network interface. (optional)
        :type  owner_vm_id: :class:`str` or ``None``
        :param owner_vm_id: External id of the virtual machine. (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :type  vm_id: :class:`str` or ``None``
        :param vm_id: External id of the virtual machine. (optional)
        :rtype: :class:`com.vmware.nsx.model_client.VirtualNetworkInterfaceListResult`
        :return: com.vmware.nsx.model.VirtualNetworkInterfaceListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'cursor': cursor,
            'host_id': host_id,
            'included_fields': included_fields,
            'lport_attachment_id': lport_attachment_id,
            'owner_vm_id': owner_vm_id,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
            'vm_id': vm_id,
        }
        return self._invoke('list', payload)
class VirtualMachines(VapiInterface):
    """Client stub for the ``com.vmware.nsx.fabric.virtual_machines``
    service: query virtual machines and update their tags.
    """
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.fabric.virtual_machines'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _VirtualMachinesStub)

    def list(self,
             cursor=None,
             display_name=None,
             external_id=None,
             host_id=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None):
        """
        Returns information about all virtual machines.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records,
            as supplied by the current result page (optional)
        :type  display_name: :class:`str` or ``None``
        :param display_name: Display Name of the virtual machine (optional)
        :type  external_id: :class:`str` or ``None``
        :param external_id: External id of the virtual machine (optional)
        :type  host_id: :class:`str` or ``None``
        :param host_id: Id of the host where this vif is located (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.VirtualMachineListResult`
        :return: com.vmware.nsx.model.VirtualMachineListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {
            'cursor': cursor,
            'display_name': display_name,
            'external_id': external_id,
            'host_id': host_id,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def updatetags(self, virtual_machine_tag_update):
        """
        Update tags applied to the virtual machine. The external id of the
        virtual machine is specified in the request body, which should
        contain all the tags to be applied; to clear all tags, provide an
        empty list. A user can apply a maximum of 10 tags on a virtual
        machine; the remaining 5 are reserved for system defined tags.

        :type  virtual_machine_tag_update: :class:`com.vmware.nsx.model_client.VirtualMachineTagUpdate`
        :param virtual_machine_tag_update: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`,
            :class:`com.vmware.vapi.std.errors_client.InvalidRequest`,
            :class:`com.vmware.vapi.std.errors_client.InternalServerError`,
            :class:`com.vmware.vapi.std.errors_client.Unauthorized`, or
            :class:`com.vmware.vapi.std.errors_client.NotFound`
        """
        payload = {'virtual_machine_tag_update': virtual_machine_tag_update}
        return self._invoke('updatetags', payload)
class _ComputeCollectionFabricTemplatesStub(ApiInterfaceStub):
    def __init__(self, config):
        # Namespaces and URL templates shared by every operation below.
        model_ns = 'com.vmware.nsx.model_client'
        errors_ns = 'com.vmware.vapi.std.errors_client'
        base_url = '/api/v1/fabric/compute-collection-fabric-templates'
        item_url = base_url + '/{fabric-template-id}'

        def _standard_errors():
            # All operations of this interface declare the same error set.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(errors_ns, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(errors_ns, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(errors_ns, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(errors_ns, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(errors_ns, 'NotFound'),
            }

        def _template_ref():
            # Reference to the ComputeCollectionFabricTemplate model type.
            return type.ReferenceType(model_ns, 'ComputeCollectionFabricTemplate')

        # create: POST a new fabric template (request body is the template).
        create_input = type.StructType('operation-input', {
            'compute_collection_fabric_template': _template_ref(),
        })
        create_rest = OperationRestMetadata(
            http_method='POST',
            url_template=base_url,
            request_body_parameter='compute_collection_fabric_template',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )
        # delete: remove one template addressed by its identifier.
        delete_input = type.StructType('operation-input', {
            'fabric_template_id': type.StringType(),
        })
        delete_rest = OperationRestMetadata(
            http_method='DELETE',
            url_template=item_url,
            path_variables={
                'fabric_template_id': 'fabric-template-id',
            },
            query_parameters={},
            content_type='application/json'
        )
        # get: read one template by identifier.
        get_input = type.StructType('operation-input', {
            'fabric_template_id': type.StringType(),
        })
        get_rest = OperationRestMetadata(
            http_method='GET',
            url_template=item_url,
            path_variables={
                'fabric_template_id': 'fabric-template-id',
            },
            query_parameters={},
            content_type='application/json'
        )
        # list: enumerate templates, optionally filtered by compute collection.
        list_input = type.StructType('operation-input', {
            'compute_collection_id': type.OptionalType(type.StringType()),
        })
        list_rest = OperationRestMetadata(
            http_method='GET',
            url_template=base_url,
            path_variables={},
            query_parameters={
                'compute_collection_id': 'compute_collection_id',
            },
            content_type='application/json'
        )
        # update: PUT a full replacement of an existing template.
        update_input = type.StructType('operation-input', {
            'fabric_template_id': type.StringType(),
            'compute_collection_fabric_template': _template_ref(),
        })
        update_rest = OperationRestMetadata(
            http_method='PUT',
            url_template=item_url,
            request_body_parameter='compute_collection_fabric_template',
            path_variables={
                'fabric_template_id': 'fabric-template-id',
            },
            query_parameters={},
            content_type='application/json'
        )

        operations = {
            'create': {
                'input_type': create_input,
                'output_type': _template_ref(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input,
                'output_type': type.VoidType(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input,
                'output_type': _template_ref(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input,
                'output_type': type.ReferenceType(model_ns, 'ComputeCollectionFabricTemplateListResult'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input,
                'output_type': _template_ref(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest,
            'delete': delete_rest,
            'get': get_rest,
            'list': list_rest,
            'update': update_rest,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.compute_collection_fabric_templates',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ComputeCollectionsStub(ApiInterfaceStub):
    def __init__(self, config):
        # Namespaces and URL templates shared by every operation below.
        model_ns = 'com.vmware.nsx.model_client'
        errors_ns = 'com.vmware.vapi.std.errors_client'
        base_url = '/api/v1/fabric/compute-collections'
        item_url = base_url + '/{cc-ext-id}'

        def _standard_errors():
            # All operations of this interface declare the same error set.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(errors_ns, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(errors_ns, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(errors_ns, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(errors_ns, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(errors_ns, 'NotFound'),
            }

        # create: POST an action against one compute collection.
        create_input = type.StructType('operation-input', {
            'cc_ext_id': type.StringType(),
            'action': type.OptionalType(type.StringType()),
        })
        create_rest = OperationRestMetadata(
            http_method='POST',
            url_template=item_url,
            path_variables={
                'cc_ext_id': 'cc-ext-id',
            },
            query_parameters={
                'action': 'action',
            },
            content_type='application/json'
        )
        # get: read one compute collection by its external id.
        get_input = type.StructType('operation-input', {
            'cc_ext_id': type.StringType(),
        })
        get_rest = OperationRestMetadata(
            http_method='GET',
            url_template=item_url,
            path_variables={
                'cc_ext_id': 'cc-ext-id',
            },
            query_parameters={},
            content_type='application/json'
        )
        # list: enumerate collections; every filter maps 1:1 onto a query
        # parameter of the same name.
        list_filters = (
            'cm_local_id', 'cursor', 'discovered_node_id', 'display_name',
            'external_id', 'included_fields', 'node_id', 'origin_id',
            'origin_type', 'owner_id', 'page_size', 'sort_ascending',
            'sort_by',
        )

        def _filter_type(name):
            # All filters are strings except the paging size and sort flag.
            if name == 'page_size':
                return type.IntegerType()
            if name == 'sort_ascending':
                return type.BooleanType()
            return type.StringType()

        list_input = type.StructType('operation-input', {
            name: type.OptionalType(_filter_type(name)) for name in list_filters
        })
        list_rest = OperationRestMetadata(
            http_method='GET',
            url_template=base_url,
            path_variables={},
            query_parameters={name: name for name in list_filters},
            content_type='application/json'
        )

        operations = {
            'create': {
                'input_type': create_input,
                'output_type': type.VoidType(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input,
                'output_type': type.ReferenceType(model_ns, 'ComputeCollection'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input,
                'output_type': type.ReferenceType(model_ns, 'ComputeCollectionListResult'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest,
            'get': get_rest,
            'list': list_rest,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.compute_collections',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ComputeManagersStub(ApiInterfaceStub):
    def __init__(self, config):
        # Namespaces and URL templates shared by every operation below.
        model_ns = 'com.vmware.nsx.model_client'
        errors_ns = 'com.vmware.vapi.std.errors_client'
        base_url = '/api/v1/fabric/compute-managers'
        item_url = base_url + '/{compute-manager-id}'

        def _standard_errors():
            # All operations of this interface declare the same error set.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(errors_ns, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(errors_ns, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(errors_ns, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(errors_ns, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(errors_ns, 'NotFound'),
            }

        def _manager_ref():
            # Reference to the ComputeManager model type.
            return type.ReferenceType(model_ns, 'ComputeManager')

        # create: POST a new compute manager registration.
        create_input = type.StructType('operation-input', {
            'compute_manager': _manager_ref(),
        })
        create_rest = OperationRestMetadata(
            http_method='POST',
            url_template=base_url,
            request_body_parameter='compute_manager',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )
        # delete: unregister one compute manager by identifier.
        delete_input = type.StructType('operation-input', {
            'compute_manager_id': type.StringType(),
        })
        delete_rest = OperationRestMetadata(
            http_method='DELETE',
            url_template=item_url,
            path_variables={
                'compute_manager_id': 'compute-manager-id',
            },
            query_parameters={},
            content_type='application/json'
        )
        # get: read one compute manager by identifier.
        get_input = type.StructType('operation-input', {
            'compute_manager_id': type.StringType(),
        })
        get_rest = OperationRestMetadata(
            http_method='GET',
            url_template=item_url,
            path_variables={
                'compute_manager_id': 'compute-manager-id',
            },
            query_parameters={},
            content_type='application/json'
        )
        # list: enumerate compute managers; each filter maps 1:1 onto a query
        # parameter of the same name.
        list_filters = (
            'cursor', 'included_fields', 'origin_type', 'page_size',
            'server', 'sort_ascending', 'sort_by',
        )

        def _filter_type(name):
            # All filters are strings except the paging size and sort flag.
            if name == 'page_size':
                return type.IntegerType()
            if name == 'sort_ascending':
                return type.BooleanType()
            return type.StringType()

        list_input = type.StructType('operation-input', {
            name: type.OptionalType(_filter_type(name)) for name in list_filters
        })
        list_rest = OperationRestMetadata(
            http_method='GET',
            url_template=base_url,
            path_variables={},
            query_parameters={name: name for name in list_filters},
            content_type='application/json'
        )
        # update: PUT a full replacement of an existing registration.
        update_input = type.StructType('operation-input', {
            'compute_manager_id': type.StringType(),
            'compute_manager': _manager_ref(),
        })
        update_rest = OperationRestMetadata(
            http_method='PUT',
            url_template=item_url,
            request_body_parameter='compute_manager',
            path_variables={
                'compute_manager_id': 'compute-manager-id',
            },
            query_parameters={},
            content_type='application/json'
        )

        # NOTE: the per-operation HasFieldsOfValidator placement below mirrors
        # the generated bindings exactly (delete has none; get/list validate
        # only their outputs).
        operations = {
            'create': {
                'input_type': create_input,
                'output_type': _manager_ref(),
                'errors': _standard_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input,
                'output_type': type.VoidType(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input,
                'output_type': _manager_ref(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input,
                'output_type': type.ReferenceType(model_ns, 'ComputeManagerListResult'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input,
                'output_type': _manager_ref(),
                'errors': _standard_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest,
            'delete': delete_rest,
            'get': get_rest,
            'list': list_rest,
            'update': update_rest,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.compute_managers',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _DiscoveredNodesStub(ApiInterfaceStub):
    def __init__(self, config):
        # Namespaces and URL templates shared by every operation below.
        model_ns = 'com.vmware.nsx.model_client'
        errors_ns = 'com.vmware.vapi.std.errors_client'
        base_url = '/api/v1/fabric/discovered-nodes'
        item_url = base_url + '/{node-ext-id}'

        def _standard_errors():
            # All operations of this interface declare the same error set.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(errors_ns, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(errors_ns, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(errors_ns, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(errors_ns, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(errors_ns, 'NotFound'),
            }

        def _node_id_input():
            # get and hostprep share the same single-field input struct.
            return type.StructType('operation-input', {
                'node_ext_id': type.StringType(),
            })

        # get: read one discovered node by its external id.
        get_input = _node_id_input()
        get_rest = OperationRestMetadata(
            http_method='GET',
            url_template=item_url,
            path_variables={
                'node_ext_id': 'node-ext-id',
            },
            query_parameters={},
            content_type='application/json'
        )
        # hostprep: POST the hostprep action against one discovered node.
        hostprep_input = _node_id_input()
        hostprep_rest = OperationRestMetadata(
            http_method='POST',
            url_template=item_url + '?action=hostprep',
            path_variables={
                'node_ext_id': 'node-ext-id',
            },
            query_parameters={},
            content_type='application/json'
        )
        # list: enumerate discovered nodes; each filter maps 1:1 onto a query
        # parameter of the same name.
        list_filters = (
            'cm_local_id', 'cursor', 'display_name', 'external_id',
            'has_parent', 'included_fields', 'ip_address', 'node_id',
            'node_type', 'origin_id', 'page_size',
            'parent_compute_collection', 'sort_ascending', 'sort_by',
        )

        def _filter_type(name):
            # All filters are strings except the paging size and sort flag.
            if name == 'page_size':
                return type.IntegerType()
            if name == 'sort_ascending':
                return type.BooleanType()
            return type.StringType()

        list_input = type.StructType('operation-input', {
            name: type.OptionalType(_filter_type(name)) for name in list_filters
        })
        list_rest = OperationRestMetadata(
            http_method='GET',
            url_template=base_url,
            path_variables={},
            query_parameters={name: name for name in list_filters},
            content_type='application/json'
        )

        operations = {
            'get': {
                'input_type': get_input,
                'output_type': type.ReferenceType(model_ns, 'DiscoveredNode'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'hostprep': {
                'input_type': hostprep_input,
                # hostprep returns a polymorphic Node payload, hence the
                # dynamic struct with a Node reference and an output validator.
                'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType(model_ns, 'Node')]),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input,
                'output_type': type.ReferenceType(model_ns, 'DiscoveredNodeListResult'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest,
            'hostprep': hostprep_rest,
            'list': list_rest,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.discovered_nodes',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _NodesStub(ApiInterfaceStub):
    """Generated REST stub for the ``com.vmware.nsx.fabric.nodes`` service.

    Maps each vAPI operation onto its NSX REST endpoint under
    ``/api/v1/fabric/nodes``: CRUD on fabric nodes, node actions
    (``performaction``, ``restartinventorysync``, ``upgradeinfra``), and the
    ``*_0`` variants that forward an arbitrary request to a target node via
    ``/{target-node-id}/{target-uri}``.  Auto-generated; do not edit by hand.
    """
    def __init__(self, config):
        # For every operation the generator emits four artifacts: the input
        # struct type, the error-name -> error-type map, the input/output
        # validator lists, and the REST metadata (HTTP method, URL template,
        # path/query parameter name mappings).
        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'node': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
        })
        create_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        create_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        create_output_validator_list = [
            HasFieldsOfValidator()
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/fabric/nodes',
            request_body_parameter='node',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for create_0 operation
        create_0_input_type = type.StructType('operation-input', {
            'target_node_id': type.StringType(),
            'target_uri': type.StringType(),
        })
        create_0_error_dict = {
            'com.vmware.vapi.std.errors.timed_out':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'TimedOut'),
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        create_0_input_value_validator_list = [
        ]
        create_0_output_validator_list = [
        ]
        create_0_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/fabric/nodes/{target-node-id}/{target-uri}',
            path_variables={
                'target_node_id': 'target-node-id',
                'target_uri': 'target-uri',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'node_id': type.StringType(),
            'unprepare_host': type.OptionalType(type.BooleanType()),
        })
        delete_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/fabric/nodes/{node-id}',
            path_variables={
                'node_id': 'node-id',
            },
            query_parameters={
                'unprepare_host': 'unprepare_host',
            },
            content_type='application/json'
        )
        # properties for delete_0 operation
        delete_0_input_type = type.StructType('operation-input', {
            'target_node_id': type.StringType(),
            'target_uri': type.StringType(),
        })
        delete_0_error_dict = {
            'com.vmware.vapi.std.errors.timed_out':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'TimedOut'),
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_0_input_value_validator_list = [
        ]
        delete_0_output_validator_list = [
        ]
        delete_0_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/fabric/nodes/{target-node-id}/{target-uri}',
            path_variables={
                'target_node_id': 'target-node-id',
                'target_uri': 'target-uri',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'node_id': type.StringType(),
        })
        get_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
            HasFieldsOfValidator()
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/nodes/{node-id}',
            path_variables={
                'node_id': 'node-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for get_0 operation
        get_0_input_type = type.StructType('operation-input', {
            'target_node_id': type.StringType(),
            'target_uri': type.StringType(),
        })
        get_0_error_dict = {
            'com.vmware.vapi.std.errors.timed_out':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'TimedOut'),
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        get_0_input_value_validator_list = [
        ]
        get_0_output_validator_list = [
        ]
        get_0_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/nodes/{target-node-id}/{target-uri}',
            path_variables={
                'target_node_id': 'target-node-id',
                'target_uri': 'target-uri',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'discovered_node_id': type.OptionalType(type.StringType()),
            'display_name': type.OptionalType(type.StringType()),
            'external_id': type.OptionalType(type.StringType()),
            'hardware_id': type.OptionalType(type.StringType()),
            'hypervisor_os_type': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'ip_address': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'resource_type': type.OptionalType(type.StringType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
            HasFieldsOfValidator()
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/nodes',
            path_variables={
            },
            query_parameters={
                'cursor': 'cursor',
                'discovered_node_id': 'discovered_node_id',
                'display_name': 'display_name',
                'external_id': 'external_id',
                'hardware_id': 'hardware_id',
                'hypervisor_os_type': 'hypervisor_os_type',
                'included_fields': 'included_fields',
                'ip_address': 'ip_address',
                'page_size': 'page_size',
                'resource_type': 'resource_type',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        # properties for performaction operation
        performaction_input_type = type.StructType('operation-input', {
            'node_id': type.StringType(),
            'action': type.OptionalType(type.StringType()),
        })
        performaction_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        performaction_input_value_validator_list = [
        ]
        performaction_output_validator_list = [
            HasFieldsOfValidator()
        ]
        performaction_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/fabric/nodes/{node-id}',
            path_variables={
                'node_id': 'node-id',
            },
            query_parameters={
                'action': 'action',
            },
            content_type='application/json'
        )
        # properties for restartinventorysync operation
        # NOTE: the fixed 'action' query argument is baked into the URL
        # template rather than passed through query_parameters.
        restartinventorysync_input_type = type.StructType('operation-input', {
            'node_id': type.StringType(),
        })
        restartinventorysync_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        restartinventorysync_input_value_validator_list = [
        ]
        restartinventorysync_output_validator_list = [
        ]
        restartinventorysync_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/fabric/nodes/{node-id}?action=restart_inventory_sync',
            path_variables={
                'node_id': 'node-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'node_id': type.StringType(),
            'node': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
        })
        update_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        update_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        update_output_validator_list = [
            HasFieldsOfValidator()
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/fabric/nodes/{node-id}',
            request_body_parameter='node',
            path_variables={
                'node_id': 'node-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for update_0 operation
        update_0_input_type = type.StructType('operation-input', {
            'target_node_id': type.StringType(),
            'target_uri': type.StringType(),
        })
        update_0_error_dict = {
            'com.vmware.vapi.std.errors.timed_out':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'TimedOut'),
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        update_0_input_value_validator_list = [
        ]
        update_0_output_validator_list = [
        ]
        update_0_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/fabric/nodes/{target-node-id}/{target-uri}',
            path_variables={
                'target_node_id': 'target-node-id',
                'target_uri': 'target-uri',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for upgradeinfra operation
        upgradeinfra_input_type = type.StructType('operation-input', {
            'node_id': type.StringType(),
            'disable_vm_migration': type.OptionalType(type.BooleanType()),
        })
        upgradeinfra_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        upgradeinfra_input_value_validator_list = [
        ]
        upgradeinfra_output_validator_list = [
            HasFieldsOfValidator()
        ]
        upgradeinfra_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/fabric/nodes/{node-id}?action=upgrade_infra',
            path_variables={
                'node_id': 'node-id',
            },
            query_parameters={
                'disable_vm_migration': 'disable_vm_migration',
            },
            content_type='application/json'
        )
        # Operation registry: ties every operation name to its input/output
        # types, error map and validators for the vAPI runtime.
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'create_0': {
                'input_type': create_0_input_type,
                'output_type': type.VoidType(),
                'errors': create_0_error_dict,
                'input_value_validator_list': create_0_input_value_validator_list,
                'output_validator_list': create_0_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete_0': {
                'input_type': delete_0_input_type,
                'output_type': type.VoidType(),
                'errors': delete_0_error_dict,
                'input_value_validator_list': delete_0_input_value_validator_list,
                'output_validator_list': delete_0_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get_0': {
                'input_type': get_0_input_type,
                'output_type': type.VoidType(),
                'errors': get_0_error_dict,
                'input_value_validator_list': get_0_input_value_validator_list,
                'output_validator_list': get_0_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'performaction': {
                'input_type': performaction_input_type,
                'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
                'errors': performaction_error_dict,
                'input_value_validator_list': performaction_input_value_validator_list,
                'output_validator_list': performaction_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'restartinventorysync': {
                'input_type': restartinventorysync_input_type,
                'output_type': type.VoidType(),
                'errors': restartinventorysync_error_dict,
                'input_value_validator_list': restartinventorysync_input_value_validator_list,
                'output_validator_list': restartinventorysync_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update_0': {
                'input_type': update_0_input_type,
                'output_type': type.VoidType(),
                'errors': update_0_error_dict,
                'input_value_validator_list': update_0_input_value_validator_list,
                'output_validator_list': update_0_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'upgradeinfra': {
                'input_type': upgradeinfra_input_type,
                'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'Node')]),
                'errors': upgradeinfra_error_dict,
                'input_value_validator_list': upgradeinfra_input_value_validator_list,
                'output_validator_list': upgradeinfra_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        # REST dispatch table: operation name -> its REST metadata.
        rest_metadata = {
            'create': create_rest_metadata,
            'create_0': create_0_rest_metadata,
            'delete': delete_rest_metadata,
            'delete_0': delete_0_rest_metadata,
            'get': get_rest_metadata,
            'get_0': get_0_rest_metadata,
            'list': list_rest_metadata,
            'performaction': performaction_rest_metadata,
            'restartinventorysync': restartinventorysync_rest_metadata,
            'update': update_rest_metadata,
            'update_0': update_0_rest_metadata,
            'upgradeinfra': upgradeinfra_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.nodes',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _VifsStub(ApiInterfaceStub):
    """Generated REST stub for the ``com.vmware.nsx.fabric.vifs`` service.

    Exposes a single ``list`` operation over ``GET /api/v1/fabric/vifs``
    returning virtual network interfaces.  Auto-generated; do not edit by
    hand.
    """
    def __init__(self, config):
        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'host_id': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'lport_attachment_id': type.OptionalType(type.StringType()),
            'owner_vm_id': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
            'vm_id': type.OptionalType(type.StringType()),
        })
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/vifs',
            path_variables={
            },
            query_parameters={
                'cursor': 'cursor',
                'host_id': 'host_id',
                'included_fields': 'included_fields',
                'lport_attachment_id': 'lport_attachment_id',
                'owner_vm_id': 'owner_vm_id',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
                'vm_id': 'vm_id',
            },
            content_type='application/json'
        )
        operations = {
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'VirtualNetworkInterfaceListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.vifs',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _VirtualMachinesStub(ApiInterfaceStub):
    """Generated REST stub for ``com.vmware.nsx.fabric.virtual_machines``.

    Exposes ``list`` (``GET /api/v1/fabric/virtual-machines``) and
    ``updatetags`` (``POST ...?action=update_tags``).  Auto-generated; do
    not edit by hand.
    """
    def __init__(self, config):
        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'display_name': type.OptionalType(type.StringType()),
            'external_id': type.OptionalType(type.StringType()),
            'host_id': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/fabric/virtual-machines',
            path_variables={
            },
            query_parameters={
                'cursor': 'cursor',
                'display_name': 'display_name',
                'external_id': 'external_id',
                'host_id': 'host_id',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        # properties for updatetags operation
        # NOTE: the fixed 'action' query argument is baked into the URL
        # template; the tag-update payload travels in the request body.
        updatetags_input_type = type.StructType('operation-input', {
            'virtual_machine_tag_update': type.ReferenceType('com.vmware.nsx.model_client', 'VirtualMachineTagUpdate'),
        })
        updatetags_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        updatetags_input_value_validator_list = [
        ]
        updatetags_output_validator_list = [
        ]
        updatetags_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/fabric/virtual-machines?action=update_tags',
            request_body_parameter='virtual_machine_tag_update',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        operations = {
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'VirtualMachineListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'updatetags': {
                'input_type': updatetags_input_type,
                'output_type': type.VoidType(),
                'errors': updatetags_error_dict,
                'input_value_validator_list': updatetags_input_value_validator_list,
                'output_validator_list': updatetags_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'list': list_rest_metadata,
            'updatetags': updatetags_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.fabric.virtual_machines',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class StubFactory(StubFactoryBase):
    """Stub factory for the ``com.vmware.nsx.fabric`` package.

    Class values map attribute names to the service stub classes defined in
    this module; string values are dotted paths to sub-package
    ``StubFactory`` classes, resolved lazily by ``StubFactoryBase``.
    """
    _attrs = {
        'ComputeCollectionFabricTemplates': ComputeCollectionFabricTemplates,
        'ComputeCollections': ComputeCollections,
        'ComputeManagers': ComputeManagers,
        'DiscoveredNodes': DiscoveredNodes,
        'Nodes': Nodes,
        'Vifs': Vifs,
        'VirtualMachines': VirtualMachines,
        'compute_managers': 'com.vmware.nsx.fabric.compute_managers_client.StubFactory',
        'nodes': 'com.vmware.nsx.fabric.nodes_client.StubFactory',
    }
| 44.832931
| 161
| 0.593446
| 12,985
| 130,150
| 5.713362
| 0.038121
| 0.06927
| 0.083585
| 0.102874
| 0.891654
| 0.87746
| 0.849073
| 0.828948
| 0.812584
| 0.805521
| 0
| 0.001568
| 0.299262
| 130,150
| 2,902
| 162
| 44.84838
| 0.811888
| 0.273162
| 0
| 0.707424
| 1
| 0.001638
| 0.308974
| 0.198952
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024563
| false
| 0
| 0.00655
| 0
| 0.069869
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
454aa25823500204a4e665ef8eb58efa3251d028
| 10,565
|
py
|
Python
|
required_modules/advanced_decoder.py
|
maiyuren/Hybrid-Quantum-Autoencoder
|
4c176519b0f0fe1a644beafedc0c7754bdb65435
|
[
"MIT"
] | 1
|
2021-10-02T23:18:18.000Z
|
2021-10-02T23:18:18.000Z
|
required_modules/advanced_decoder.py
|
maiyuren/Hybrid-Quantum-Autoencoder
|
4c176519b0f0fe1a644beafedc0c7754bdb65435
|
[
"MIT"
] | null | null | null |
required_modules/advanced_decoder.py
|
maiyuren/Hybrid-Quantum-Autoencoder
|
4c176519b0f0fe1a644beafedc0c7754bdb65435
|
[
"MIT"
] | null | null | null |
import sys
import gaussian_decoding as gd
class EnhancedDecoder(gd.nn.Module):
    """Hybrid quantum-classical decoder emitting Gaussian (mean, std) values.

    Two parameterised quantum circuits — one for the means, one for the
    standard deviations — each feed a small classical head of three linear
    layers.  ``forward`` returns a ``(batch, 2 * reg_size)`` tensor of the
    concatenated mean and std outputs, squashed into ``[0, 1]``.
    """
    name = 'EnhancedDecoder'

    def __init__(self, input_size, n_qubits, q_depth, q_delta, reg_size=2, gate_type='ry', shots=None):
        """
        :param input_size: input size
        :param n_qubits: number of qubits
        :param q_depth: number of layers of the parameterised rotations
        :param q_delta: Initial spread of random quantum weights
        :param reg_size: number of outputs per branch, or 'binary' to use
            one output per qubit
        :param gate_type: parameterised gate family, 'ry' or 'u2'
        :param shots: if given, reconfigure the simulator to sample with
            this many shots
        :raises ValueError: if ``gate_type`` is not 'ry' or 'u2' (the
            original silently deferred this to a later AttributeError)
        """
        super().__init__()
        self.input_size = input_size
        self.n_qubits = n_qubits
        self.q_depth = q_depth
        self.q_delta = q_delta
        self.gate_type = gate_type
        self.train_shots = shots

        # 'u2' gates take two angles per qubit per layer; 'ry' takes one.
        if gate_type == 'ry':
            self.num_params = self.q_depth * n_qubits
        elif gate_type == 'u2':
            self.num_params = 2 * self.q_depth * n_qubits
        else:
            raise ValueError("unsupported gate_type: %r (expected 'ry' or 'u2')" % (gate_type,))

        # Configure the global quantum device once.  The original repeated
        # these identical reset calls up to three times; gd.reset_* is a
        # device (re)configuration, so a single call with the final
        # settings is sufficient.
        gd.reset_q_num(self.n_qubits, set_gate_type=self.gate_type)
        if shots:
            gd.reset_aer(self.n_qubits, shots=shots)

        # 'binary' mode emits one value per qubit; otherwise reg_size must
        # fit in the register.  (The original duplicated this block; the
        # second copy was a no-op and has been dropped.)
        if reg_size == 'binary':
            self.reg_size = n_qubits
            self.binary = True
        else:
            assert reg_size <= n_qubits
            self.reg_size = reg_size
            self.binary = False

        # Trainable circuit weights, initialised with spread q_delta.
        self.q_params_mean = gd.nn.Parameter(q_delta * gd.torch.randn(self.num_params))
        self.q_params_std = gd.nn.Parameter(q_delta * gd.torch.randn(self.num_params))

        # Classical post-processing heads, one per output branch.
        classical_layer1_size = 100
        self.c_layer1_mean = gd.nn.Linear(n_qubits, classical_layer1_size)
        self.c_layer2_mean = gd.nn.Linear(classical_layer1_size, n_qubits * 2)
        self.c_out_mean = gd.nn.Linear(n_qubits * 2, self.reg_size)
        self.c_layer1_std = gd.nn.Linear(n_qubits, classical_layer1_size)
        self.c_layer2_std = gd.nn.Linear(classical_layer1_size, n_qubits * 2)
        self.c_out_std = gd.nn.Linear(n_qubits * 2, self.reg_size)

        # Bookkeeping used by the training / persistence code elsewhere.
        self.model_info = {'input_size': input_size, 'n_qubits': n_qubits, 'q_depth': q_depth,
                           'q_delta': q_delta, 'reg_size': reg_size, 'model': 'EnhancedDecoder'}
        self.train_time = 0
        self.distributions = None
        self.train_logistic_k = None
        self.train_p_fuzz = None
        self.batch_size = None
        self.last_state = [None, None]

    def forward(self, x, save_state=False, sample_num=None):
        """Decode amplitude vectors into Gaussian (mean, std) parameters.

        :param x: a single amplitude vector or a batch (matrix) of them
        :param save_state: if True, store the simulator states of the mean
            and std circuits in ``self.last_state``
        :param sample_num: if given, add uniform jitter scaled by
            ``1 / sqrt(sample_num)`` to emulate finite sampling
        :return: tensor of shape ``(batch, 2 * reg_size)`` with values in
            ``[0, 1]``
        """
        q_in = x
        q_out = gd.torch.Tensor(0, 2 * self.reg_size)
        # The quantum net handles one amplitude vector at a time, so a bare
        # vector is promoted to a batch of one before the loop.
        if len(q_in.shape) == 1:
            if isinstance(q_in, gd.np.ndarray):
                q_in = gd.np.array([q_in])
            else:
                q_in = q_in.unsqueeze(0)
        for elem in q_in:
            # Mean branch: quantum circuit then the classical head;
            # tanh * pi/2 bounds the raw output to (-pi/2, pi/2).
            # (A leftover debug print of the circuit output was removed.)
            q_out_elem_mean = gd.q_net(self.q_params_mean, n_qubits=self.n_qubits, amplitudes=elem,
                                       q_depth=self.q_depth, reg_size=self.n_qubits).float()
            q_out_elem_mean = gd.torch.sigmoid(self.c_layer1_mean(q_out_elem_mean))
            q_out_elem_mean = gd.torch.sigmoid(self.c_layer2_mean(q_out_elem_mean))
            q_out_elem_mean = gd.torch.tanh(self.c_out_mean(q_out_elem_mean)) * gd.np.pi / 2.0
            if save_state:
                # Capture the device state before the std circuit overwrites it.
                m_state = gd.dev.state
            # Std branch: identical structure with its own weights.
            q_out_elem_std = gd.q_net(self.q_params_std, n_qubits=self.n_qubits, amplitudes=elem,
                                      q_depth=self.q_depth, reg_size=self.n_qubits).float()
            q_out_elem_std = gd.torch.sigmoid(self.c_layer1_std(q_out_elem_std))
            q_out_elem_std = gd.torch.sigmoid(self.c_layer2_std(q_out_elem_std))
            q_out_elem_std = gd.torch.tanh(self.c_out_std(q_out_elem_std)) * gd.np.pi / 2.0
            if save_state:
                self.last_state = [m_state, gd.dev.state]
            q_out_elem = gd.torch.cat((q_out_elem_mean, q_out_elem_std)).unsqueeze(0)
            if sample_num:
                # Jitter each of the 2 * reg_size outputs.  The original
                # hard-coded 16 here, which only broadcast correctly when
                # reg_size == 8; sized to the actual output width instead.
                q_out_elem = q_out_elem + gd.torch.tensor(
                    gd.np.random.random(2 * self.reg_size) - 0.5) / sample_num ** 0.5
            q_out_elem = q_out_elem.float()
            # Map the output into [0, 1]: a logistic of slope k when
            # configured, otherwise the affine (x + 1) / 2.
            if self.train_logistic_k:
                q_out_elem = gd.sigmoid_k(q_out_elem, self.train_logistic_k)
            else:
                q_out_elem = (q_out_elem + 1) / 2
            q_out = gd.torch.cat((q_out, q_out_elem))
        return q_out
class AdvancedDecoder(gd.nn.Module):
    """Two-stage hybrid quantum/classical decoder.

    Unlike the other decoders, the classical MLP heads produce *parameters*
    for a second quantum circuit rather than the final output directly.
    """

    name = 'AdvancedDecoder'

    def __init__(self, input_size, n_qubits, q_depth, q_delta, reg_size=2, gate_type='ry', shots=None):
        """
        :param input_size: input size
        :param n_qubits: number of qubits
        :param q_depth: number of layers of the parameterised rotations;
            an int/float applies to both stages, a pair sets each stage.
        :param q_delta: Initial spread of random quantum weights
        :param reg_size: output register size, or the string 'binary' to use
            one output per qubit
        :param gate_type: parameterised gate, 'ry' (1 param/gate) or
            'u2' (2 params/gate)
        :param shots: number of measurement shots used during training
        :raises ValueError: if ``gate_type`` is not 'ry' or 'u2' (previously
            this silently left ``num_params`` undefined, failing later with
            an AttributeError)
        """
        super().__init__()
        self.input_size = input_size
        self.n_qubits = n_qubits
        gd.reset_q_num(n_qubits)
        self.gate_type = gate_type
        self.train_shots = shots
        # Normalise q_depth to a [stage-1, stage-2] pair.
        if isinstance(q_depth, (int, float)):
            self.q_depth = [q_depth, q_depth]
        else:
            self.q_depth = q_depth
        # Parameters per stage: one rotation angle per qubit per layer for
        # 'ry', two for 'u2'.
        if gate_type == 'ry':
            param_factor = 1
        elif gate_type == 'u2':
            param_factor = 2
        else:
            raise ValueError(
                "unsupported gate_type: %r (expected 'ry' or 'u2')" % (gate_type,))
        self.num_params = [param_factor * self.q_depth[0] * n_qubits,
                           param_factor * self.q_depth[1] * n_qubits]
        gd.reset_q_num(n_qubits, set_gate_type=gate_type)
        if reg_size == 'binary':
            self.reg_size = n_qubits
            self.binary = True
        else:
            assert reg_size <= n_qubits
            self.reg_size = reg_size
            self.binary = False
        # Trainable parameters of the first-stage circuits.
        self.q_params_mean = gd.nn.Parameter(q_delta * gd.torch.randn(self.num_params[0]))
        self.q_params_std = gd.nn.Parameter(q_delta * gd.torch.randn(self.num_params[0]))
        # Classical heads map first-stage measurements to second-stage
        # circuit parameters (num_params[1] angles each).
        classical_layer1_size = 100
        self.c_layer1_mean = gd.nn.Linear(n_qubits, classical_layer1_size)
        self.c_layer2_mean = gd.nn.Linear(classical_layer1_size, self.num_params[1] * 2)
        self.c_out_mean = gd.nn.Linear(self.num_params[1] * 2, self.num_params[1])
        self.c_layer1_std = gd.nn.Linear(n_qubits, classical_layer1_size)
        self.c_layer2_std = gd.nn.Linear(classical_layer1_size, self.num_params[1] * 2)
        self.c_out_std = gd.nn.Linear(self.num_params[1] * 2, self.num_params[1])
        self.model_info = {'input_size': input_size, 'n_qubits': n_qubits, 'q_depth': q_depth,
                           'q_delta': q_delta, 'reg_size': reg_size, 'model': 'AdvancedDecoder'}
        self.train_time = 0
        self.distributions = None
        self.batch_size = None
        self.train_logistic_k = None
        self.train_p_fuzz = None
        self.last_state = [None, None]

    def forward(self, x, save_state=False, sample_num=None):
        """Run the two-stage decoder over a batch of amplitude vectors.

        :param x: batch of amplitude vectors; a single 1-D vector is also
            accepted and promoted to a batch of one.
        :param save_state: when True, store the simulator states of the mean
            and std circuits in ``self.last_state``.
        :param sample_num: when truthy, add uniform noise scaled by
            ``1 / sqrt(sample_num)`` (shot-noise emulation).
        :return: tensor of shape (batch, 2 * reg_size), values mapped into
            the [0, 1] range.
        """
        q_in = x
        q_out = gd.torch.Tensor(0, 2 * self.reg_size)
        # The quantum net processes one element at a time, so a batch
        # (matrix) cannot be pushed through in a single call like the
        # classical layers. Promote a lone vector to a batch of one first.
        if len(q_in.shape) == 1:
            if isinstance(q_in, gd.np.ndarray):
                q_in = gd.np.array([q_in])
            else:
                q_in = q_in.unsqueeze(0)
        for elem in q_in:
            # Mean head: stage-1 circuit -> classical MLP -> angles in
            # [-pi/2, pi/2] -> stage-2 circuit measured on reg_size wires.
            q_out_elem_mean = gd.q_net(self.q_params_mean, n_qubits=self.n_qubits, amplitudes=elem,
                                       q_depth=self.q_depth[0], reg_size=self.n_qubits).float()
            q_out_elem_mean = gd.torch.sigmoid(self.c_layer1_mean(q_out_elem_mean))
            q_out_elem_mean = gd.torch.sigmoid(self.c_layer2_mean(q_out_elem_mean))
            q_out_elem_mean = gd.torch.tanh(self.c_out_mean(q_out_elem_mean)) * gd.np.pi / 2.0
            q_out_elem_mean = gd.q_net(q_out_elem_mean, n_qubits=self.n_qubits, amplitudes=elem,
                                       q_depth=self.q_depth[1], reg_size=self.reg_size).float()
            if save_state:
                # Snapshot the device state of the mean circuit before the
                # std circuit overwrites it.
                m_state = gd.dev.state
            # Std head: identical pipeline with independent parameters.
            q_out_elem_std = gd.q_net(self.q_params_std, n_qubits=self.n_qubits, amplitudes=elem,
                                      q_depth=self.q_depth[0], reg_size=self.n_qubits).float()
            q_out_elem_std = gd.torch.sigmoid(self.c_layer1_std(q_out_elem_std))
            q_out_elem_std = gd.torch.sigmoid(self.c_layer2_std(q_out_elem_std))
            q_out_elem_std = gd.torch.tanh(self.c_out_std(q_out_elem_std)) * gd.np.pi / 2.0
            q_out_elem_std = gd.q_net(q_out_elem_std, n_qubits=self.n_qubits, amplitudes=elem,
                                      q_depth=self.q_depth[1], reg_size=self.reg_size).float()
            if save_state:
                self.last_state = [m_state, gd.dev.state]
            q_out_elem = gd.torch.cat((q_out_elem_mean, q_out_elem_std)).unsqueeze(0)
            if sample_num:
                # NOTE(review): the noise width 16 is hard-coded and only
                # matches an output of 2 * reg_size == 16 — confirm.
                q_out_elem = q_out_elem + gd.torch.tensor(gd.np.random.random(16) - 0.5) / sample_num ** 0.5
            q_out_elem = q_out_elem.float()
            # Transformation of the output to be in between 0 and 1.
            if self.train_logistic_k:
                q_out_elem = gd.sigmoid_k(q_out_elem, self.train_logistic_k)
            else:
                q_out_elem = (q_out_elem + 1) / 2
            q_out = gd.torch.cat((q_out, q_out_elem))
        return q_out
| 44.578059
| 116
| 0.59044
| 1,579
| 10,565
| 3.60228
| 0.086764
| 0.04993
| 0.082982
| 0.044304
| 0.956927
| 0.949015
| 0.940225
| 0.930204
| 0.912623
| 0.899437
| 0
| 0.014178
| 0.312352
| 10,565
| 236
| 117
| 44.766949
| 0.768754
| 0.09115
| 0
| 0.823529
| 0
| 0
| 0.020987
| 0
| 0
| 0
| 0
| 0
| 0.017647
| 1
| 0.023529
| false
| 0
| 0.011765
| 0
| 0.070588
| 0.005882
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
45b324418f3e5410c11b25af054f25136ee22c4c
| 131
|
py
|
Python
|
cartoonify/app/image_processor/__init__.py
|
theendsofinvention/cartoonify
|
a922ff0e24d0671c83073f5338a5820e3afd1dfc
|
[
"MIT"
] | 1,991
|
2018-07-03T16:12:28.000Z
|
2022-03-31T22:57:33.000Z
|
cartoonify/app/image_processor/__init__.py
|
theendsofinvention/cartoonify
|
a922ff0e24d0671c83073f5338a5820e3afd1dfc
|
[
"MIT"
] | 40
|
2018-07-05T11:12:53.000Z
|
2022-03-11T23:26:34.000Z
|
cartoonify/app/image_processor/__init__.py
|
whiteboarddan/cartoonify
|
39ea84d96b3e93f0480e6d6158bea506d01278ca
|
[
"MIT"
] | 201
|
2018-07-04T16:16:57.000Z
|
2022-03-10T14:39:04.000Z
|
from .imageprocessor import ImageProcessor
from .imageprocessor import tensorflow_model_name
from .imageprocessor import model_path
| 43.666667
| 49
| 0.89313
| 15
| 131
| 7.6
| 0.466667
| 0.473684
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083969
| 131
| 3
| 50
| 43.666667
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b3699988cf74a856c5921b0461fd5edecedab72b
| 123
|
py
|
Python
|
cpab/cpa1d/utils/__init__.py
|
freifeld/cpabDiffeo
|
22df6cdbd7111b9ae3e7f1c0e31ff85e92d281a6
|
[
"MIT"
] | 17
|
2016-03-16T21:35:36.000Z
|
2021-11-11T04:16:21.000Z
|
cpab/cpaHd/utils/__init__.py
|
freifeld/cpabDiffeo
|
22df6cdbd7111b9ae3e7f1c0e31ff85e92d281a6
|
[
"MIT"
] | null | null | null |
cpab/cpaHd/utils/__init__.py
|
freifeld/cpabDiffeo
|
22df6cdbd7111b9ae3e7f1c0e31ff85e92d281a6
|
[
"MIT"
] | 4
|
2016-08-12T23:02:09.000Z
|
2019-03-14T18:20:36.000Z
|
#from _create_cells import create_cells
#from _create_verts_and_H import create_verts_and_H
from constraints import *
| 15.375
| 52
| 0.837398
| 19
| 123
| 4.894737
| 0.421053
| 0.215054
| 0.301075
| 0.322581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138211
| 123
| 7
| 53
| 17.571429
| 0.877358
| 0.723577
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2fdb716c5153951ce09bc9e9a50fa2ee4853b221
| 14,732
|
py
|
Python
|
test/test_session.py
|
willforde/urlquick
|
a62f6e81a1831436b04baef4faa0f60c93d98936
|
[
"MIT"
] | 7
|
2017-08-15T23:57:35.000Z
|
2022-03-21T23:22:32.000Z
|
test/test_session.py
|
rrosajp/urlquick
|
56312c9393c212d9e982f583af9669c1c9448b6f
|
[
"MIT"
] | 31
|
2017-04-14T13:54:17.000Z
|
2022-03-28T11:11:43.000Z
|
test/test_session.py
|
rrosajp/urlquick
|
56312c9393c212d9e982f583af9669c1c9448b6f
|
[
"MIT"
] | 2
|
2017-03-22T02:51:01.000Z
|
2021-02-27T16:26:41.000Z
|
import urlquick
import requests
import shutil
import pytest
import time
@pytest.mark.parametrize("obj", [urlquick, urlquick.Session()])
class TestSessionClean(object):
"""Clean the database before each and every test."""
# noinspection PyMethodMayBeStatic
def setup_method(self):
"""Remove cache location before each test."""
shutil.rmtree(urlquick.CACHE_LOCATION, ignore_errors=True)
def test_get(self, obj, requests_mock):
mocked = requests_mock.get('https://www.test.com/test/586', body=b"data")
ret = obj.get('https://www.test.com/test/586')
assert mocked.called
assert ret.from_cache is False
assert ret.content == b"data"
assert ret.text == "data"
def test_options(self, obj, requests_mock):
mocked = requests_mock.options('https://www.test.com', json={"test": True})
ret = obj.options('https://www.test.com')
assert mocked.called
assert ret.from_cache is False
assert ret.json() == {"test": True}
def test_head(self, obj, requests_mock):
mocked = requests_mock.head('https://www.test.com', headers={"X-TEST": "12345"})
ret = obj.head('https://www.test.com')
assert mocked.called
assert ret.from_cache is False
assert ret.content == b""
assert ret.text == ""
assert "X-TEST" in ret.headers and ret.headers["X-TEST"] == "12345"
def test_post(self, obj, requests_mock):
mocked = requests_mock.post('https://www.test.com', json={"test": True}, data=b"test")
ret = obj.post('https://www.test.com', data=b"test")
assert mocked.called
assert ret.from_cache is False
assert ret.json() == {"test": True}
def test_put(self, obj, requests_mock):
mocked = requests_mock.put('https://www.test.com', json={"test": True})
ret = obj.put('https://www.test.com')
assert mocked.called
assert ret.from_cache is False
assert ret.json() == {"test": True}
def test_patch(self, obj, requests_mock):
mocked = requests_mock.patch('https://www.test.com', json={"test": True})
ret = obj.patch('https://www.test.com')
assert mocked.called
assert ret.from_cache is False
assert ret.json() == {"test": True}
def test_delete(self, obj, requests_mock):
mocked = requests_mock.delete('https://www.test.com', json={"test": True})
ret = obj.delete('https://www.test.com')
assert mocked.called
assert ret.from_cache is False
assert ret.json() == {"test": True}
def test_headers_none(self, obj, requests_mock):
mocked = requests_mock.get('https://www.test.com/50', json={"test": True})
ret = obj.get('https://www.test.com/50', headers=None)
assert mocked.called
assert ret.from_cache is False
assert ret.json() == {"test": True}
class TestSessionCaching(object):
    """Clean the database before each and every test.

    Exercises cache hits, max_age handling, conditional requests and
    explicit cache invalidation.
    """

    # noinspection PyMethodMayBeStatic
    def setup_method(self):
        """Remove cache location before each test."""
        shutil.rmtree(urlquick.CACHE_LOCATION, ignore_errors=True)

    def test_cache(self, requests_mock):
        # First request hits the mock and populates the cache.
        mocked = requests_mock.get('https://www.test.com/1', body=b"data")
        ret = urlquick.get('https://www.test.com/1')
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"
        mocked.reset_stats()
        # Second request must be served entirely from cache.
        ret = urlquick.get('https://www.test.com/1')
        assert not mocked.called
        assert ret.from_cache is True
        assert ret.content == b"data"

    def test_delay(self, requests_mock):
        # A cached entry older than max_age must be refetched.
        mocked = requests_mock.get('https://www.test.com/1', body=b"data")
        ret = urlquick.get('https://www.test.com/1')
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"
        mocked.reset_stats()
        time.sleep(1.2)  # 1.2 seconds should be enough
        ret = urlquick.get('https://www.test.com/1', max_age=1)
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"

    def test_disable_flag(self, requests_mock):
        # max_age=-1 disables the cache entirely.
        mocked = requests_mock.get('https://www.test.com/1', body=b"data")
        ret = urlquick.get('https://www.test.com/1')
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"
        mocked.reset_stats()
        ret = urlquick.get('https://www.test.com/1', max_age=-1)
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"

    def test_never_valid(self, requests_mock):
        # max_age=0 treats every cached entry as stale.
        mocked = requests_mock.get('https://www.test.com/1', body=b"data")
        ret = urlquick.get('https://www.test.com/1')
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"
        mocked.reset_stats()
        ret = urlquick.get('https://www.test.com/1', max_age=0)
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"

    def test_etag(self, requests_mock):
        # With an Etag and a non-304 answer the fresh body is returned.
        mocked = requests_mock.get('https://www.test.com/1', body=b"data", headers={"Etag": "12345"})
        ret = urlquick.get('https://www.test.com/1')
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"
        mocked.reset_stats()
        ret = urlquick.get('https://www.test.com/1', max_age=0)
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"

    def test_last_modified(self, requests_mock):
        # A 304 Not Modified revalidation must serve the cached body.
        mocked = requests_mock.get('https://www.test.com/1', body=b"test 304", headers={"Last-modified": "12345"})
        ret = urlquick.get('https://www.test.com/1')  # Gets cached
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"test 304"
        mocked = requests_mock.get('https://www.test.com/1', headers={"Last-modified": "12345"}, status=304)
        ret = urlquick.get('https://www.test.com/1', max_age=0)
        assert mocked.called
        assert ret.from_cache is True
        assert ret.content == b"test 304"

    def test_wipe(self, requests_mock):
        # Wiping the cache adapter forces the next request back to the mock.
        mocked = requests_mock.get('https://www.test.com/1', body=b"data")
        session = urlquick.Session()
        ret = session.get('https://www.test.com/1')
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"
        mocked.reset_stats()
        # Wipe the cache clean
        session.cache_adapter.wipe()
        ret = session.get('https://www.test.com/1')
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"

    def test_delete(self, requests_mock):
        # Deleting a single cache entry by its url hash forces a refetch.
        url = 'https://www.test.com/1'
        mocked = requests_mock.get(url, body=b"data")
        session = urlquick.Session()
        ret = session.get('https://www.test.com/1')
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"
        mocked.reset_stats()
        # Build Request object
        req = requests.PreparedRequest()
        req.prepare_method("GET")
        req.prepare_url(url, None)
        req.prepare_headers(None)
        req.prepare_body(b"", None, None)
        # Test del_cache
        urlhash = urlquick.hash_url(req)
        session.cache_adapter.del_cache(urlhash)
        ret = session.get('https://www.test.com/1')
        assert mocked.called
        assert ret.from_cache is False
        assert ret.content == b"data"
class TestRaiseForStatus(object):
    """Clean the database before each and every test.

    Checks the ``raise_for_status`` Session option for both success and
    error status codes.
    """

    # noinspection PyMethodMayBeStatic
    def setup_method(self):
        """Remove cache location before each test."""
        shutil.rmtree(urlquick.CACHE_LOCATION, ignore_errors=True)

    def test_false_normal(self, requests_mock):
        # Default (raise_for_status off), 200: plain response.
        mocked = requests_mock.get('https://www.test.com/1', body=b"data", status=200)
        session = urlquick.Session()
        ret = session.get('https://www.test.com/1')
        assert mocked.called
        assert ret.from_cache is False
        assert ret.status_code == 200
        assert ret.content == b"data"

    def test_false_error(self, requests_mock):
        # Default (raise_for_status off), 404: no exception, status exposed.
        mocked = requests_mock.get('https://www.test.com/1', body=b"data", status=404)
        session = urlquick.Session()
        ret = session.get('https://www.test.com/1')
        assert mocked.called
        assert ret.from_cache is False
        assert ret.status_code == 404
        assert ret.content == b"data"

    def test_true_normal(self, requests_mock):
        # raise_for_status on, 200: behaves like a normal response.
        mocked = requests_mock.get('https://www.test.com/1', body=b"data", status=200)
        session = urlquick.Session(raise_for_status=True)
        ret = session.get('https://www.test.com/1')
        assert mocked.called
        assert ret.from_cache is False
        assert ret.status_code == 200
        assert ret.content == b"data"

    def test_true_error(self, requests_mock):
        # raise_for_status on, 404: HTTPError is raised.
        mocked = requests_mock.get('https://www.test.com/1', body=b"data", status=404)
        session = urlquick.Session(raise_for_status=True)
        with pytest.raises(urlquick.HTTPError):
            session.get('https://www.test.com/1')
        assert mocked.called
def test_session_send(requests_mock):
    """Session.send dispatches a hand-built PreparedRequest to the mock."""
    shutil.rmtree(urlquick.CACHE_LOCATION, ignore_errors=True)
    url = 'https://www.test.com/1'
    mock_get = requests_mock.get(url, body=b"data")
    sess = urlquick.Session()
    # Assemble the PreparedRequest piece by piece.
    prepared = requests.PreparedRequest()
    prepared.prepare_method("GET")
    prepared.prepare_url(url, None)
    prepared.prepare_headers(None)
    prepared.prepare_body(b"", None, None)
    response = sess.send(prepared)
    assert mock_get.called
    assert response.content == b"data"
def test_request_header_none(requests_mock):
    """Session.request accepts headers=None positionally."""
    shutil.rmtree(urlquick.CACHE_LOCATION, ignore_errors=True)
    mock_get = requests_mock.get('https://www.test.com/test/542', body=b"data")
    sess = urlquick.Session()
    response = sess.request("GET", 'https://www.test.com/test/542', None, None, None)
    assert mock_get.called
    assert response.content == b"data"
def test_request_header_data(requests_mock):
    """Session.request forwards an explicit headers dict."""
    shutil.rmtree(urlquick.CACHE_LOCATION, ignore_errors=True)
    mock_get = requests_mock.get('https://www.test.com/test/542', body=b"data")
    sess = urlquick.Session()
    response = sess.request("GET", 'https://www.test.com/test/542', None, None, {"X-TEST": "test"})
    assert mock_get.called
    assert response.from_cache is False
    assert response.content == b"data"
def test_session_method(requests_mock):
    """The lowercase urlquick.session() factory yields a working session."""
    shutil.rmtree(urlquick.CACHE_LOCATION, ignore_errors=True)
    mock_get = requests_mock.get('https://www.test.com', body=b"data")
    sess = urlquick.session()
    response = sess.get('https://www.test.com')
    assert mock_get.called
    assert response.from_cache is False
    assert response.content == b"data"
    assert response.text == "data"
def test_cache_unsupported_protocol(mocker, requests_mock):
    """Test that get_cache will clear the cache on error.

    An 'unsupported pickle protocol' ValueError must wipe the whole cache,
    so both previously cached urls are fetched again.
    """
    shutil.rmtree(urlquick.CACHE_LOCATION, ignore_errors=True)
    mocked_url_1 = requests_mock.get('https://www.test.com/1', body=b"test1")
    mocked_url_2 = requests_mock.get('https://www.test.com/2', body=b"test2")
    session = urlquick.Session()
    # Check that the mocked url is called
    ret = session.get('https://www.test.com/1')
    assert mocked_url_1.called
    assert ret.from_cache is False
    assert ret.content == b"test1"
    mocked_url_1.reset_stats()
    ret = session.get('https://www.test.com/2')
    assert mocked_url_2.called
    assert ret.from_cache is False
    assert ret.content == b"test2"
    mocked_url_2.reset_stats()
    # Should be cached now so mocked should not be called
    ret = session.get('https://www.test.com/1')
    assert not mocked_url_1.called
    assert ret.from_cache is True
    assert ret.content == b"test1"
    mocked_url_1.reset_stats()
    ret = session.get('https://www.test.com/2')
    assert not mocked_url_2.called
    assert ret.from_cache is True
    assert ret.content == b"test2"
    mocked_url_2.reset_stats()
    # Mock CacheRecord to raise ValueError
    mocked = mocker.patch("urlquick.CacheRecord")
    mocked.side_effect = ValueError("unsupported pickle protocol")
    # For a unsupported pickle protocol the whole cache is wiped so both should be called
    ret = session.get('https://www.test.com/1')
    assert mocked_url_1.called
    assert ret.from_cache is False
    assert ret.content == b"test1"
    # BUG FIX: stopall lives on the ``mocker`` fixture, not on the MagicMock
    # returned by ``mocker.patch``.  ``mocked.stopall()`` merely recorded a
    # call on the mock and left the patch active for the request below.
    mocker.stopall()
    # This should be called again
    ret = session.get('https://www.test.com/2')
    assert mocked_url_2.called
    assert ret.from_cache is False
    assert ret.content == b"test2"
def test_cache_unknown_error(mocker, requests_mock):
    """Test that get_cache will clear the cache on error.

    A generic ValueError must evict only the affected cache entry; other
    entries stay cached.
    """
    shutil.rmtree(urlquick.CACHE_LOCATION, ignore_errors=True)
    mocked_url_1 = requests_mock.get('https://www.test.com/1', body=b"test1")
    mocked_url_2 = requests_mock.get('https://www.test.com/2', body=b"test2")
    session = urlquick.Session()
    # Check that the mocked url is called
    ret = session.get('https://www.test.com/1')
    assert mocked_url_1.called
    assert ret.from_cache is False
    assert ret.content == b"test1"
    mocked_url_1.reset_stats()
    ret = session.get('https://www.test.com/2')
    assert mocked_url_2.called
    assert ret.from_cache is False
    assert ret.content == b"test2"
    mocked_url_2.reset_stats()
    # Should be cached now so mocked should not be called
    ret = session.get('https://www.test.com/1')
    assert not mocked_url_1.called
    assert ret.from_cache is True
    assert ret.content == b"test1"
    mocked_url_1.reset_stats()
    ret = session.get('https://www.test.com/2')
    assert not mocked_url_2.called
    assert ret.from_cache is True
    assert ret.content == b"test2"
    mocked_url_2.reset_stats()
    # Mock CacheRecord to raise ValueError
    mocked = mocker.patch.object(urlquick, "CacheRecord")
    mocked.side_effect = ValueError("normal error")
    # For normal errors only the current cache item
    # will be removed but all the rest will stay
    ret = session.get('https://www.test.com/1')
    assert mocked_url_1.called
    assert ret.from_cache is False
    assert ret.content == b"test1"
    mocker.stopall()
    # This request should not be called again
    ret = session.get('https://www.test.com/2')
    assert not mocked_url_2.called
    assert ret.from_cache is True
    assert ret.content == b"test2"
| 36.375309
| 114
| 0.653408
| 2,080
| 14,732
| 4.50625
| 0.071635
| 0.086418
| 0.09218
| 0.115225
| 0.890323
| 0.879654
| 0.867278
| 0.82951
| 0.825669
| 0.801984
| 0
| 0.015515
| 0.216875
| 14,732
| 404
| 115
| 36.465347
| 0.796914
| 0.072699
| 0
| 0.740984
| 0
| 0
| 0.153518
| 0
| 0
| 0
| 0
| 0
| 0.442623
| 1
| 0.095082
| false
| 0
| 0.016393
| 0
| 0.121311
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2fea3353ac6493d28d545e7fbb0e5f13a9ff260d
| 310,487
|
py
|
Python
|
code/qgis_projects.py
|
wupperinst/osmTGmod
|
ce70ba30b2b101cfccde9cc9d6fff3f0e5a12083
|
[
"Apache-2.0"
] | 6
|
2016-12-02T14:08:33.000Z
|
2022-01-20T09:32:10.000Z
|
code/qgis_projects.py
|
wupperinst/osmTGmod
|
ce70ba30b2b101cfccde9cc9d6fff3f0e5a12083
|
[
"Apache-2.0"
] | 18
|
2016-05-03T15:35:31.000Z
|
2017-12-04T15:21:35.000Z
|
code/qgis_projects.py
|
wupperinst/osmTGmod
|
ce70ba30b2b101cfccde9cc9d6fff3f0e5a12083
|
[
"Apache-2.0"
] | 5
|
2016-03-25T11:09:41.000Z
|
2021-04-20T09:47:21.000Z
|
###################################################################################
# #
# Copyright "2015" "Wuppertal Institut" #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
###################################################################################
import os
def write_projects(database,
password,
host,
port,
user):
qgis_proj_dir = os.path.dirname(os.getcwd()) + "/qgis_projects"# gets one above current wd and then other folder
result_project = """
<!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>
<qgis projectname="" version="2.10.1-Pisa">
<title></title>
<layer-tree-group expanded="1" checked="Qt::PartiallyChecked" name="">
<customproperties/>
<layer-tree-layer expanded="1" checked="Qt::Checked" id="view_bus_data20151217121906009" name="view_bus_data">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer expanded="1" checked="Qt::Checked" id="view_branch_data20151217133637741" name="view_branch_data">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer expanded="1" checked="Qt::Checked" id="view_dcline_data20151217133332218" name="view_dcline_data">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer expanded="1" checked="Qt::Unchecked" id="view_problem_log20151217121906087" name="view_problem_log">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer expanded="1" checked="Qt::Checked" id="view_substations20151217121906140" name="view_substations">
<customproperties/>
</layer-tree-layer>
</layer-tree-group>
<relations/>
<mapcanvas>
<units>degrees</units>
<extent>
<xmin>5.67446690360767469</xmin>
<ymin>47.15859863826560883</ymin>
<xmax>15.04628747357738838</xmax>
<ymax>55.14285135188627152</ymax>
</extent>
<rotation>0</rotation>
<projections>0</projections>
<destinationsrs>
<spatialrefsys>
<proj4>+proj=longlat +datum=WGS84 +no_defs</proj4>
<srsid>3452</srsid>
<srid>4326</srid>
<authid>EPSG:4326</authid>
<description>WGS 84</description>
<projectionacronym>longlat</projectionacronym>
<ellipsoidacronym>WGS84</ellipsoidacronym>
<geographicflag>true</geographicflag>
</spatialrefsys>
</destinationsrs>
<layer_coordinate_transform_info/>
</mapcanvas>
<visibility-presets/>
<layer-tree-canvas>
<custom-order enabled="0">
<item>view_bus_data20151217121906009</item>
<item>view_problem_log20151217121906087</item>
<item>view_substations20151217121906140</item>
<item>view_dcline_data20151217133332218</item>
<item>view_branch_data20151217133637741</item>
</custom-order>
</layer-tree-canvas>
<legend updateDrawingOrder="true">
<legendlayer drawingOrder="-1" open="true" checked="Qt::Checked" name="view_bus_data" showFeatureCount="0">
<filegroup open="true" hidden="false">
<legendlayerfile isInOverview="0" layerid="view_bus_data20151217121906009" visible="1"/>
</filegroup>
</legendlayer>
<legendlayer drawingOrder="-1" open="true" checked="Qt::Checked" name="view_branch_data" showFeatureCount="0">
<filegroup open="true" hidden="false">
<legendlayerfile isInOverview="0" layerid="view_branch_data20151217133637741" visible="1"/>
</filegroup>
</legendlayer>
<legendlayer drawingOrder="-1" open="true" checked="Qt::Checked" name="view_dcline_data" showFeatureCount="0">
<filegroup open="true" hidden="false">
<legendlayerfile isInOverview="0" layerid="view_dcline_data20151217133332218" visible="1"/>
</filegroup>
</legendlayer>
<legendlayer drawingOrder="-1" open="true" checked="Qt::Unchecked" name="view_problem_log" showFeatureCount="0">
<filegroup open="true" hidden="false">
<legendlayerfile isInOverview="0" layerid="view_problem_log20151217121906087" visible="0"/>
</filegroup>
</legendlayer>
<legendlayer drawingOrder="-1" open="true" checked="Qt::Checked" name="view_substations" showFeatureCount="0">
<filegroup open="true" hidden="false">
<legendlayerfile isInOverview="0" layerid="view_substations20151217121906140" visible="1"/>
</filegroup>
</legendlayer>
</legend>
<projectlayers layercount="5">
<maplayer minimumScale="-4.65661e-10" maximumScale="1e+08" simplifyDrawingHints="1" minLabelScale="0" maxLabelScale="1e+08" simplifyDrawingTol="1" geometry="Line" simplifyMaxScale="1" type="vector" hasScaleBasedVisibilityFlag="0" simplifyLocal="1" scaleBasedLabelVisibilityFlag="0">
<id>view_branch_data20151217133637741</id>
<datasource>dbname='"""+database+"""' host="""+host+""" port="""+port+""" user='"""+user+"""' password='"""+password+"""' sslmode=disable key='view_id' srid=4326 type=MultiLineString table="results"."view_branch_data" (geom) sql=</datasource>
<title></title>
<abstract></abstract>
<keywordList>
<value></value>
</keywordList>
<layername>view_branch_data</layername>
<srs>
<spatialrefsys>
<proj4>+proj=longlat +datum=WGS84 +no_defs</proj4>
<srsid>3452</srsid>
<srid>4326</srid>
<authid>EPSG:4326</authid>
<description>WGS 84</description>
<projectionacronym>longlat</projectionacronym>
<ellipsoidacronym>WGS84</ellipsoidacronym>
<geographicflag>true</geographicflag>
</spatialrefsys>
</srs>
<provider encoding="UTF-8">postgres</provider>
<previewExpression>COALESCE("relation_ids", '<NULL>')</previewExpression>
<vectorjoins/>
<expressionfields/>
<map-layer-style-manager current="">
<map-layer-style name=""/>
</map-layer-style-manager>
<edittypes>
<edittype widgetv2type="TextEdit" name="result_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="view_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="f_bus">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="t_bus">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="br_r">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="br_x">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="br_b">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="rate_a">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="rate_b">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="rate_c">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="tap">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="shift">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="br_status">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="link_type">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="branch_voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
<renderer-v2 symbollevels="0" type="RuleRenderer">
<rules key="{18fa568f-4887-48df-832e-32e09f18cf55}">
<rule filter=" "branch_voltage" >= 380000 AND link_type = 'line'" key="{fb6cf3fa-a11d-4246-915a-57f1abc40a4a}" symbol="0" label=">= 380"/>
<rule filter=" "branch_voltage" = 220000 AND link_type= 'line'" key="{d84c0c58-913e-4424-9343-8eb90ab7465e}" symbol="1" label="220 kV"/>
<rule filter="link_type= 'cable'" key="{db5cbfb8-beb8-4d3c-8349-559cb35844a1}" symbol="2" label="Erdkabel"/>
<rule filter=" link_type = 'transformer' " key="{3d4ecf28-828a-4557-97be-f6433c5d5837}" symbol="3" label="Transformator"/>
</rules>
<symbols>
<symbol alpha="1" clip_to_extent="1" type="line" name="0">
<layer pass="2" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="255,127,0,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.4"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="1">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="51,160,44,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.4"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="2">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="0,0,0,255"/>
<prop k="line_style" v="dot"/>
<prop k="line_width" v="0.4"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="3">
<layer pass="1" class="MarkerLine" locked="0">
<prop k="interval" v="3"/>
<prop k="interval_map_unit_scale" v="0,0"/>
<prop k="interval_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_along_line" v="0"/>
<prop k="offset_along_line_map_unit_scale" v="0,0"/>
<prop k="offset_along_line_unit" v="MM"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="placement" v="vertex"/>
<prop k="rotate" v="1"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
<symbol alpha="1" clip_to_extent="1" type="marker" name="@3@0">
<layer pass="0" class="SimpleMarker" locked="0">
<prop k="angle" v="0"/>
<prop k="color" v="187,51,53,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,0"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="2.4"/>
<prop k="size_map_unit_scale" v="0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
</layer>
</symbol>
</symbols>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</renderer-v2>
<customproperties>
<property key="labeling" value="pal"/>
<property key="labeling/addDirectionSymbol" value="false"/>
<property key="labeling/angleOffset" value="0"/>
<property key="labeling/blendMode" value="0"/>
<property key="labeling/bufferBlendMode" value="0"/>
<property key="labeling/bufferColorA" value="255"/>
<property key="labeling/bufferColorB" value="255"/>
<property key="labeling/bufferColorG" value="255"/>
<property key="labeling/bufferColorR" value="255"/>
<property key="labeling/bufferDraw" value="false"/>
<property key="labeling/bufferJoinStyle" value="64"/>
<property key="labeling/bufferNoFill" value="false"/>
<property key="labeling/bufferSize" value="1"/>
<property key="labeling/bufferSizeInMapUnits" value="false"/>
<property key="labeling/bufferSizeMapUnitMaxScale" value="0"/>
<property key="labeling/bufferSizeMapUnitMinScale" value="0"/>
<property key="labeling/bufferTransp" value="0"/>
<property key="labeling/centroidInside" value="false"/>
<property key="labeling/centroidWhole" value="false"/>
<property key="labeling/decimals" value="3"/>
<property key="labeling/displayAll" value="false"/>
<property key="labeling/dist" value="0"/>
<property key="labeling/distInMapUnits" value="false"/>
<property key="labeling/distMapUnitMaxScale" value="0"/>
<property key="labeling/distMapUnitMinScale" value="0"/>
<property key="labeling/enabled" value="false"/>
<property key="labeling/fieldName" value=""/>
<property key="labeling/fontBold" value="false"/>
<property key="labeling/fontCapitals" value="0"/>
<property key="labeling/fontFamily" value="Lucida Grande"/>
<property key="labeling/fontItalic" value="false"/>
<property key="labeling/fontLetterSpacing" value="0"/>
<property key="labeling/fontLimitPixelSize" value="false"/>
<property key="labeling/fontMaxPixelSize" value="10000"/>
<property key="labeling/fontMinPixelSize" value="3"/>
<property key="labeling/fontSize" value="8.25"/>
<property key="labeling/fontSizeInMapUnits" value="false"/>
<property key="labeling/fontSizeMapUnitMaxScale" value="0"/>
<property key="labeling/fontSizeMapUnitMinScale" value="0"/>
<property key="labeling/fontStrikeout" value="false"/>
<property key="labeling/fontUnderline" value="false"/>
<property key="labeling/fontWeight" value="50"/>
<property key="labeling/fontWordSpacing" value="0"/>
<property key="labeling/formatNumbers" value="false"/>
<property key="labeling/isExpression" value="true"/>
<property key="labeling/labelOffsetInMapUnits" value="true"/>
<property key="labeling/labelOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/labelOffsetMapUnitMinScale" value="0"/>
<property key="labeling/labelPerPart" value="false"/>
<property key="labeling/leftDirectionSymbol" value="&lt;"/>
<property key="labeling/limitNumLabels" value="false"/>
<property key="labeling/maxCurvedCharAngleIn" value="20"/>
<property key="labeling/maxCurvedCharAngleOut" value="-20"/>
<property key="labeling/maxNumLabels" value="2000"/>
<property key="labeling/mergeLines" value="false"/>
<property key="labeling/minFeatureSize" value="0"/>
<property key="labeling/multilineAlign" value="0"/>
<property key="labeling/multilineHeight" value="1"/>
<property key="labeling/namedStyle" value="Normal"/>
<property key="labeling/obstacle" value="true"/>
<property key="labeling/placeDirectionSymbol" value="0"/>
<property key="labeling/placement" value="2"/>
<property key="labeling/placementFlags" value="10"/>
<property key="labeling/plussign" value="false"/>
<property key="labeling/preserveRotation" value="true"/>
<property key="labeling/previewBkgrdColor" value="#ffffff"/>
<property key="labeling/priority" value="5"/>
<property key="labeling/quadOffset" value="4"/>
<property key="labeling/repeatDistance" value="0"/>
<property key="labeling/repeatDistanceMapUnitMaxScale" value="0"/>
<property key="labeling/repeatDistanceMapUnitMinScale" value="0"/>
<property key="labeling/repeatDistanceUnit" value="1"/>
<property key="labeling/reverseDirectionSymbol" value="false"/>
<property key="labeling/rightDirectionSymbol" value=">"/>
<property key="labeling/scaleMax" value="10000000"/>
<property key="labeling/scaleMin" value="1"/>
<property key="labeling/scaleVisibility" value="false"/>
<property key="labeling/shadowBlendMode" value="6"/>
<property key="labeling/shadowColorB" value="0"/>
<property key="labeling/shadowColorG" value="0"/>
<property key="labeling/shadowColorR" value="0"/>
<property key="labeling/shadowDraw" value="false"/>
<property key="labeling/shadowOffsetAngle" value="135"/>
<property key="labeling/shadowOffsetDist" value="1"/>
<property key="labeling/shadowOffsetGlobal" value="true"/>
<property key="labeling/shadowOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shadowOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shadowOffsetUnits" value="1"/>
<property key="labeling/shadowRadius" value="1.5"/>
<property key="labeling/shadowRadiusAlphaOnly" value="false"/>
<property key="labeling/shadowRadiusMapUnitMaxScale" value="0"/>
<property key="labeling/shadowRadiusMapUnitMinScale" value="0"/>
<property key="labeling/shadowRadiusUnits" value="1"/>
<property key="labeling/shadowScale" value="100"/>
<property key="labeling/shadowTransparency" value="30"/>
<property key="labeling/shadowUnder" value="0"/>
<property key="labeling/shapeBlendMode" value="0"/>
<property key="labeling/shapeBorderColorA" value="255"/>
<property key="labeling/shapeBorderColorB" value="128"/>
<property key="labeling/shapeBorderColorG" value="128"/>
<property key="labeling/shapeBorderColorR" value="128"/>
<property key="labeling/shapeBorderWidth" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMaxScale" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMinScale" value="0"/>
<property key="labeling/shapeBorderWidthUnits" value="1"/>
<property key="labeling/shapeDraw" value="false"/>
<property key="labeling/shapeFillColorA" value="255"/>
<property key="labeling/shapeFillColorB" value="255"/>
<property key="labeling/shapeFillColorG" value="255"/>
<property key="labeling/shapeFillColorR" value="255"/>
<property key="labeling/shapeJoinStyle" value="64"/>
<property key="labeling/shapeOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shapeOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shapeOffsetUnits" value="1"/>
<property key="labeling/shapeOffsetX" value="0"/>
<property key="labeling/shapeOffsetY" value="0"/>
<property key="labeling/shapeRadiiMapUnitMaxScale" value="0"/>
<property key="labeling/shapeRadiiMapUnitMinScale" value="0"/>
<property key="labeling/shapeRadiiUnits" value="1"/>
<property key="labeling/shapeRadiiX" value="0"/>
<property key="labeling/shapeRadiiY" value="0"/>
<property key="labeling/shapeRotation" value="0"/>
<property key="labeling/shapeRotationType" value="0"/>
<property key="labeling/shapeSVGFile" value=""/>
<property key="labeling/shapeSizeMapUnitMaxScale" value="0"/>
<property key="labeling/shapeSizeMapUnitMinScale" value="0"/>
<property key="labeling/shapeSizeType" value="0"/>
<property key="labeling/shapeSizeUnits" value="1"/>
<property key="labeling/shapeSizeX" value="0"/>
<property key="labeling/shapeSizeY" value="0"/>
<property key="labeling/shapeTransparency" value="0"/>
<property key="labeling/shapeType" value="0"/>
<property key="labeling/textColorA" value="255"/>
<property key="labeling/textColorB" value="0"/>
<property key="labeling/textColorG" value="0"/>
<property key="labeling/textColorR" value="0"/>
<property key="labeling/textTransp" value="0"/>
<property key="labeling/upsidedownLabels" value="0"/>
<property key="labeling/wrapChar" value=""/>
<property key="labeling/xOffset" value="0"/>
<property key="labeling/yOffset" value="0"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerTransparency>0</layerTransparency>
<displayfield>relation_ids</displayfield>
<label>0</label>
<labelattributes>
<label fieldname="" text="Beschriftung"/>
<family fieldname="" name="MS Shell Dlg 2"/>
<size fieldname="" units="pt" value="12"/>
<bold fieldname="" on="0"/>
<italic fieldname="" on="0"/>
<underline fieldname="" on="0"/>
<strikeout fieldname="" on="0"/>
<color fieldname="" red="0" blue="0" green="0"/>
<x fieldname=""/>
<y fieldname=""/>
<offset x="0" y="0" units="pt" yfieldname="" xfieldname=""/>
<angle fieldname="" value="0" auto="0"/>
<alignment fieldname="" value="center"/>
<buffercolor fieldname="" red="255" blue="255" green="255"/>
<buffersize fieldname="" units="pt" value="1"/>
<bufferenabled fieldname="" on=""/>
<multilineenabled fieldname="" on=""/>
<selectedonly on=""/>
</labelattributes>
<SingleCategoryDiagramRenderer diagramType="Pie">
<DiagramCategory penColor="#000000" labelPlacementMethod="XHeight" penWidth="0" diagramOrientation="Up" minimumSize="0" barWidth="5" penAlpha="255" maxScaleDenominator="1e+08" backgroundColor="#ffffff" transparency="0" width="15" scaleDependency="Area" backgroundAlpha="255" angleOffset="1440" scaleBasedVisibility="0" enabled="0" height="15" sizeType="MM" minScaleDenominator="-4.65661e-10">
<fontProperties description="Lucida Grande,13,-1,5,50,0,0,0,0,0" style=""/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings yPosColumn="-1" linePlacementFlags="10" placement="2" dist="0" xPosColumn="-1" priority="0" obstacle="0" showAll="1"/>
<editform>../../../PROGRA~1/QGISWI~1/bin</editform>
<editforminit/>
<featformsuppress>0</featformsuppress>
<annotationform>../../../PROGRA~1/QGISWI~1/bin</annotationform>
<editorlayout>generatedlayout</editorlayout>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<attributeactions/>
<edittypes>
<edittype widgetv2type="TextEdit" name="result_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="view_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="f_bus">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="t_bus">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="br_r">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="br_x">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="br_b">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="rate_a">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="rate_b">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="rate_c">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="tap">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="shift">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="br_status">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="link_type">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="branch_voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
</maplayer>
<maplayer minimumScale="0" maximumScale="1e+08" simplifyDrawingHints="0" minLabelScale="0" maxLabelScale="1e+08" simplifyDrawingTol="1" geometry="Point" simplifyMaxScale="1" type="vector" hasScaleBasedVisibilityFlag="0" simplifyLocal="1" scaleBasedLabelVisibilityFlag="0">
<id>view_bus_data20151217121906009</id>
<datasource>dbname='"""+database+"""' host="""+host+""" port="""+port+""" user='"""+user+"""' password='"""+password+"""' sslmode=disable key='view_id' srid=4326 type=Point table="results"."view_bus_data" (geom) sql=</datasource>
<title></title>
<abstract></abstract>
<keywordList>
<value></value>
</keywordList>
<layername>view_bus_data</layername>
<srs>
<spatialrefsys>
<proj4>+proj=longlat +datum=WGS84 +no_defs</proj4>
<srsid>3452</srsid>
<srid>4326</srid>
<authid>EPSG:4326</authid>
<description>WGS 84</description>
<projectionacronym>longlat</projectionacronym>
<ellipsoidacronym>WGS84</ellipsoidacronym>
<geographicflag>true</geographicflag>
</spatialrefsys>
</srs>
<provider encoding="UTF-8">postgres</provider>
<previewExpression></previewExpression>
<vectorjoins/>
<expressionfields/>
<map-layer-style-manager current="">
<map-layer-style name=""/>
</map-layer-style-manager>
<edittypes>
<edittype widgetv2type="TextEdit" name="result_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="view_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="bus_i">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="bus_type">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="pd">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qd">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="gs">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="bs">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="bus_area">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="vm">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="va">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="base_kv">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="zone">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="vmax">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="vmin">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="osm_substation_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="cntr_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="osm_name">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
<renderer-v2 symbollevels="0" type="RuleRenderer">
<rules key="{4a019d98-9843-463d-a59b-0514046fec8d}">
<rule filter=" &quot;osm_substation_id&quot; IS NULL" key="{db87f334-0ae8-4ee2-bd21-9454c35f0567}" symbol="0" label="Einfacher Netzknoten"/>
<rule filter="NOT &quot;osm_substation_id&quot; IS NULL" key="{f847291a-f51b-4ae6-b45e-32b799fa6c0d}" symbol="1" label="Umspannwerk"/>
</rules>
<symbols>
<symbol alpha="1" clip_to_extent="1" type="marker" name="0">
<layer pass="0" class="SimpleMarker" locked="0">
<prop k="angle" v="0"/>
<prop k="color" v="0,0,0,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="0.8"/>
<prop k="size_map_unit_scale" v="0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="marker" name="1">
<layer pass="0" class="SimpleMarker" locked="0">
<prop k="angle" v="0"/>
<prop k="color" v="31,120,180,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,0"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0.8"/>
<prop k="outline_width_map_unit_scale" v="0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="2.3"/>
<prop k="size_map_unit_scale" v="0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
</symbols>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</renderer-v2>
<customproperties>
<property key="labeling" value="pal"/>
<property key="labeling/addDirectionSymbol" value="false"/>
<property key="labeling/angleOffset" value="0"/>
<property key="labeling/blendMode" value="0"/>
<property key="labeling/bufferBlendMode" value="0"/>
<property key="labeling/bufferColorA" value="255"/>
<property key="labeling/bufferColorB" value="255"/>
<property key="labeling/bufferColorG" value="255"/>
<property key="labeling/bufferColorR" value="255"/>
<property key="labeling/bufferDraw" value="false"/>
<property key="labeling/bufferJoinStyle" value="64"/>
<property key="labeling/bufferNoFill" value="false"/>
<property key="labeling/bufferSize" value="1"/>
<property key="labeling/bufferSizeInMapUnits" value="false"/>
<property key="labeling/bufferSizeMapUnitMaxScale" value="0"/>
<property key="labeling/bufferSizeMapUnitMinScale" value="0"/>
<property key="labeling/bufferTransp" value="0"/>
<property key="labeling/centroidInside" value="false"/>
<property key="labeling/centroidWhole" value="false"/>
<property key="labeling/decimals" value="3"/>
<property key="labeling/displayAll" value="false"/>
<property key="labeling/dist" value="0"/>
<property key="labeling/distInMapUnits" value="false"/>
<property key="labeling/distMapUnitMaxScale" value="0"/>
<property key="labeling/distMapUnitMinScale" value="0"/>
<property key="labeling/enabled" value="false"/>
<property key="labeling/fieldName" value=""/>
<property key="labeling/fontBold" value="false"/>
<property key="labeling/fontCapitals" value="0"/>
<property key="labeling/fontFamily" value="Lucida Grande"/>
<property key="labeling/fontItalic" value="false"/>
<property key="labeling/fontLetterSpacing" value="0"/>
<property key="labeling/fontLimitPixelSize" value="false"/>
<property key="labeling/fontMaxPixelSize" value="10000"/>
<property key="labeling/fontMinPixelSize" value="3"/>
<property key="labeling/fontSize" value="8.25"/>
<property key="labeling/fontSizeInMapUnits" value="false"/>
<property key="labeling/fontSizeMapUnitMaxScale" value="0"/>
<property key="labeling/fontSizeMapUnitMinScale" value="0"/>
<property key="labeling/fontStrikeout" value="false"/>
<property key="labeling/fontUnderline" value="false"/>
<property key="labeling/fontWeight" value="50"/>
<property key="labeling/fontWordSpacing" value="0"/>
<property key="labeling/formatNumbers" value="false"/>
<property key="labeling/isExpression" value="true"/>
<property key="labeling/labelOffsetInMapUnits" value="true"/>
<property key="labeling/labelOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/labelOffsetMapUnitMinScale" value="0"/>
<property key="labeling/labelPerPart" value="false"/>
<property key="labeling/leftDirectionSymbol" value="&lt;"/>
<property key="labeling/limitNumLabels" value="false"/>
<property key="labeling/maxCurvedCharAngleIn" value="20"/>
<property key="labeling/maxCurvedCharAngleOut" value="-20"/>
<property key="labeling/maxNumLabels" value="2000"/>
<property key="labeling/mergeLines" value="false"/>
<property key="labeling/minFeatureSize" value="0"/>
<property key="labeling/multilineAlign" value="0"/>
<property key="labeling/multilineHeight" value="1"/>
<property key="labeling/namedStyle" value="Normal"/>
<property key="labeling/obstacle" value="true"/>
<property key="labeling/placeDirectionSymbol" value="0"/>
<property key="labeling/placement" value="0"/>
<property key="labeling/placementFlags" value="0"/>
<property key="labeling/plussign" value="false"/>
<property key="labeling/preserveRotation" value="true"/>
<property key="labeling/previewBkgrdColor" value="#ffffff"/>
<property key="labeling/priority" value="5"/>
<property key="labeling/quadOffset" value="4"/>
<property key="labeling/repeatDistance" value="0"/>
<property key="labeling/repeatDistanceMapUnitMaxScale" value="0"/>
<property key="labeling/repeatDistanceMapUnitMinScale" value="0"/>
<property key="labeling/repeatDistanceUnit" value="1"/>
<property key="labeling/reverseDirectionSymbol" value="false"/>
<property key="labeling/rightDirectionSymbol" value=">"/>
<property key="labeling/scaleMax" value="10000000"/>
<property key="labeling/scaleMin" value="1"/>
<property key="labeling/scaleVisibility" value="false"/>
<property key="labeling/shadowBlendMode" value="6"/>
<property key="labeling/shadowColorB" value="0"/>
<property key="labeling/shadowColorG" value="0"/>
<property key="labeling/shadowColorR" value="0"/>
<property key="labeling/shadowDraw" value="false"/>
<property key="labeling/shadowOffsetAngle" value="135"/>
<property key="labeling/shadowOffsetDist" value="1"/>
<property key="labeling/shadowOffsetGlobal" value="true"/>
<property key="labeling/shadowOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shadowOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shadowOffsetUnits" value="1"/>
<property key="labeling/shadowRadius" value="1.5"/>
<property key="labeling/shadowRadiusAlphaOnly" value="false"/>
<property key="labeling/shadowRadiusMapUnitMaxScale" value="0"/>
<property key="labeling/shadowRadiusMapUnitMinScale" value="0"/>
<property key="labeling/shadowRadiusUnits" value="1"/>
<property key="labeling/shadowScale" value="100"/>
<property key="labeling/shadowTransparency" value="30"/>
<property key="labeling/shadowUnder" value="0"/>
<property key="labeling/shapeBlendMode" value="0"/>
<property key="labeling/shapeBorderColorA" value="255"/>
<property key="labeling/shapeBorderColorB" value="128"/>
<property key="labeling/shapeBorderColorG" value="128"/>
<property key="labeling/shapeBorderColorR" value="128"/>
<property key="labeling/shapeBorderWidth" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMaxScale" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMinScale" value="0"/>
<property key="labeling/shapeBorderWidthUnits" value="1"/>
<property key="labeling/shapeDraw" value="false"/>
<property key="labeling/shapeFillColorA" value="255"/>
<property key="labeling/shapeFillColorB" value="255"/>
<property key="labeling/shapeFillColorG" value="255"/>
<property key="labeling/shapeFillColorR" value="255"/>
<property key="labeling/shapeJoinStyle" value="64"/>
<property key="labeling/shapeOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shapeOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shapeOffsetUnits" value="1"/>
<property key="labeling/shapeOffsetX" value="0"/>
<property key="labeling/shapeOffsetY" value="0"/>
<property key="labeling/shapeRadiiMapUnitMaxScale" value="0"/>
<property key="labeling/shapeRadiiMapUnitMinScale" value="0"/>
<property key="labeling/shapeRadiiUnits" value="1"/>
<property key="labeling/shapeRadiiX" value="0"/>
<property key="labeling/shapeRadiiY" value="0"/>
<property key="labeling/shapeRotation" value="0"/>
<property key="labeling/shapeRotationType" value="0"/>
<property key="labeling/shapeSVGFile" value=""/>
<property key="labeling/shapeSizeMapUnitMaxScale" value="0"/>
<property key="labeling/shapeSizeMapUnitMinScale" value="0"/>
<property key="labeling/shapeSizeType" value="0"/>
<property key="labeling/shapeSizeUnits" value="1"/>
<property key="labeling/shapeSizeX" value="0"/>
<property key="labeling/shapeSizeY" value="0"/>
<property key="labeling/shapeTransparency" value="0"/>
<property key="labeling/shapeType" value="0"/>
<property key="labeling/textColorA" value="255"/>
<property key="labeling/textColorB" value="0"/>
<property key="labeling/textColorG" value="0"/>
<property key="labeling/textColorR" value="0"/>
<property key="labeling/textTransp" value="0"/>
<property key="labeling/upsidedownLabels" value="0"/>
<property key="labeling/wrapChar" value=""/>
<property key="labeling/xOffset" value="0"/>
<property key="labeling/yOffset" value="0"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerTransparency>19</layerTransparency>
<displayfield>id</displayfield>
<label>0</label>
<labelattributes>
<label fieldname="" text="Beschriftung"/>
<family fieldname="" name="MS Shell Dlg 2"/>
<size fieldname="" units="pt" value="12"/>
<bold fieldname="" on="0"/>
<italic fieldname="" on="0"/>
<underline fieldname="" on="0"/>
<strikeout fieldname="" on="0"/>
<color fieldname="" red="0" blue="0" green="0"/>
<x fieldname=""/>
<y fieldname=""/>
<offset x="0" y="0" units="pt" yfieldname="" xfieldname=""/>
<angle fieldname="" value="0" auto="0"/>
<alignment fieldname="" value="center"/>
<buffercolor fieldname="" red="255" blue="255" green="255"/>
<buffersize fieldname="" units="pt" value="1"/>
<bufferenabled fieldname="" on=""/>
<multilineenabled fieldname="" on=""/>
<selectedonly on=""/>
</labelattributes>
<SingleCategoryDiagramRenderer diagramType="Pie">
<DiagramCategory penColor="#000000" labelPlacementMethod="XHeight" penWidth="0" diagramOrientation="Up" minimumSize="0" barWidth="5" penAlpha="255" maxScaleDenominator="1e+08" backgroundColor="#ffffff" transparency="0" width="15" scaleDependency="Area" backgroundAlpha="255" angleOffset="1440" scaleBasedVisibility="0" enabled="0" height="15" sizeType="MM" minScaleDenominator="-4.65661e-10">
<fontProperties description="Lucida Grande,13,-1,5,50,0,0,0,0,0" style=""/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings yPosColumn="-1" linePlacementFlags="10" placement="0" dist="0" xPosColumn="-1" priority="0" obstacle="0" showAll="1"/>
<editform>../../../PROGRA~1/QGISWI~1/bin</editform>
<editforminit/>
<featformsuppress>0</featformsuppress>
<annotationform>../../../PROGRA~1/QGISWI~1/bin</annotationform>
<editorlayout>generatedlayout</editorlayout>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<attributeactions/>
<edittypes>
<edittype widgetv2type="TextEdit" name="result_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="view_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="bus_i">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="bus_type">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="pd">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qd">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="gs">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="bs">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="bus_area">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="vm">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="va">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="base_kv">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="zone">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="vmax">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="vmin">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="osm_substation_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="cntr_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="osm_name">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
</maplayer>
<maplayer minimumScale="0" maximumScale="1e+08" simplifyDrawingHints="1" minLabelScale="0" maxLabelScale="1e+08" simplifyDrawingTol="1" geometry="Line" simplifyMaxScale="1" type="vector" hasScaleBasedVisibilityFlag="0" simplifyLocal="1" scaleBasedLabelVisibilityFlag="0">
<id>view_dcline_data20151217133332218</id>
<datasource>dbname='"""+database+"""' host="""+host+""" port="""+port+""" user='"""+user+"""' password='"""+password+"""' sslmode=disable key='view_id' srid=4326 type=MultiLineString table="results"."view_dcline_data" (geom) sql=</datasource>
<title></title>
<abstract></abstract>
<keywordList>
<value></value>
</keywordList>
<layername>view_dcline_data</layername>
<srs>
<spatialrefsys>
<proj4>+proj=longlat +datum=WGS84 +no_defs</proj4>
<srsid>3452</srsid>
<srid>4326</srid>
<authid>EPSG:4326</authid>
<description>WGS 84</description>
<projectionacronym>longlat</projectionacronym>
<ellipsoidacronym>WGS84</ellipsoidacronym>
<geographicflag>true</geographicflag>
</spatialrefsys>
</srs>
<provider encoding="UTF-8">postgres</provider>
<previewExpression></previewExpression>
<vectorjoins/>
<expressionfields/>
<map-layer-style-manager current="">
<map-layer-style name=""/>
</map-layer-style-manager>
<edittypes>
<edittype widgetv2type="TextEdit" name="result_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="view_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="f_bus">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="t_bus">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="br_status">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="pf">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="pt">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qf">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qt">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="vf">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="vt">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="pmin">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="pmax">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qminf">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qmaxf">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qmint">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qmaxt">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="loss0">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="loss1">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="link_type">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="branch_voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
<renderer-v2 symbollevels="0" type="singleSymbol">
<symbols>
<symbol alpha="1" clip_to_extent="1" type="line" name="0">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="158,98,144,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.46"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
</symbols>
<rotation/>
<sizescale scalemethod="diameter"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</renderer-v2>
<customproperties>
<property key="labeling" value="pal"/>
<property key="labeling/addDirectionSymbol" value="false"/>
<property key="labeling/angleOffset" value="0"/>
<property key="labeling/blendMode" value="0"/>
<property key="labeling/bufferBlendMode" value="0"/>
<property key="labeling/bufferColorA" value="255"/>
<property key="labeling/bufferColorB" value="255"/>
<property key="labeling/bufferColorG" value="255"/>
<property key="labeling/bufferColorR" value="255"/>
<property key="labeling/bufferDraw" value="false"/>
<property key="labeling/bufferJoinStyle" value="64"/>
<property key="labeling/bufferNoFill" value="false"/>
<property key="labeling/bufferSize" value="1"/>
<property key="labeling/bufferSizeInMapUnits" value="false"/>
<property key="labeling/bufferSizeMapUnitMaxScale" value="0"/>
<property key="labeling/bufferSizeMapUnitMinScale" value="0"/>
<property key="labeling/bufferTransp" value="0"/>
<property key="labeling/centroidInside" value="false"/>
<property key="labeling/centroidWhole" value="false"/>
<property key="labeling/decimals" value="3"/>
<property key="labeling/displayAll" value="false"/>
<property key="labeling/dist" value="0"/>
<property key="labeling/distInMapUnits" value="false"/>
<property key="labeling/distMapUnitMaxScale" value="0"/>
<property key="labeling/distMapUnitMinScale" value="0"/>
<property key="labeling/enabled" value="false"/>
<property key="labeling/fieldName" value=""/>
<property key="labeling/fontBold" value="false"/>
<property key="labeling/fontCapitals" value="0"/>
<property key="labeling/fontFamily" value="Lucida Grande"/>
<property key="labeling/fontItalic" value="false"/>
<property key="labeling/fontLetterSpacing" value="0"/>
<property key="labeling/fontLimitPixelSize" value="false"/>
<property key="labeling/fontMaxPixelSize" value="10000"/>
<property key="labeling/fontMinPixelSize" value="3"/>
<property key="labeling/fontSize" value="8.25"/>
<property key="labeling/fontSizeInMapUnits" value="false"/>
<property key="labeling/fontSizeMapUnitMaxScale" value="0"/>
<property key="labeling/fontSizeMapUnitMinScale" value="0"/>
<property key="labeling/fontStrikeout" value="false"/>
<property key="labeling/fontUnderline" value="false"/>
<property key="labeling/fontWeight" value="50"/>
<property key="labeling/fontWordSpacing" value="0"/>
<property key="labeling/formatNumbers" value="false"/>
<property key="labeling/isExpression" value="true"/>
<property key="labeling/labelOffsetInMapUnits" value="true"/>
<property key="labeling/labelOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/labelOffsetMapUnitMinScale" value="0"/>
<property key="labeling/labelPerPart" value="false"/>
<property key="labeling/leftDirectionSymbol" value="&lt;"/>
<property key="labeling/limitNumLabels" value="false"/>
<property key="labeling/maxCurvedCharAngleIn" value="20"/>
<property key="labeling/maxCurvedCharAngleOut" value="-20"/>
<property key="labeling/maxNumLabels" value="2000"/>
<property key="labeling/mergeLines" value="false"/>
<property key="labeling/minFeatureSize" value="0"/>
<property key="labeling/multilineAlign" value="0"/>
<property key="labeling/multilineHeight" value="1"/>
<property key="labeling/namedStyle" value="Normal"/>
<property key="labeling/obstacle" value="true"/>
<property key="labeling/placeDirectionSymbol" value="0"/>
<property key="labeling/placement" value="2"/>
<property key="labeling/placementFlags" value="10"/>
<property key="labeling/plussign" value="false"/>
<property key="labeling/preserveRotation" value="true"/>
<property key="labeling/previewBkgrdColor" value="#ffffff"/>
<property key="labeling/priority" value="5"/>
<property key="labeling/quadOffset" value="4"/>
<property key="labeling/repeatDistance" value="0"/>
<property key="labeling/repeatDistanceMapUnitMaxScale" value="0"/>
<property key="labeling/repeatDistanceMapUnitMinScale" value="0"/>
<property key="labeling/repeatDistanceUnit" value="1"/>
<property key="labeling/reverseDirectionSymbol" value="false"/>
<property key="labeling/rightDirectionSymbol" value=">"/>
<property key="labeling/scaleMax" value="10000000"/>
<property key="labeling/scaleMin" value="1"/>
<property key="labeling/scaleVisibility" value="false"/>
<property key="labeling/shadowBlendMode" value="6"/>
<property key="labeling/shadowColorB" value="0"/>
<property key="labeling/shadowColorG" value="0"/>
<property key="labeling/shadowColorR" value="0"/>
<property key="labeling/shadowDraw" value="false"/>
<property key="labeling/shadowOffsetAngle" value="135"/>
<property key="labeling/shadowOffsetDist" value="1"/>
<property key="labeling/shadowOffsetGlobal" value="true"/>
<property key="labeling/shadowOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shadowOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shadowOffsetUnits" value="1"/>
<property key="labeling/shadowRadius" value="1.5"/>
<property key="labeling/shadowRadiusAlphaOnly" value="false"/>
<property key="labeling/shadowRadiusMapUnitMaxScale" value="0"/>
<property key="labeling/shadowRadiusMapUnitMinScale" value="0"/>
<property key="labeling/shadowRadiusUnits" value="1"/>
<property key="labeling/shadowScale" value="100"/>
<property key="labeling/shadowTransparency" value="30"/>
<property key="labeling/shadowUnder" value="0"/>
<property key="labeling/shapeBlendMode" value="0"/>
<property key="labeling/shapeBorderColorA" value="255"/>
<property key="labeling/shapeBorderColorB" value="128"/>
<property key="labeling/shapeBorderColorG" value="128"/>
<property key="labeling/shapeBorderColorR" value="128"/>
<property key="labeling/shapeBorderWidth" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMaxScale" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMinScale" value="0"/>
<property key="labeling/shapeBorderWidthUnits" value="1"/>
<property key="labeling/shapeDraw" value="false"/>
<property key="labeling/shapeFillColorA" value="255"/>
<property key="labeling/shapeFillColorB" value="255"/>
<property key="labeling/shapeFillColorG" value="255"/>
<property key="labeling/shapeFillColorR" value="255"/>
<property key="labeling/shapeJoinStyle" value="64"/>
<property key="labeling/shapeOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shapeOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shapeOffsetUnits" value="1"/>
<property key="labeling/shapeOffsetX" value="0"/>
<property key="labeling/shapeOffsetY" value="0"/>
<property key="labeling/shapeRadiiMapUnitMaxScale" value="0"/>
<property key="labeling/shapeRadiiMapUnitMinScale" value="0"/>
<property key="labeling/shapeRadiiUnits" value="1"/>
<property key="labeling/shapeRadiiX" value="0"/>
<property key="labeling/shapeRadiiY" value="0"/>
<property key="labeling/shapeRotation" value="0"/>
<property key="labeling/shapeRotationType" value="0"/>
<property key="labeling/shapeSVGFile" value=""/>
<property key="labeling/shapeSizeMapUnitMaxScale" value="0"/>
<property key="labeling/shapeSizeMapUnitMinScale" value="0"/>
<property key="labeling/shapeSizeType" value="0"/>
<property key="labeling/shapeSizeUnits" value="1"/>
<property key="labeling/shapeSizeX" value="0"/>
<property key="labeling/shapeSizeY" value="0"/>
<property key="labeling/shapeTransparency" value="0"/>
<property key="labeling/shapeType" value="0"/>
<property key="labeling/textColorA" value="255"/>
<property key="labeling/textColorB" value="0"/>
<property key="labeling/textColorG" value="0"/>
<property key="labeling/textColorR" value="0"/>
<property key="labeling/textTransp" value="0"/>
<property key="labeling/upsidedownLabels" value="0"/>
<property key="labeling/wrapChar" value=""/>
<property key="labeling/xOffset" value="0"/>
<property key="labeling/yOffset" value="0"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerTransparency>0</layerTransparency>
<displayfield>branch_id</displayfield>
<label>0</label>
<labelattributes>
<label fieldname="" text="Label"/>
<family fieldname="" name="MS Shell Dlg 2"/>
<size fieldname="" units="pt" value="12"/>
<bold fieldname="" on="0"/>
<italic fieldname="" on="0"/>
<underline fieldname="" on="0"/>
<strikeout fieldname="" on="0"/>
<color fieldname="" red="0" blue="0" green="0"/>
<x fieldname=""/>
<y fieldname=""/>
<offset x="0" y="0" units="pt" yfieldname="" xfieldname=""/>
<angle fieldname="" value="0" auto="0"/>
<alignment fieldname="" value="center"/>
<buffercolor fieldname="" red="255" blue="255" green="255"/>
<buffersize fieldname="" units="pt" value="1"/>
<bufferenabled fieldname="" on=""/>
<multilineenabled fieldname="" on=""/>
<selectedonly on=""/>
</labelattributes>
<SingleCategoryDiagramRenderer diagramType="Pie">
<DiagramCategory penColor="#000000" labelPlacementMethod="XHeight" penWidth="0" diagramOrientation="Up" minimumSize="0" barWidth="5" penAlpha="255" maxScaleDenominator="1e+08" backgroundColor="#ffffff" transparency="0" width="15" scaleDependency="Area" backgroundAlpha="255" angleOffset="1440" scaleBasedVisibility="0" enabled="0" height="15" sizeType="MM" minScaleDenominator="-4.65661e-10">
<fontProperties description="Lucida Grande,13,-1,5,50,0,0,0,0,0" style=""/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings yPosColumn="-1" linePlacementFlags="10" placement="2" dist="0" xPosColumn="-1" priority="0" obstacle="0" showAll="1"/>
<editform>.</editform>
<editforminit/>
<featformsuppress>0</featformsuppress>
<annotationform>.</annotationform>
<editorlayout>generatedlayout</editorlayout>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<attributeactions/>
<edittypes>
<edittype widgetv2type="TextEdit" name="result_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="view_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="f_bus">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="t_bus">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="br_status">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="pf">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="pt">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qf">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qt">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="vf">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="vt">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="pmin">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="pmax">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qminf">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qmaxf">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qmint">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="qmaxt">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="loss0">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="loss1">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="link_type">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="branch_voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
</maplayer>
<maplayer minimumScale="0" maximumScale="1e+08" simplifyDrawingHints="1" minLabelScale="0" maxLabelScale="1e+08" simplifyDrawingTol="1" geometry="Line" simplifyMaxScale="1" type="vector" hasScaleBasedVisibilityFlag="0" simplifyLocal="1" scaleBasedLabelVisibilityFlag="0">
<id>view_problem_log20151217121906087</id>
<datasource>dbname='"""+database+"""' host="""+host+""" port="""+port+""" user='"""+user+"""' password='"""+password+"""' sslmode=disable key='view_id' srid=4326 type=MultiLineString table="results"."view_problem_log" (way) sql=</datasource>
<title></title>
<abstract></abstract>
<keywordList>
<value></value>
</keywordList>
<layername>view_problem_log</layername>
<srs>
<spatialrefsys>
<proj4>+proj=longlat +datum=WGS84 +no_defs</proj4>
<srsid>3452</srsid>
<srid>4326</srid>
<authid>EPSG:4326</authid>
<description>WGS 84</description>
<projectionacronym>longlat</projectionacronym>
<ellipsoidacronym>WGS84</ellipsoidacronym>
<geographicflag>true</geographicflag>
</spatialrefsys>
</srs>
<provider encoding="UTF-8">postgres</provider>
<previewExpression>COALESCE( "result_id", '&lt;NULL&gt;' )</previewExpression>
<vectorjoins/>
<expressionfields/>
<map-layer-style-manager current="">
<map-layer-style name=""/>
</map-layer-style-manager>
<edittypes>
<edittype widgetv2type="TextEdit" name="result_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="view_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="object_type">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="line_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="relation_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="cables">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="wires">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="frequency">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="problem">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
<renderer-v2 symbollevels="0" type="RuleRenderer">
<rules key="{d9ac1049-afe9-42b1-92b0-6ead05fdde9c}">
<rule filter=" &quot;problem&quot; ='dead_end' " key="{1d1dd98b-15e9-4dad-a1ce-8cae486a5db3}" symbol="0" label="Dead End"/>
<rule filter=" &quot;problem&quot; = 'missing_cables' " key="{fc0fc7aa-e6e8-4222-ba6e-678be1d7d26d}" symbol="1" label="Missing Cables"/>
<rule filter=" &quot;problem&quot; = 'cable_conflict' " key="{a6456595-dc60-4a50-a8d2-ada7ddf972fa}" symbol="2" label="Cable Conflict"/>
<rule filter=" &quot;problem&quot; = 'branch_off_(cables_&gt;_3)' " key="{5a885aab-4d8b-429d-b600-f714a694f50d}" symbol="3" label="Branch Off"/>
<rule filter=" &quot;problem&quot; = 'too_many_circuits_on_power_line' " key="{d04c5b53-5f07-493e-b34a-8e7d9d3afb66}" symbol="4" label="Too many Circuits on Power line"/>
<rule filter=" &quot;problem&quot; = 'voltage_missing_on_power_line' " key="{635bae8c-6f86-4953-8a63-e33ab94f0e2a}" symbol="5" label="Circuit Voltage missing on Power line"/>
</rules>
<symbols>
<symbol alpha="1" clip_to_extent="1" type="line" name="0">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="31,120,180,255"/>
<prop k="line_style" v="dash"/>
<prop k="line_width" v="0.4"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="1">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="255,11,60,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.4"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="2">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="220,10,196,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.4"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="3">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="19,73,110,255"/>
<prop k="line_style" v="dot"/>
<prop k="line_width" v="0.4"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="4">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="242,229,43,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.4"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="5">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="71,163,164,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.4"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
</symbols>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</renderer-v2>
<customproperties>
<property key="labeling" value="pal"/>
<property key="labeling/addDirectionSymbol" value="false"/>
<property key="labeling/angleOffset" value="0"/>
<property key="labeling/blendMode" value="0"/>
<property key="labeling/bufferBlendMode" value="0"/>
<property key="labeling/bufferColorA" value="255"/>
<property key="labeling/bufferColorB" value="255"/>
<property key="labeling/bufferColorG" value="255"/>
<property key="labeling/bufferColorR" value="255"/>
<property key="labeling/bufferDraw" value="false"/>
<property key="labeling/bufferJoinStyle" value="64"/>
<property key="labeling/bufferNoFill" value="false"/>
<property key="labeling/bufferSize" value="1"/>
<property key="labeling/bufferSizeInMapUnits" value="false"/>
<property key="labeling/bufferSizeMapUnitMaxScale" value="0"/>
<property key="labeling/bufferSizeMapUnitMinScale" value="0"/>
<property key="labeling/bufferTransp" value="0"/>
<property key="labeling/centroidInside" value="false"/>
<property key="labeling/centroidWhole" value="false"/>
<property key="labeling/decimals" value="3"/>
<property key="labeling/displayAll" value="false"/>
<property key="labeling/dist" value="0"/>
<property key="labeling/distInMapUnits" value="false"/>
<property key="labeling/distMapUnitMaxScale" value="0"/>
<property key="labeling/distMapUnitMinScale" value="0"/>
<property key="labeling/enabled" value="false"/>
<property key="labeling/fieldName" value=""/>
<property key="labeling/fontBold" value="false"/>
<property key="labeling/fontCapitals" value="0"/>
<property key="labeling/fontFamily" value="Lucida Grande"/>
<property key="labeling/fontItalic" value="false"/>
<property key="labeling/fontLetterSpacing" value="0"/>
<property key="labeling/fontLimitPixelSize" value="false"/>
<property key="labeling/fontMaxPixelSize" value="10000"/>
<property key="labeling/fontMinPixelSize" value="3"/>
<property key="labeling/fontSize" value="8.25"/>
<property key="labeling/fontSizeInMapUnits" value="false"/>
<property key="labeling/fontSizeMapUnitMaxScale" value="0"/>
<property key="labeling/fontSizeMapUnitMinScale" value="0"/>
<property key="labeling/fontStrikeout" value="false"/>
<property key="labeling/fontUnderline" value="false"/>
<property key="labeling/fontWeight" value="50"/>
<property key="labeling/fontWordSpacing" value="0"/>
<property key="labeling/formatNumbers" value="false"/>
<property key="labeling/isExpression" value="true"/>
<property key="labeling/labelOffsetInMapUnits" value="true"/>
<property key="labeling/labelOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/labelOffsetMapUnitMinScale" value="0"/>
<property key="labeling/labelPerPart" value="false"/>
<property key="labeling/leftDirectionSymbol" value="<"/>
<property key="labeling/limitNumLabels" value="false"/>
<property key="labeling/maxCurvedCharAngleIn" value="20"/>
<property key="labeling/maxCurvedCharAngleOut" value="-20"/>
<property key="labeling/maxNumLabels" value="2000"/>
<property key="labeling/mergeLines" value="false"/>
<property key="labeling/minFeatureSize" value="0"/>
<property key="labeling/multilineAlign" value="0"/>
<property key="labeling/multilineHeight" value="1"/>
<property key="labeling/namedStyle" value="Normal"/>
<property key="labeling/obstacle" value="true"/>
<property key="labeling/placeDirectionSymbol" value="0"/>
<property key="labeling/placement" value="2"/>
<property key="labeling/placementFlags" value="10"/>
<property key="labeling/plussign" value="false"/>
<property key="labeling/preserveRotation" value="true"/>
<property key="labeling/previewBkgrdColor" value="#ffffff"/>
<property key="labeling/priority" value="5"/>
<property key="labeling/quadOffset" value="4"/>
<property key="labeling/repeatDistance" value="0"/>
<property key="labeling/repeatDistanceMapUnitMaxScale" value="0"/>
<property key="labeling/repeatDistanceMapUnitMinScale" value="0"/>
<property key="labeling/repeatDistanceUnit" value="1"/>
<property key="labeling/reverseDirectionSymbol" value="false"/>
<property key="labeling/rightDirectionSymbol" value=">"/>
<property key="labeling/scaleMax" value="10000000"/>
<property key="labeling/scaleMin" value="1"/>
<property key="labeling/scaleVisibility" value="false"/>
<property key="labeling/shadowBlendMode" value="6"/>
<property key="labeling/shadowColorB" value="0"/>
<property key="labeling/shadowColorG" value="0"/>
<property key="labeling/shadowColorR" value="0"/>
<property key="labeling/shadowDraw" value="false"/>
<property key="labeling/shadowOffsetAngle" value="135"/>
<property key="labeling/shadowOffsetDist" value="1"/>
<property key="labeling/shadowOffsetGlobal" value="true"/>
<property key="labeling/shadowOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shadowOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shadowOffsetUnits" value="1"/>
<property key="labeling/shadowRadius" value="1.5"/>
<property key="labeling/shadowRadiusAlphaOnly" value="false"/>
<property key="labeling/shadowRadiusMapUnitMaxScale" value="0"/>
<property key="labeling/shadowRadiusMapUnitMinScale" value="0"/>
<property key="labeling/shadowRadiusUnits" value="1"/>
<property key="labeling/shadowScale" value="100"/>
<property key="labeling/shadowTransparency" value="30"/>
<property key="labeling/shadowUnder" value="0"/>
<property key="labeling/shapeBlendMode" value="0"/>
<property key="labeling/shapeBorderColorA" value="255"/>
<property key="labeling/shapeBorderColorB" value="128"/>
<property key="labeling/shapeBorderColorG" value="128"/>
<property key="labeling/shapeBorderColorR" value="128"/>
<property key="labeling/shapeBorderWidth" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMaxScale" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMinScale" value="0"/>
<property key="labeling/shapeBorderWidthUnits" value="1"/>
<property key="labeling/shapeDraw" value="false"/>
<property key="labeling/shapeFillColorA" value="255"/>
<property key="labeling/shapeFillColorB" value="255"/>
<property key="labeling/shapeFillColorG" value="255"/>
<property key="labeling/shapeFillColorR" value="255"/>
<property key="labeling/shapeJoinStyle" value="64"/>
<property key="labeling/shapeOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shapeOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shapeOffsetUnits" value="1"/>
<property key="labeling/shapeOffsetX" value="0"/>
<property key="labeling/shapeOffsetY" value="0"/>
<property key="labeling/shapeRadiiMapUnitMaxScale" value="0"/>
<property key="labeling/shapeRadiiMapUnitMinScale" value="0"/>
<property key="labeling/shapeRadiiUnits" value="1"/>
<property key="labeling/shapeRadiiX" value="0"/>
<property key="labeling/shapeRadiiY" value="0"/>
<property key="labeling/shapeRotation" value="0"/>
<property key="labeling/shapeRotationType" value="0"/>
<property key="labeling/shapeSVGFile" value=""/>
<property key="labeling/shapeSizeMapUnitMaxScale" value="0"/>
<property key="labeling/shapeSizeMapUnitMinScale" value="0"/>
<property key="labeling/shapeSizeType" value="0"/>
<property key="labeling/shapeSizeUnits" value="1"/>
<property key="labeling/shapeSizeX" value="0"/>
<property key="labeling/shapeSizeY" value="0"/>
<property key="labeling/shapeTransparency" value="0"/>
<property key="labeling/shapeType" value="0"/>
<property key="labeling/textColorA" value="255"/>
<property key="labeling/textColorB" value="0"/>
<property key="labeling/textColorG" value="0"/>
<property key="labeling/textColorR" value="0"/>
<property key="labeling/textTransp" value="0"/>
<property key="labeling/upsidedownLabels" value="0"/>
<property key="labeling/wrapChar" value=""/>
<property key="labeling/xOffset" value="0"/>
<property key="labeling/yOffset" value="0"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerTransparency>0</layerTransparency>
<displayfield>line_id</displayfield>
<label>0</label>
<labelattributes>
<label fieldname="" text="Label"/>
<family fieldname="" name="MS Shell Dlg 2"/>
<size fieldname="" units="pt" value="12"/>
<bold fieldname="" on="0"/>
<italic fieldname="" on="0"/>
<underline fieldname="" on="0"/>
<strikeout fieldname="" on="0"/>
<color fieldname="" red="0" blue="0" green="0"/>
<x fieldname=""/>
<y fieldname=""/>
<offset x="0" y="0" units="pt" yfieldname="" xfieldname=""/>
<angle fieldname="" value="0" auto="0"/>
<alignment fieldname="" value="center"/>
<buffercolor fieldname="" red="255" blue="255" green="255"/>
<buffersize fieldname="" units="pt" value="1"/>
<bufferenabled fieldname="" on=""/>
<multilineenabled fieldname="" on=""/>
<selectedonly on=""/>
</labelattributes>
<SingleCategoryDiagramRenderer diagramType="Pie">
<DiagramCategory penColor="#000000" labelPlacementMethod="XHeight" penWidth="0" diagramOrientation="Up" minimumSize="0" barWidth="5" penAlpha="255" maxScaleDenominator="1e+08" backgroundColor="#ffffff" transparency="0" width="15" scaleDependency="Area" backgroundAlpha="255" angleOffset="1440" scaleBasedVisibility="0" enabled="0" height="15" sizeType="MM" minScaleDenominator="-4.65661e-10">
<fontProperties description="Lucida Grande,13,-1,5,50,0,0,0,0,0" style=""/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings yPosColumn="-1" linePlacementFlags="10" placement="2" dist="0" xPosColumn="-1" priority="0" obstacle="0" showAll="1"/>
<editform>.</editform>
<editforminit/>
<featformsuppress>0</featformsuppress>
<annotationform>.</annotationform>
<editorlayout>generatedlayout</editorlayout>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<attributeactions/>
<edittypes>
<edittype widgetv2type="TextEdit" name="result_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="view_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="object_type">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="line_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="relation_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="cables">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="wires">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="frequency">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="problem">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
</maplayer>
<maplayer minimumScale="0" maximumScale="587421" simplifyDrawingHints="1" minLabelScale="0" maxLabelScale="1e+08" simplifyDrawingTol="1" geometry="Polygon" simplifyMaxScale="1" type="vector" hasScaleBasedVisibilityFlag="1" simplifyLocal="1" scaleBasedLabelVisibilityFlag="0">
<id>view_substations20151217121906140</id>
<datasource>dbname='"""+database+"""' host="""+host+""" port="""+port+""" user='"""+user+"""' password='"""+password+"""' sslmode=disable key='view_id' srid=4326 type=Polygon table="results"."view_substations" (geom) sql=</datasource>
<title></title>
<abstract></abstract>
<keywordList>
<value></value>
</keywordList>
<layername>view_substations</layername>
<srs>
<spatialrefsys>
<proj4>+proj=longlat +datum=WGS84 +no_defs</proj4>
<srsid>3452</srsid>
<srid>4326</srid>
<authid>EPSG:4326</authid>
<description>WGS 84</description>
<projectionacronym>longlat</projectionacronym>
<ellipsoidacronym>WGS84</ellipsoidacronym>
<geographicflag>true</geographicflag>
</spatialrefsys>
</srs>
<provider encoding="UTF-8">postgres</provider>
<previewExpression></previewExpression>
<vectorjoins/>
<expressionfields/>
<map-layer-style-manager current="">
<map-layer-style name=""/>
</map-layer-style-manager>
<edittypes>
<edittype widgetv2type="TextEdit" name="result_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="view_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="osm_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="s_long">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="name">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="center_geom">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
<renderer-v2 symbollevels="0" type="singleSymbol">
<symbols>
<symbol alpha="1" clip_to_extent="1" type="fill" name="0">
<layer pass="0" class="SimpleFill" locked="0">
<prop k="border_width_map_unit_scale" v="0,0"/>
<prop k="color" v="31,120,180,255"/>
<prop k="joinstyle" v="bevel"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0.26"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="style" v="solid"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
</symbols>
<rotation/>
<sizescale scalemethod="diameter"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</renderer-v2>
<customproperties>
<property key="labeling" value="pal"/>
<property key="labeling/addDirectionSymbol" value="false"/>
<property key="labeling/angleOffset" value="0"/>
<property key="labeling/blendMode" value="0"/>
<property key="labeling/bufferBlendMode" value="0"/>
<property key="labeling/bufferColorA" value="255"/>
<property key="labeling/bufferColorB" value="255"/>
<property key="labeling/bufferColorG" value="255"/>
<property key="labeling/bufferColorR" value="255"/>
<property key="labeling/bufferDraw" value="true"/>
<property key="labeling/bufferJoinStyle" value="64"/>
<property key="labeling/bufferNoFill" value="false"/>
<property key="labeling/bufferSize" value="1"/>
<property key="labeling/bufferSizeInMapUnits" value="false"/>
<property key="labeling/bufferSizeMapUnitMaxScale" value="0"/>
<property key="labeling/bufferSizeMapUnitMinScale" value="0"/>
<property key="labeling/bufferTransp" value="0"/>
<property key="labeling/centroidInside" value="false"/>
<property key="labeling/centroidWhole" value="false"/>
<property key="labeling/decimals" value="3"/>
<property key="labeling/displayAll" value="false"/>
<property key="labeling/dist" value="0"/>
<property key="labeling/distInMapUnits" value="false"/>
<property key="labeling/distMapUnitMaxScale" value="0"/>
<property key="labeling/distMapUnitMinScale" value="0"/>
<property key="labeling/enabled" value="true"/>
<property key="labeling/fieldName" value="name"/>
<property key="labeling/fontBold" value="false"/>
<property key="labeling/fontCapitals" value="0"/>
<property key="labeling/fontFamily" value="Lucida Grande"/>
<property key="labeling/fontItalic" value="false"/>
<property key="labeling/fontLetterSpacing" value="0"/>
<property key="labeling/fontLimitPixelSize" value="false"/>
<property key="labeling/fontMaxPixelSize" value="10000"/>
<property key="labeling/fontMinPixelSize" value="3"/>
<property key="labeling/fontSize" value="9.25"/>
<property key="labeling/fontSizeInMapUnits" value="false"/>
<property key="labeling/fontSizeMapUnitMaxScale" value="0"/>
<property key="labeling/fontSizeMapUnitMinScale" value="0"/>
<property key="labeling/fontStrikeout" value="false"/>
<property key="labeling/fontUnderline" value="false"/>
<property key="labeling/fontWeight" value="50"/>
<property key="labeling/fontWordSpacing" value="0"/>
<property key="labeling/formatNumbers" value="false"/>
<property key="labeling/isExpression" value="false"/>
<property key="labeling/labelOffsetInMapUnits" value="true"/>
<property key="labeling/labelOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/labelOffsetMapUnitMinScale" value="0"/>
<property key="labeling/labelPerPart" value="false"/>
<property key="labeling/leftDirectionSymbol" value="&amp;lt;"/>
<property key="labeling/limitNumLabels" value="false"/>
<property key="labeling/maxCurvedCharAngleIn" value="20"/>
<property key="labeling/maxCurvedCharAngleOut" value="-20"/>
<property key="labeling/maxNumLabels" value="2000"/>
<property key="labeling/mergeLines" value="false"/>
<property key="labeling/minFeatureSize" value="0"/>
<property key="labeling/multilineAlign" value="0"/>
<property key="labeling/multilineHeight" value="1"/>
<property key="labeling/namedStyle" value="Normal"/>
<property key="labeling/obstacle" value="true"/>
<property key="labeling/placeDirectionSymbol" value="0"/>
<property key="labeling/placement" value="1"/>
<property key="labeling/placementFlags" value="0"/>
<property key="labeling/plussign" value="false"/>
<property key="labeling/preserveRotation" value="true"/>
<property key="labeling/previewBkgrdColor" value="#ffffff"/>
<property key="labeling/priority" value="5"/>
<property key="labeling/quadOffset" value="2"/>
<property key="labeling/repeatDistance" value="0"/>
<property key="labeling/repeatDistanceMapUnitMaxScale" value="0"/>
<property key="labeling/repeatDistanceMapUnitMinScale" value="0"/>
<property key="labeling/repeatDistanceUnit" value="1"/>
<property key="labeling/reverseDirectionSymbol" value="false"/>
<property key="labeling/rightDirectionSymbol" value=">"/>
<property key="labeling/scaleMax" value="10000000"/>
<property key="labeling/scaleMin" value="1"/>
<property key="labeling/scaleVisibility" value="false"/>
<property key="labeling/shadowBlendMode" value="6"/>
<property key="labeling/shadowColorB" value="0"/>
<property key="labeling/shadowColorG" value="0"/>
<property key="labeling/shadowColorR" value="0"/>
<property key="labeling/shadowDraw" value="false"/>
<property key="labeling/shadowOffsetAngle" value="135"/>
<property key="labeling/shadowOffsetDist" value="1"/>
<property key="labeling/shadowOffsetGlobal" value="true"/>
<property key="labeling/shadowOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shadowOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shadowOffsetUnits" value="1"/>
<property key="labeling/shadowRadius" value="1.5"/>
<property key="labeling/shadowRadiusAlphaOnly" value="false"/>
<property key="labeling/shadowRadiusMapUnitMaxScale" value="0"/>
<property key="labeling/shadowRadiusMapUnitMinScale" value="0"/>
<property key="labeling/shadowRadiusUnits" value="1"/>
<property key="labeling/shadowScale" value="100"/>
<property key="labeling/shadowTransparency" value="30"/>
<property key="labeling/shadowUnder" value="0"/>
<property key="labeling/shapeBlendMode" value="0"/>
<property key="labeling/shapeBorderColorA" value="255"/>
<property key="labeling/shapeBorderColorB" value="128"/>
<property key="labeling/shapeBorderColorG" value="128"/>
<property key="labeling/shapeBorderColorR" value="128"/>
<property key="labeling/shapeBorderWidth" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMaxScale" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMinScale" value="0"/>
<property key="labeling/shapeBorderWidthUnits" value="1"/>
<property key="labeling/shapeDraw" value="false"/>
<property key="labeling/shapeFillColorA" value="255"/>
<property key="labeling/shapeFillColorB" value="255"/>
<property key="labeling/shapeFillColorG" value="255"/>
<property key="labeling/shapeFillColorR" value="255"/>
<property key="labeling/shapeJoinStyle" value="64"/>
<property key="labeling/shapeOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shapeOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shapeOffsetUnits" value="1"/>
<property key="labeling/shapeOffsetX" value="0"/>
<property key="labeling/shapeOffsetY" value="0"/>
<property key="labeling/shapeRadiiMapUnitMaxScale" value="0"/>
<property key="labeling/shapeRadiiMapUnitMinScale" value="0"/>
<property key="labeling/shapeRadiiUnits" value="1"/>
<property key="labeling/shapeRadiiX" value="0"/>
<property key="labeling/shapeRadiiY" value="0"/>
<property key="labeling/shapeRotation" value="0"/>
<property key="labeling/shapeRotationType" value="0"/>
<property key="labeling/shapeSVGFile" value=""/>
<property key="labeling/shapeSizeMapUnitMaxScale" value="0"/>
<property key="labeling/shapeSizeMapUnitMinScale" value="0"/>
<property key="labeling/shapeSizeType" value="0"/>
<property key="labeling/shapeSizeUnits" value="1"/>
<property key="labeling/shapeSizeX" value="0"/>
<property key="labeling/shapeSizeY" value="0"/>
<property key="labeling/shapeTransparency" value="0"/>
<property key="labeling/shapeType" value="0"/>
<property key="labeling/textColorA" value="255"/>
<property key="labeling/textColorB" value="0"/>
<property key="labeling/textColorG" value="0"/>
<property key="labeling/textColorR" value="0"/>
<property key="labeling/textTransp" value="0"/>
<property key="labeling/upsidedownLabels" value="0"/>
<property key="labeling/wrapChar" value=""/>
<property key="labeling/xOffset" value="0"/>
<property key="labeling/yOffset" value="0"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerTransparency>75</layerTransparency>
<displayfield>id</displayfield>
<label>0</label>
<labelattributes>
<label fieldname="" text="Label"/>
<family fieldname="" name="MS Shell Dlg 2"/>
<size fieldname="" units="pt" value="12"/>
<bold fieldname="" on="0"/>
<italic fieldname="" on="0"/>
<underline fieldname="" on="0"/>
<strikeout fieldname="" on="0"/>
<color fieldname="" red="0" blue="0" green="0"/>
<x fieldname=""/>
<y fieldname=""/>
<offset x="0" y="0" units="pt" yfieldname="" xfieldname=""/>
<angle fieldname="" value="0" auto="0"/>
<alignment fieldname="" value="center"/>
<buffercolor fieldname="" red="255" blue="255" green="255"/>
<buffersize fieldname="" units="pt" value="1"/>
<bufferenabled fieldname="" on=""/>
<multilineenabled fieldname="" on=""/>
<selectedonly on=""/>
</labelattributes>
<SingleCategoryDiagramRenderer diagramType="Pie">
<DiagramCategory penColor="#000000" labelPlacementMethod="XHeight" penWidth="0" diagramOrientation="Up" minimumSize="0" barWidth="5" penAlpha="255" maxScaleDenominator="1e+08" backgroundColor="#ffffff" transparency="0" width="15" scaleDependency="Area" backgroundAlpha="255" angleOffset="1440" scaleBasedVisibility="0" enabled="0" height="15" sizeType="MM" minScaleDenominator="-4.65661e-10">
<fontProperties description="Lucida Grande,13,-1,5,50,0,0,0,0,0" style=""/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings yPosColumn="-1" linePlacementFlags="10" placement="0" dist="0" xPosColumn="-1" priority="0" obstacle="0" showAll="1"/>
<editform>.</editform>
<editforminit/>
<featformsuppress>0</featformsuppress>
<annotationform>.</annotationform>
<editorlayout>generatedlayout</editorlayout>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<attributeactions/>
<edittypes>
<edittype widgetv2type="TextEdit" name="result_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="view_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="osm_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="s_long">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="name">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="center_geom">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
</maplayer>
</projectlayers>
<properties>
<SpatialRefSys>
<ProjectCRSProj4String type="QString">+proj=longlat +datum=WGS84 +no_defs</ProjectCRSProj4String>
<ProjectCrs type="QString">EPSG:4326</ProjectCrs>
<ProjectCRSID type="int">3452</ProjectCRSID>
<ProjectionsEnabled type="int">0</ProjectionsEnabled>
</SpatialRefSys>
<Paths>
<Absolute type="bool">false</Absolute>
</Paths>
<Gui>
<SelectionColorBluePart type="int">0</SelectionColorBluePart>
<CanvasColorGreenPart type="int">255</CanvasColorGreenPart>
<CanvasColorRedPart type="int">255</CanvasColorRedPart>
<SelectionColorRedPart type="int">255</SelectionColorRedPart>
<SelectionColorAlphaPart type="int">255</SelectionColorAlphaPart>
<SelectionColorGreenPart type="int">255</SelectionColorGreenPart>
<CanvasColorBluePart type="int">255</CanvasColorBluePart>
</Gui>
<Digitizing>
<DefaultSnapToleranceUnit type="int">2</DefaultSnapToleranceUnit>
<LayerSnappingList type="QStringList">
<value>view_branch_data20151217133637741</value>
<value>view_bus_data20151217121906009</value>
<value>view_dcline_data20151217133332218</value>
<value>view_problem_log20151217121906087</value>
<value>view_substations20151217121906140</value>
</LayerSnappingList>
<LayerSnappingEnabledList type="QStringList">
<value>disabled</value>
<value>disabled</value>
<value>disabled</value>
<value>disabled</value>
<value>disabled</value>
</LayerSnappingEnabledList>
<SnappingMode type="QString">current_layer</SnappingMode>
<AvoidIntersectionsList type="QStringList"/>
<LayerSnappingToleranceUnitList type="QStringList">
<value>2</value>
<value>2</value>
<value>2</value>
<value>2</value>
<value>2</value>
</LayerSnappingToleranceUnitList>
<LayerSnapToList type="QStringList">
<value>to_vertex_and_segment</value>
<value>to_vertex_and_segment</value>
<value>to_vertex_and_segment</value>
<value>to_vertex_and_segment</value>
<value>to_vertex_and_segment</value>
</LayerSnapToList>
<DefaultSnapType type="QString">off</DefaultSnapType>
<DefaultSnapTolerance type="double">0</DefaultSnapTolerance>
<LayerSnappingToleranceList type="QStringList">
<value>0.000000</value>
<value>0.000000</value>
<value>0.000000</value>
<value>0.000000</value>
<value>0.000000</value>
</LayerSnappingToleranceList>
</Digitizing>
<PositionPrecision>
<DecimalPlaces type="int">2</DecimalPlaces>
<Automatic type="bool">true</Automatic>
</PositionPrecision>
<Legend>
<filterByMap type="bool">false</filterByMap>
</Legend>
</properties>
</qgis>
"""
# Persist the generated QGIS results project next to the other project
# files, but never clobber an existing file (a user may have customized it).
filepath = qgis_proj_dir + "/" + database + "_results_project.qgs"
if not os.path.exists(filepath):
    # Context manager guarantees the handle is closed even if write() fails.
    with open(filepath, "w") as fh:
        fh.write(result_project)
grid_devel_project = """
<!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>
<qgis projectname="" version="2.10.1-Pisa">
<title></title>
<layer-tree-group expanded="1" checked="Qt::Checked" name="">
<customproperties/>
<layer-tree-layer expanded="1" checked="Qt::Checked" id="vw_change_log20151204114804760" name="vw_change_log">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer expanded="1" checked="Qt::Checked" id="edit_power_relations20151214185446433" name="edit_power_relations">
<customproperties/>
</layer-tree-layer>
<layer-tree-layer expanded="1" checked="Qt::Checked" id="power_ways20151204114804618" name="power_ways">
<customproperties/>
</layer-tree-layer>
</layer-tree-group>
<relations/>
<mapcanvas>
<units>degrees</units>
<extent>
<xmin>5.9040642516733417</xmin>
<ymin>48.85804593092658621</ymin>
<xmax>12.8416930407427099</xmax>
<ymax>53.67844787833390541</ymax>
</extent>
<rotation>0</rotation>
<projections>1</projections>
<destinationsrs>
<spatialrefsys>
<proj4>+proj=longlat +datum=WGS84 +no_defs</proj4>
<srsid>3452</srsid>
<srid>4326</srid>
<authid>EPSG:4326</authid>
<description>WGS 84</description>
<projectionacronym>longlat</projectionacronym>
<ellipsoidacronym>WGS84</ellipsoidacronym>
<geographicflag>true</geographicflag>
</spatialrefsys>
</destinationsrs>
<layer_coordinate_transform_info>
<layer_coordinate_transform destAuthId="EPSG:4326" srcAuthId="EPSG:4326" srcDatumTransform="-1" destDatumTransform="-1" layerid="edit_power_relations20151214185446433"/>
<layer_coordinate_transform destAuthId="EPSG:4326" srcAuthId="EPSG:4326" srcDatumTransform="-1" destDatumTransform="-1" layerid="vw_change_log20151204114804760"/>
<layer_coordinate_transform destAuthId="EPSG:4326" srcAuthId="EPSG:4326" srcDatumTransform="-1" destDatumTransform="-1" layerid="power_ways20151204114804618"/>
</layer_coordinate_transform_info>
</mapcanvas>
<visibility-presets/>
<layer-tree-canvas>
<custom-order enabled="0">
<item>power_ways20151204114804618</item>
<item>vw_change_log20151204114804760</item>
<item>edit_power_relations20151214185446433</item>
</custom-order>
</layer-tree-canvas>
<legend updateDrawingOrder="true">
<legendlayer drawingOrder="-1" open="true" checked="Qt::Checked" name="vw_change_log" showFeatureCount="0">
<filegroup open="true" hidden="false">
<legendlayerfile isInOverview="0" layerid="vw_change_log20151204114804760" visible="1"/>
</filegroup>
</legendlayer>
<legendlayer drawingOrder="-1" open="true" checked="Qt::Checked" name="edit_power_relations" showFeatureCount="0">
<filegroup open="true" hidden="false">
<legendlayerfile isInOverview="0" layerid="edit_power_relations20151214185446433" visible="1"/>
</filegroup>
</legendlayer>
<legendlayer drawingOrder="-1" open="true" checked="Qt::Checked" name="power_ways" showFeatureCount="0">
<filegroup open="true" hidden="false">
<legendlayerfile isInOverview="0" layerid="power_ways20151204114804618" visible="1"/>
</filegroup>
</legendlayer>
</legend>
<projectlayers layercount="3">
<maplayer minimumScale="0" maximumScale="1e+08" simplifyDrawingHints="1" minLabelScale="0" maxLabelScale="1e+08" simplifyDrawingTol="1" geometry="Line" simplifyMaxScale="1" type="vector" hasScaleBasedVisibilityFlag="0" simplifyLocal="1" scaleBasedLabelVisibilityFlag="0">
<id>edit_power_relations20151214185446433</id>
<datasource>dbname='"""+database+"""' host="""+host+""" port="""+port+""" user='"""+user+"""' password='"""+password+"""' sslmode=disable key='id' srid=4326 type=MultiLineString table="public"."edit_power_relations" (st_union) sql=</datasource>
<title></title>
<abstract></abstract>
<keywordList>
<value></value>
</keywordList>
<layername>edit_power_relations</layername>
<srs>
<spatialrefsys>
<proj4>+proj=longlat +datum=WGS84 +no_defs</proj4>
<srsid>3452</srsid>
<srid>4326</srid>
<authid>EPSG:4326</authid>
<description>WGS 84</description>
<projectionacronym>longlat</projectionacronym>
<ellipsoidacronym>WGS84</ellipsoidacronym>
<geographicflag>true</geographicflag>
</spatialrefsys>
</srs>
<provider encoding="UTF-8">postgres</provider>
<previewExpression></previewExpression>
<vectorjoins/>
<expressionfields/>
<map-layer-style-manager current="">
<map-layer-style name=""/>
</map-layer-style-manager>
<edittypes>
<edittype widgetv2type="TextEdit" name="id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="cables">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="wires">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="circuits">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="frequency">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="members">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
<renderer-v2 symbollevels="0" type="RuleRenderer">
<rules key="{148bb227-0993-4418-b3eb-af4ec6ac338b}">
<rule checkstate="0" key="{9909ee22-99c8-4bb8-8ee0-05535a899654}" symbol="0" label="All"/>
<rule filter=" &amp;quot;voltage&amp;quot; = 220000 OR &amp;quot;voltage&amp;quot; = 380000" key="{648e9ac3-061e-433f-b400-b3b12b80ea93}" symbol="1" label="220kV, 380kV"/>
</rules>
<symbols>
<symbol alpha="0.286275" clip_to_extent="1" type="line" name="0">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="0,0,0,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.09"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="0.835294" clip_to_extent="1" type="line" name="1">
<layer pass="1" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="0,0,0,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.18"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="5.55112e-17"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
</symbols>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</renderer-v2>
<customproperties>
<property key="labeling" value="pal"/>
<property key="labeling/addDirectionSymbol" value="false"/>
<property key="labeling/angleOffset" value="0"/>
<property key="labeling/blendMode" value="0"/>
<property key="labeling/bufferBlendMode" value="0"/>
<property key="labeling/bufferColorA" value="255"/>
<property key="labeling/bufferColorB" value="255"/>
<property key="labeling/bufferColorG" value="255"/>
<property key="labeling/bufferColorR" value="255"/>
<property key="labeling/bufferDraw" value="false"/>
<property key="labeling/bufferJoinStyle" value="64"/>
<property key="labeling/bufferNoFill" value="false"/>
<property key="labeling/bufferSize" value="1"/>
<property key="labeling/bufferSizeInMapUnits" value="false"/>
<property key="labeling/bufferSizeMapUnitMaxScale" value="0"/>
<property key="labeling/bufferSizeMapUnitMinScale" value="0"/>
<property key="labeling/bufferTransp" value="0"/>
<property key="labeling/centroidInside" value="false"/>
<property key="labeling/centroidWhole" value="false"/>
<property key="labeling/decimals" value="3"/>
<property key="labeling/displayAll" value="false"/>
<property key="labeling/dist" value="0"/>
<property key="labeling/distInMapUnits" value="false"/>
<property key="labeling/distMapUnitMaxScale" value="0"/>
<property key="labeling/distMapUnitMinScale" value="0"/>
<property key="labeling/enabled" value="false"/>
<property key="labeling/fieldName" value=""/>
<property key="labeling/fontBold" value="false"/>
<property key="labeling/fontCapitals" value="0"/>
<property key="labeling/fontFamily" value="Lucida Grande"/>
<property key="labeling/fontItalic" value="false"/>
<property key="labeling/fontLetterSpacing" value="0"/>
<property key="labeling/fontLimitPixelSize" value="false"/>
<property key="labeling/fontMaxPixelSize" value="10000"/>
<property key="labeling/fontMinPixelSize" value="3"/>
<property key="labeling/fontSize" value="8.25"/>
<property key="labeling/fontSizeInMapUnits" value="false"/>
<property key="labeling/fontSizeMapUnitMaxScale" value="0"/>
<property key="labeling/fontSizeMapUnitMinScale" value="0"/>
<property key="labeling/fontStrikeout" value="false"/>
<property key="labeling/fontUnderline" value="false"/>
<property key="labeling/fontWeight" value="50"/>
<property key="labeling/fontWordSpacing" value="0"/>
<property key="labeling/formatNumbers" value="false"/>
<property key="labeling/isExpression" value="true"/>
<property key="labeling/labelOffsetInMapUnits" value="true"/>
<property key="labeling/labelOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/labelOffsetMapUnitMinScale" value="0"/>
<property key="labeling/labelPerPart" value="false"/>
<property key="labeling/leftDirectionSymbol" value="&lt;"/>
<property key="labeling/limitNumLabels" value="false"/>
<property key="labeling/maxCurvedCharAngleIn" value="20"/>
<property key="labeling/maxCurvedCharAngleOut" value="-20"/>
<property key="labeling/maxNumLabels" value="2000"/>
<property key="labeling/mergeLines" value="false"/>
<property key="labeling/minFeatureSize" value="0"/>
<property key="labeling/multilineAlign" value="0"/>
<property key="labeling/multilineHeight" value="1"/>
<property key="labeling/namedStyle" value="Normal"/>
<property key="labeling/obstacle" value="true"/>
<property key="labeling/placeDirectionSymbol" value="0"/>
<property key="labeling/placement" value="2"/>
<property key="labeling/placementFlags" value="10"/>
<property key="labeling/plussign" value="false"/>
<property key="labeling/preserveRotation" value="true"/>
<property key="labeling/previewBkgrdColor" value="#ffffff"/>
<property key="labeling/priority" value="5"/>
<property key="labeling/quadOffset" value="4"/>
<property key="labeling/repeatDistance" value="0"/>
<property key="labeling/repeatDistanceMapUnitMaxScale" value="0"/>
<property key="labeling/repeatDistanceMapUnitMinScale" value="0"/>
<property key="labeling/repeatDistanceUnit" value="1"/>
<property key="labeling/reverseDirectionSymbol" value="false"/>
<property key="labeling/rightDirectionSymbol" value=">"/>
<property key="labeling/scaleMax" value="10000000"/>
<property key="labeling/scaleMin" value="1"/>
<property key="labeling/scaleVisibility" value="false"/>
<property key="labeling/shadowBlendMode" value="6"/>
<property key="labeling/shadowColorB" value="0"/>
<property key="labeling/shadowColorG" value="0"/>
<property key="labeling/shadowColorR" value="0"/>
<property key="labeling/shadowDraw" value="false"/>
<property key="labeling/shadowOffsetAngle" value="135"/>
<property key="labeling/shadowOffsetDist" value="1"/>
<property key="labeling/shadowOffsetGlobal" value="true"/>
<property key="labeling/shadowOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shadowOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shadowOffsetUnits" value="1"/>
<property key="labeling/shadowRadius" value="1.5"/>
<property key="labeling/shadowRadiusAlphaOnly" value="false"/>
<property key="labeling/shadowRadiusMapUnitMaxScale" value="0"/>
<property key="labeling/shadowRadiusMapUnitMinScale" value="0"/>
<property key="labeling/shadowRadiusUnits" value="1"/>
<property key="labeling/shadowScale" value="100"/>
<property key="labeling/shadowTransparency" value="30"/>
<property key="labeling/shadowUnder" value="0"/>
<property key="labeling/shapeBlendMode" value="0"/>
<property key="labeling/shapeBorderColorA" value="255"/>
<property key="labeling/shapeBorderColorB" value="128"/>
<property key="labeling/shapeBorderColorG" value="128"/>
<property key="labeling/shapeBorderColorR" value="128"/>
<property key="labeling/shapeBorderWidth" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMaxScale" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMinScale" value="0"/>
<property key="labeling/shapeBorderWidthUnits" value="1"/>
<property key="labeling/shapeDraw" value="false"/>
<property key="labeling/shapeFillColorA" value="255"/>
<property key="labeling/shapeFillColorB" value="255"/>
<property key="labeling/shapeFillColorG" value="255"/>
<property key="labeling/shapeFillColorR" value="255"/>
<property key="labeling/shapeJoinStyle" value="64"/>
<property key="labeling/shapeOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shapeOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shapeOffsetUnits" value="1"/>
<property key="labeling/shapeOffsetX" value="0"/>
<property key="labeling/shapeOffsetY" value="0"/>
<property key="labeling/shapeRadiiMapUnitMaxScale" value="0"/>
<property key="labeling/shapeRadiiMapUnitMinScale" value="0"/>
<property key="labeling/shapeRadiiUnits" value="1"/>
<property key="labeling/shapeRadiiX" value="0"/>
<property key="labeling/shapeRadiiY" value="0"/>
<property key="labeling/shapeRotation" value="0"/>
<property key="labeling/shapeRotationType" value="0"/>
<property key="labeling/shapeSVGFile" value=""/>
<property key="labeling/shapeSizeMapUnitMaxScale" value="0"/>
<property key="labeling/shapeSizeMapUnitMinScale" value="0"/>
<property key="labeling/shapeSizeType" value="0"/>
<property key="labeling/shapeSizeUnits" value="1"/>
<property key="labeling/shapeSizeX" value="0"/>
<property key="labeling/shapeSizeY" value="0"/>
<property key="labeling/shapeTransparency" value="0"/>
<property key="labeling/shapeType" value="0"/>
<property key="labeling/textColorA" value="255"/>
<property key="labeling/textColorB" value="0"/>
<property key="labeling/textColorG" value="0"/>
<property key="labeling/textColorR" value="0"/>
<property key="labeling/textTransp" value="0"/>
<property key="labeling/upsidedownLabels" value="0"/>
<property key="labeling/wrapChar" value=""/>
<property key="labeling/xOffset" value="0"/>
<property key="labeling/yOffset" value="0"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerTransparency>0</layerTransparency>
<displayfield>id</displayfield>
<label>0</label>
<labelattributes>
<label fieldname="" text="Beschriftung"/>
<family fieldname="" name="MS Shell Dlg 2"/>
<size fieldname="" units="pt" value="12"/>
<bold fieldname="" on="0"/>
<italic fieldname="" on="0"/>
<underline fieldname="" on="0"/>
<strikeout fieldname="" on="0"/>
<color fieldname="" red="0" blue="0" green="0"/>
<x fieldname=""/>
<y fieldname=""/>
<offset x="0" y="0" units="pt" yfieldname="" xfieldname=""/>
<angle fieldname="" value="0" auto="0"/>
<alignment fieldname="" value="center"/>
<buffercolor fieldname="" red="255" blue="255" green="255"/>
<buffersize fieldname="" units="pt" value="1"/>
<bufferenabled fieldname="" on=""/>
<multilineenabled fieldname="" on=""/>
<selectedonly on=""/>
</labelattributes>
<SingleCategoryDiagramRenderer diagramType="Pie">
<DiagramCategory penColor="#000000" labelPlacementMethod="XHeight" penWidth="0" diagramOrientation="Up" minimumSize="0" barWidth="5" penAlpha="255" maxScaleDenominator="1e+08" backgroundColor="#ffffff" transparency="0" width="15" scaleDependency="Area" backgroundAlpha="255" angleOffset="1440" scaleBasedVisibility="0" enabled="0" height="15" sizeType="MM" minScaleDenominator="-4.65661e-10">
<fontProperties description="Lucida Grande,13,-1,5,50,0,0,0,0,0" style=""/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings yPosColumn="-1" linePlacementFlags="10" placement="2" dist="0" xPosColumn="-1" priority="0" obstacle="0" showAll="1"/>
<editform>.</editform>
<editforminit/>
<featformsuppress>0</featformsuppress>
<annotationform>.</annotationform>
<editorlayout>generatedlayout</editorlayout>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<attributeactions/>
<edittypes>
<edittype widgetv2type="TextEdit" name="id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="cables">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="wires">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="circuits">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="frequency">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="members">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
</maplayer>
<maplayer minimumScale="0" maximumScale="1e+08" simplifyDrawingHints="1" minLabelScale="0" maxLabelScale="1e+08" simplifyDrawingTol="1" geometry="Line" simplifyMaxScale="1" type="vector" hasScaleBasedVisibilityFlag="0" simplifyLocal="1" scaleBasedLabelVisibilityFlag="0">
<id>power_ways20151204114804618</id>
<datasource>dbname='"""+database+"""' host="""+host+""" port="""+port+""" user='"""+user+"""' password='"""+password+"""' sslmode=disable key='id' srid=4326 type=LineString table="public"."power_ways" (way) sql=</datasource>
<title></title>
<abstract></abstract>
<keywordList>
<value></value>
</keywordList>
<layername>power_ways</layername>
<srs>
<spatialrefsys>
<proj4>+proj=longlat +datum=WGS84 +no_defs</proj4>
<srsid>3452</srsid>
<srid>4326</srid>
<authid>EPSG:4326</authid>
<description>WGS 84</description>
<projectionacronym>longlat</projectionacronym>
<ellipsoidacronym>WGS84</ellipsoidacronym>
<geographicflag>true</geographicflag>
</spatialrefsys>
</srs>
<provider encoding="UTF-8">postgres</provider>
<previewExpression>COALESCE( "id", '&lt;NULL&gt;' )</previewExpression>
<vectorjoins/>
<expressionfields/>
<map-layer-style-manager current="">
<map-layer-style name=""/>
</map-layer-style-manager>
<edittypes>
<edittype widgetv2type="TextEdit" name="id">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="version">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="user_id">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="tstamp">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="changeset_id">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="UniqueValues" name="power">
<widgetv2config fieldEditable="1" labelOnTop="0" Editable="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="cables">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="wires">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="circuits">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="frequency">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="name">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
<renderer-v2 symbollevels="0" type="RuleRenderer">
<rules key="{dc76d4cb-b494-4520-b47b-ee6d7ca9b438}">
<rule filter="( &quot;voltage&quot; ILIKE '%380000%' OR &quot;voltage&quot; ILIKE '%220000%') AND power = 'line'" key="{966cb1d8-8143-4d23-be19-610aba027c2d}" symbol="0" label="&gt; 220 kV (line)"/>
<rule filter="&quot;power&quot; = 'substation'" key="{555cb5d4-8e5d-447c-909c-ec72064c7ff8}" symbol="1" label="Substations"/>
<rule key="{eee7601b-e532-4b27-ad40-ab317567e1f4}" symbol="2" label="All"/>
</rules>
<symbols>
<symbol alpha="1" clip_to_extent="1" type="line" name="0">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="157,157,157,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="1.46"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="1">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="0,0,0,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.26"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="0.321569" clip_to_extent="1" type="line" name="2">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="157,157,157,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="0.46"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
</symbols>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</renderer-v2>
<customproperties>
<property key="labeling" value="pal"/>
<property key="labeling/addDirectionSymbol" value="false"/>
<property key="labeling/angleOffset" value="0"/>
<property key="labeling/blendMode" value="0"/>
<property key="labeling/bufferBlendMode" value="0"/>
<property key="labeling/bufferColorA" value="255"/>
<property key="labeling/bufferColorB" value="255"/>
<property key="labeling/bufferColorG" value="255"/>
<property key="labeling/bufferColorR" value="255"/>
<property key="labeling/bufferDraw" value="false"/>
<property key="labeling/bufferJoinStyle" value="64"/>
<property key="labeling/bufferNoFill" value="false"/>
<property key="labeling/bufferSize" value="1"/>
<property key="labeling/bufferSizeInMapUnits" value="false"/>
<property key="labeling/bufferSizeMapUnitMaxScale" value="0"/>
<property key="labeling/bufferSizeMapUnitMinScale" value="0"/>
<property key="labeling/bufferTransp" value="0"/>
<property key="labeling/centroidInside" value="false"/>
<property key="labeling/centroidWhole" value="false"/>
<property key="labeling/dataDefined/AlwaysShow" value="0~~0~~CASE WHEN (power='substation' or power = 'sub_station') THEN name END~~name"/>
<property key="labeling/dataDefined/Show" value="0~~0~~~~name"/>
<property key="labeling/decimals" value="3"/>
<property key="labeling/displayAll" value="true"/>
<property key="labeling/dist" value="0"/>
<property key="labeling/distInMapUnits" value="false"/>
<property key="labeling/distMapUnitMaxScale" value="0"/>
<property key="labeling/distMapUnitMinScale" value="0"/>
<property key="labeling/enabled" value="true"/>
<property key="labeling/fieldName" value="CASE WHEN (power='substation' or power = 'sub_station') AND (voltage iLIKE '%380%' or voltage iLike '%220%')THEN name END"/>
<property key="labeling/fontBold" value="false"/>
<property key="labeling/fontCapitals" value="0"/>
<property key="labeling/fontFamily" value="Lucida Grande"/>
<property key="labeling/fontItalic" value="false"/>
<property key="labeling/fontLetterSpacing" value="0"/>
<property key="labeling/fontLimitPixelSize" value="false"/>
<property key="labeling/fontMaxPixelSize" value="10000"/>
<property key="labeling/fontMinPixelSize" value="3"/>
<property key="labeling/fontSize" value="10.25"/>
<property key="labeling/fontSizeInMapUnits" value="false"/>
<property key="labeling/fontSizeMapUnitMaxScale" value="0"/>
<property key="labeling/fontSizeMapUnitMinScale" value="0"/>
<property key="labeling/fontStrikeout" value="false"/>
<property key="labeling/fontUnderline" value="false"/>
<property key="labeling/fontWeight" value="50"/>
<property key="labeling/fontWordSpacing" value="0"/>
<property key="labeling/formatNumbers" value="false"/>
<property key="labeling/isExpression" value="true"/>
<property key="labeling/labelOffsetInMapUnits" value="true"/>
<property key="labeling/labelOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/labelOffsetMapUnitMinScale" value="0"/>
<property key="labeling/labelPerPart" value="true"/>
<property key="labeling/leftDirectionSymbol" value="&lt;"/>
<property key="labeling/limitNumLabels" value="false"/>
<property key="labeling/maxCurvedCharAngleIn" value="20"/>
<property key="labeling/maxCurvedCharAngleOut" value="-20"/>
<property key="labeling/maxNumLabels" value="2000"/>
<property key="labeling/mergeLines" value="false"/>
<property key="labeling/minFeatureSize" value="0"/>
<property key="labeling/multilineAlign" value="0"/>
<property key="labeling/multilineHeight" value="1"/>
<property key="labeling/namedStyle" value="Normal"/>
<property key="labeling/obstacle" value="false"/>
<property key="labeling/placeDirectionSymbol" value="0"/>
<property key="labeling/placement" value="4"/>
<property key="labeling/placementFlags" value="0"/>
<property key="labeling/plussign" value="false"/>
<property key="labeling/preserveRotation" value="true"/>
<property key="labeling/previewBkgrdColor" value="#ffffff"/>
<property key="labeling/priority" value="10"/>
<property key="labeling/quadOffset" value="4"/>
<property key="labeling/repeatDistance" value="0"/>
<property key="labeling/repeatDistanceMapUnitMaxScale" value="0"/>
<property key="labeling/repeatDistanceMapUnitMinScale" value="0"/>
<property key="labeling/repeatDistanceUnit" value="1"/>
<property key="labeling/reverseDirectionSymbol" value="false"/>
<property key="labeling/rightDirectionSymbol" value=">"/>
<property key="labeling/scaleMax" value="1000000"/>
<property key="labeling/scaleMin" value="1"/>
<property key="labeling/scaleVisibility" value="true"/>
<property key="labeling/shadowBlendMode" value="6"/>
<property key="labeling/shadowColorB" value="0"/>
<property key="labeling/shadowColorG" value="0"/>
<property key="labeling/shadowColorR" value="0"/>
<property key="labeling/shadowDraw" value="true"/>
<property key="labeling/shadowOffsetAngle" value="135"/>
<property key="labeling/shadowOffsetDist" value="1"/>
<property key="labeling/shadowOffsetGlobal" value="true"/>
<property key="labeling/shadowOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shadowOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shadowOffsetUnits" value="1"/>
<property key="labeling/shadowRadius" value="1.5"/>
<property key="labeling/shadowRadiusAlphaOnly" value="false"/>
<property key="labeling/shadowRadiusMapUnitMaxScale" value="0"/>
<property key="labeling/shadowRadiusMapUnitMinScale" value="0"/>
<property key="labeling/shadowRadiusUnits" value="1"/>
<property key="labeling/shadowScale" value="100"/>
<property key="labeling/shadowTransparency" value="30"/>
<property key="labeling/shadowUnder" value="0"/>
<property key="labeling/shapeBlendMode" value="0"/>
<property key="labeling/shapeBorderColorA" value="255"/>
<property key="labeling/shapeBorderColorB" value="128"/>
<property key="labeling/shapeBorderColorG" value="128"/>
<property key="labeling/shapeBorderColorR" value="128"/>
<property key="labeling/shapeBorderWidth" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMaxScale" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMinScale" value="0"/>
<property key="labeling/shapeBorderWidthUnits" value="1"/>
<property key="labeling/shapeDraw" value="true"/>
<property key="labeling/shapeFillColorA" value="255"/>
<property key="labeling/shapeFillColorB" value="255"/>
<property key="labeling/shapeFillColorG" value="255"/>
<property key="labeling/shapeFillColorR" value="255"/>
<property key="labeling/shapeJoinStyle" value="64"/>
<property key="labeling/shapeOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shapeOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shapeOffsetUnits" value="1"/>
<property key="labeling/shapeOffsetX" value="0"/>
<property key="labeling/shapeOffsetY" value="0"/>
<property key="labeling/shapeRadiiMapUnitMaxScale" value="0"/>
<property key="labeling/shapeRadiiMapUnitMinScale" value="0"/>
<property key="labeling/shapeRadiiUnits" value="1"/>
<property key="labeling/shapeRadiiX" value="0"/>
<property key="labeling/shapeRadiiY" value="0"/>
<property key="labeling/shapeRotation" value="0"/>
<property key="labeling/shapeRotationType" value="0"/>
<property key="labeling/shapeSVGFile" value=""/>
<property key="labeling/shapeSizeMapUnitMaxScale" value="0"/>
<property key="labeling/shapeSizeMapUnitMinScale" value="0"/>
<property key="labeling/shapeSizeType" value="0"/>
<property key="labeling/shapeSizeUnits" value="1"/>
<property key="labeling/shapeSizeX" value="0"/>
<property key="labeling/shapeSizeY" value="0"/>
<property key="labeling/shapeTransparency" value="0"/>
<property key="labeling/shapeType" value="0"/>
<property key="labeling/textColorA" value="255"/>
<property key="labeling/textColorB" value="0"/>
<property key="labeling/textColorG" value="0"/>
<property key="labeling/textColorR" value="0"/>
<property key="labeling/textTransp" value="13"/>
<property key="labeling/upsidedownLabels" value="0"/>
<property key="labeling/wrapChar" value=""/>
<property key="labeling/xOffset" value="0"/>
<property key="labeling/yOffset" value="0"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerTransparency>55</layerTransparency>
<displayfield>id</displayfield>
<label>0</label>
<labelattributes>
<label fieldname="" text="Beschriftung"/>
<family fieldname="" name="MS Shell Dlg 2"/>
<size fieldname="" units="pt" value="12"/>
<bold fieldname="" on="0"/>
<italic fieldname="" on="0"/>
<underline fieldname="" on="0"/>
<strikeout fieldname="" on="0"/>
<color fieldname="" red="0" blue="0" green="0"/>
<x fieldname=""/>
<y fieldname=""/>
<offset x="0" y="0" units="pt" yfieldname="" xfieldname=""/>
<angle fieldname="" value="0" auto="0"/>
<alignment fieldname="" value="center"/>
<buffercolor fieldname="" red="255" blue="255" green="255"/>
<buffersize fieldname="" units="pt" value="1"/>
<bufferenabled fieldname="" on=""/>
<multilineenabled fieldname="" on=""/>
<selectedonly on=""/>
</labelattributes>
<SingleCategoryDiagramRenderer diagramType="Pie">
<DiagramCategory penColor="#000000" labelPlacementMethod="XHeight" penWidth="0" diagramOrientation="Up" minimumSize="0" barWidth="5" penAlpha="255" maxScaleDenominator="1e+08" backgroundColor="#ffffff" transparency="0" width="15" scaleDependency="Area" backgroundAlpha="255" angleOffset="1440" scaleBasedVisibility="0" enabled="0" height="15" sizeType="MM" minScaleDenominator="0">
<fontProperties description="Lucida Grande,13,-1,5,50,0,0,0,0,0" style=""/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings yPosColumn="-1" linePlacementFlags="10" placement="2" dist="0" xPosColumn="-1" priority="0" obstacle="0" showAll="1"/>
<editform>C:/powerdata/qgis_projects/GermanyPower</editform>
<editforminit/>
<featformsuppress>0</featformsuppress>
<annotationform>C:/powerdata/qgis_projects/GermanyPower</annotationform>
<editorlayout>generatedlayout</editorlayout>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<attributeactions/>
<edittypes>
<edittype widgetv2type="TextEdit" name="id">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="version">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="user_id">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="tstamp">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="changeset_id">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="UniqueValues" name="power">
<widgetv2config fieldEditable="1" labelOnTop="0" Editable="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="cables">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="wires">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="circuits">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="frequency">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="name">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
</maplayer>
<maplayer minimumScale="-4.65661e-10" maximumScale="1e+08" simplifyDrawingHints="1" minLabelScale="0" maxLabelScale="1e+08" simplifyDrawingTol="1" geometry="Line" simplifyMaxScale="1" type="vector" hasScaleBasedVisibilityFlag="0" simplifyLocal="1" scaleBasedLabelVisibilityFlag="0">
<id>vw_change_log20151204114804760</id>
<datasource>dbname='"""+database+"""' host="""+host+""" port="""+port+""" user='"""+user+"""' password='"""+password+"""' sslmode=disable key='id' srid=4326 type=MultiLineString table="public"."vw_change_log" (way) sql=</datasource>
<title></title>
<abstract></abstract>
<keywordList>
<value></value>
</keywordList>
<layername>vw_change_log</layername>
<srs>
<spatialrefsys>
<proj4>+proj=longlat +datum=WGS84 +no_defs</proj4>
<srsid>3452</srsid>
<srid>4326</srid>
<authid>EPSG:4326</authid>
<description>WGS 84</description>
<projectionacronym>longlat</projectionacronym>
<ellipsoidacronym>WGS84</ellipsoidacronym>
<geographicflag>true</geographicflag>
</spatialrefsys>
</srs>
<provider encoding="UTF-8">postgres</provider>
<previewExpression>COALESCE( "id", '<NULL>' )</previewExpression>
<vectorjoins/>
<expressionfields/>
<map-layer-style-manager current="">
<map-layer-style name=""/>
</map-layer-style-manager>
<edittypes>
<edittype widgetv2type="TextEdit" name="id">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="osm_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="tstamp">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="table_ident">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="action">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="members">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="power">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="cables">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="wires">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="circuits">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="frequency">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="hinweis">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
<renderer-v2 symbollevels="0" type="RuleRenderer">
<rules key="{dc76d4cb-b494-4520-b47b-ee6d7ca9b438}">
<rule filter=" "action" = 'updt' AND table_ident = 'way'" key="{387f82a1-7232-48ca-bb0b-d7038db8f2b4}" symbol="0" label="Update way"/>
<rule filter=" "action" = 'isrt' AND table_ident = 'way'" key="{a69b6ea3-c14b-489e-ad92-d39a6b19be14}" symbol="1" label="Insert way"/>
<rule filter=" "action" = 'dlt' AND table_ident = 'way'" key="{65ee97d2-aa51-409b-947c-2f443b275c54}" symbol="2" label="Delete way"/>
<rule filter=" "action" = 'updt' AND table_ident = 'rel'" key="{e9efd092-d540-4f47-842a-fcd02afbc75b}" symbol="3" label="Update relation"/>
<rule filter=" "action" = 'dlt' AND table_ident = 'rel'" key="{b66240f8-61c7-4bff-abd7-a8cf93d53900}" symbol="4" label="Delete relation"/>
<rule filter=" "action" ='isrt' AND "table_ident" ='rel'" key="{9dd426ba-b916-426d-854e-cb3bcac06aaa}" symbol="5" label="Insert Relation"/>
</rules>
<symbols>
<symbol alpha="0.686275" clip_to_extent="1" type="line" name="0">
<layer pass="1" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="53,129,180,255"/>
<prop k="line_style" v="dot"/>
<prop k="line_width" v="3.26"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="0.686275" clip_to_extent="1" type="line" name="1">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="53,129,180,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="3.26"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="2">
<layer pass="2" class="MarkerLine" locked="0">
<prop k="interval" v="3"/>
<prop k="interval_map_unit_scale" v="0,0"/>
<prop k="interval_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_along_line" v="0"/>
<prop k="offset_along_line_map_unit_scale" v="0,0"/>
<prop k="offset_along_line_unit" v="MM"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="placement" v="interval"/>
<prop k="rotate" v="1"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
<symbol alpha="1" clip_to_extent="1" type="marker" name="@2@0">
<layer pass="0" class="SimpleMarker" locked="0">
<prop k="angle" v="0"/>
<prop k="color" v="157,157,157,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="name" v="line"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="157,157,157,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0.8"/>
<prop k="outline_width_map_unit_scale" v="0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="3.5"/>
<prop k="size_map_unit_scale" v="0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="3">
<layer pass="0" class="MarkerLine" locked="0">
<prop k="interval" v="3"/>
<prop k="interval_map_unit_scale" v="0,0"/>
<prop k="interval_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_along_line" v="0"/>
<prop k="offset_along_line_map_unit_scale" v="0,0"/>
<prop k="offset_along_line_unit" v="MM"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="placement" v="interval"/>
<prop k="rotate" v="1"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
<symbol alpha="1" clip_to_extent="1" type="marker" name="@3@0">
<layer pass="0" class="SimpleMarker" locked="0">
<prop k="angle" v="0"/>
<prop k="color" v="247,243,6,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="name" v="circle"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0"/>
<prop k="outline_width_map_unit_scale" v="0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="2"/>
<prop k="size_map_unit_scale" v="0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="4">
<layer pass="2" class="MarkerLine" locked="0">
<prop k="interval" v="3"/>
<prop k="interval_map_unit_scale" v="0,0"/>
<prop k="interval_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_along_line" v="0"/>
<prop k="offset_along_line_map_unit_scale" v="0,0"/>
<prop k="offset_along_line_unit" v="MM"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="placement" v="interval"/>
<prop k="rotate" v="1"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
<symbol alpha="1" clip_to_extent="1" type="marker" name="@4@0">
<layer pass="0" class="SimpleMarker" locked="0">
<prop k="angle" v="0"/>
<prop k="color" v="0,0,0,255"/>
<prop k="horizontal_anchor_point" v="1"/>
<prop k="name" v="line"/>
<prop k="offset" v="0,0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="outline_color" v="0,0,0,255"/>
<prop k="outline_style" v="solid"/>
<prop k="outline_width" v="0.4"/>
<prop k="outline_width_map_unit_scale" v="0,0"/>
<prop k="outline_width_unit" v="MM"/>
<prop k="scale_method" v="area"/>
<prop k="size" v="3.5"/>
<prop k="size_map_unit_scale" v="0,0"/>
<prop k="size_unit" v="MM"/>
<prop k="vertical_anchor_point" v="1"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
</layer>
</symbol>
<symbol alpha="1" clip_to_extent="1" type="line" name="5">
<layer pass="0" class="SimpleLine" locked="0">
<prop k="capstyle" v="square"/>
<prop k="customdash" v="5;2"/>
<prop k="customdash_map_unit_scale" v="0,0"/>
<prop k="customdash_unit" v="MM"/>
<prop k="draw_inside_polygon" v="0"/>
<prop k="joinstyle" v="bevel"/>
<prop k="line_color" v="247,243,6,255"/>
<prop k="line_style" v="solid"/>
<prop k="line_width" v="2"/>
<prop k="line_width_unit" v="MM"/>
<prop k="offset" v="0"/>
<prop k="offset_map_unit_scale" v="0,0"/>
<prop k="offset_unit" v="MM"/>
<prop k="use_custom_dash" v="0"/>
<prop k="width_map_unit_scale" v="0,0"/>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</layer>
</symbol>
</symbols>
<effect enabled="0" type="effectStack">
<effect type="dropShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="outerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
<effect type="drawSource">
<prop k="blend_mode" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="1"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerShadow">
<prop k="blend_mode" v="13"/>
<prop k="blur_level" v="10"/>
<prop k="color" v="0,0,0,255"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="offset_angle" v="135"/>
<prop k="offset_distance" v="2"/>
<prop k="offset_unit" v="MM"/>
<prop k="offset_unit_scale" v="0,0"/>
<prop k="transparency" v="0"/>
</effect>
<effect type="innerGlow">
<prop k="blend_mode" v="0"/>
<prop k="blur_level" v="3"/>
<prop k="color1" v="0,0,255,255"/>
<prop k="color2" v="0,255,0,255"/>
<prop k="color_type" v="0"/>
<prop k="discrete" v="0"/>
<prop k="draw_mode" v="2"/>
<prop k="enabled" v="0"/>
<prop k="single_color" v="255,255,255,255"/>
<prop k="spread" v="2"/>
<prop k="spread_unit" v="MM"/>
<prop k="spread_unit_scale" v="0,0"/>
<prop k="transparency" v="0.5"/>
</effect>
</effect>
</renderer-v2>
<customproperties>
<property key="labeling" value="pal"/>
<property key="labeling/addDirectionSymbol" value="false"/>
<property key="labeling/angleOffset" value="0"/>
<property key="labeling/blendMode" value="0"/>
<property key="labeling/bufferBlendMode" value="0"/>
<property key="labeling/bufferColorA" value="255"/>
<property key="labeling/bufferColorB" value="255"/>
<property key="labeling/bufferColorG" value="255"/>
<property key="labeling/bufferColorR" value="255"/>
<property key="labeling/bufferDraw" value="false"/>
<property key="labeling/bufferJoinStyle" value="64"/>
<property key="labeling/bufferNoFill" value="false"/>
<property key="labeling/bufferSize" value="1"/>
<property key="labeling/bufferSizeInMapUnits" value="false"/>
<property key="labeling/bufferSizeMapUnitMaxScale" value="0"/>
<property key="labeling/bufferSizeMapUnitMinScale" value="0"/>
<property key="labeling/bufferTransp" value="0"/>
<property key="labeling/centroidInside" value="false"/>
<property key="labeling/centroidWhole" value="false"/>
<property key="labeling/decimals" value="3"/>
<property key="labeling/displayAll" value="false"/>
<property key="labeling/dist" value="0"/>
<property key="labeling/distInMapUnits" value="false"/>
<property key="labeling/distMapUnitMaxScale" value="0"/>
<property key="labeling/distMapUnitMinScale" value="0"/>
<property key="labeling/enabled" value="false"/>
<property key="labeling/fieldName" value=""/>
<property key="labeling/fontBold" value="false"/>
<property key="labeling/fontCapitals" value="0"/>
<property key="labeling/fontFamily" value="Lucida Grande"/>
<property key="labeling/fontItalic" value="false"/>
<property key="labeling/fontLetterSpacing" value="0"/>
<property key="labeling/fontLimitPixelSize" value="false"/>
<property key="labeling/fontMaxPixelSize" value="10000"/>
<property key="labeling/fontMinPixelSize" value="3"/>
<property key="labeling/fontSize" value="8.25"/>
<property key="labeling/fontSizeInMapUnits" value="false"/>
<property key="labeling/fontSizeMapUnitMaxScale" value="0"/>
<property key="labeling/fontSizeMapUnitMinScale" value="0"/>
<property key="labeling/fontStrikeout" value="false"/>
<property key="labeling/fontUnderline" value="false"/>
<property key="labeling/fontWeight" value="50"/>
<property key="labeling/fontWordSpacing" value="0"/>
<property key="labeling/formatNumbers" value="false"/>
<property key="labeling/isExpression" value="true"/>
<property key="labeling/labelOffsetInMapUnits" value="true"/>
<property key="labeling/labelOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/labelOffsetMapUnitMinScale" value="0"/>
<property key="labeling/labelPerPart" value="false"/>
<property key="labeling/leftDirectionSymbol" value="<"/>
<property key="labeling/limitNumLabels" value="false"/>
<property key="labeling/maxCurvedCharAngleIn" value="20"/>
<property key="labeling/maxCurvedCharAngleOut" value="-20"/>
<property key="labeling/maxNumLabels" value="2000"/>
<property key="labeling/mergeLines" value="false"/>
<property key="labeling/minFeatureSize" value="0"/>
<property key="labeling/multilineAlign" value="0"/>
<property key="labeling/multilineHeight" value="1"/>
<property key="labeling/namedStyle" value="Normal"/>
<property key="labeling/obstacle" value="true"/>
<property key="labeling/placeDirectionSymbol" value="0"/>
<property key="labeling/placement" value="2"/>
<property key="labeling/placementFlags" value="10"/>
<property key="labeling/plussign" value="false"/>
<property key="labeling/preserveRotation" value="true"/>
<property key="labeling/previewBkgrdColor" value="#ffffff"/>
<property key="labeling/priority" value="5"/>
<property key="labeling/quadOffset" value="4"/>
<property key="labeling/repeatDistance" value="0"/>
<property key="labeling/repeatDistanceMapUnitMaxScale" value="0"/>
<property key="labeling/repeatDistanceMapUnitMinScale" value="0"/>
<property key="labeling/repeatDistanceUnit" value="1"/>
<property key="labeling/reverseDirectionSymbol" value="false"/>
<property key="labeling/rightDirectionSymbol" value=">"/>
<property key="labeling/scaleMax" value="10000000"/>
<property key="labeling/scaleMin" value="1"/>
<property key="labeling/scaleVisibility" value="false"/>
<property key="labeling/shadowBlendMode" value="6"/>
<property key="labeling/shadowColorB" value="0"/>
<property key="labeling/shadowColorG" value="0"/>
<property key="labeling/shadowColorR" value="0"/>
<property key="labeling/shadowDraw" value="false"/>
<property key="labeling/shadowOffsetAngle" value="135"/>
<property key="labeling/shadowOffsetDist" value="1"/>
<property key="labeling/shadowOffsetGlobal" value="true"/>
<property key="labeling/shadowOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shadowOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shadowOffsetUnits" value="1"/>
<property key="labeling/shadowRadius" value="1.5"/>
<property key="labeling/shadowRadiusAlphaOnly" value="false"/>
<property key="labeling/shadowRadiusMapUnitMaxScale" value="0"/>
<property key="labeling/shadowRadiusMapUnitMinScale" value="0"/>
<property key="labeling/shadowRadiusUnits" value="1"/>
<property key="labeling/shadowScale" value="100"/>
<property key="labeling/shadowTransparency" value="30"/>
<property key="labeling/shadowUnder" value="0"/>
<property key="labeling/shapeBlendMode" value="0"/>
<property key="labeling/shapeBorderColorA" value="255"/>
<property key="labeling/shapeBorderColorB" value="128"/>
<property key="labeling/shapeBorderColorG" value="128"/>
<property key="labeling/shapeBorderColorR" value="128"/>
<property key="labeling/shapeBorderWidth" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMaxScale" value="0"/>
<property key="labeling/shapeBorderWidthMapUnitMinScale" value="0"/>
<property key="labeling/shapeBorderWidthUnits" value="1"/>
<property key="labeling/shapeDraw" value="false"/>
<property key="labeling/shapeFillColorA" value="255"/>
<property key="labeling/shapeFillColorB" value="255"/>
<property key="labeling/shapeFillColorG" value="255"/>
<property key="labeling/shapeFillColorR" value="255"/>
<property key="labeling/shapeJoinStyle" value="64"/>
<property key="labeling/shapeOffsetMapUnitMaxScale" value="0"/>
<property key="labeling/shapeOffsetMapUnitMinScale" value="0"/>
<property key="labeling/shapeOffsetUnits" value="1"/>
<property key="labeling/shapeOffsetX" value="0"/>
<property key="labeling/shapeOffsetY" value="0"/>
<property key="labeling/shapeRadiiMapUnitMaxScale" value="0"/>
<property key="labeling/shapeRadiiMapUnitMinScale" value="0"/>
<property key="labeling/shapeRadiiUnits" value="1"/>
<property key="labeling/shapeRadiiX" value="0"/>
<property key="labeling/shapeRadiiY" value="0"/>
<property key="labeling/shapeRotation" value="0"/>
<property key="labeling/shapeRotationType" value="0"/>
<property key="labeling/shapeSVGFile" value=""/>
<property key="labeling/shapeSizeMapUnitMaxScale" value="0"/>
<property key="labeling/shapeSizeMapUnitMinScale" value="0"/>
<property key="labeling/shapeSizeType" value="0"/>
<property key="labeling/shapeSizeUnits" value="1"/>
<property key="labeling/shapeSizeX" value="0"/>
<property key="labeling/shapeSizeY" value="0"/>
<property key="labeling/shapeTransparency" value="0"/>
<property key="labeling/shapeType" value="0"/>
<property key="labeling/textColorA" value="255"/>
<property key="labeling/textColorB" value="0"/>
<property key="labeling/textColorG" value="0"/>
<property key="labeling/textColorR" value="0"/>
<property key="labeling/textTransp" value="0"/>
<property key="labeling/upsidedownLabels" value="0"/>
<property key="labeling/wrapChar" value=""/>
<property key="labeling/xOffset" value="0"/>
<property key="labeling/yOffset" value="0"/>
</customproperties>
<blendMode>0</blendMode>
<featureBlendMode>0</featureBlendMode>
<layerTransparency>0</layerTransparency>
<displayfield>id</displayfield>
<label>0</label>
<labelattributes>
<label fieldname="" text="Beschriftung"/>
<family fieldname="" name="MS Shell Dlg 2"/>
<size fieldname="" units="pt" value="12"/>
<bold fieldname="" on="0"/>
<italic fieldname="" on="0"/>
<underline fieldname="" on="0"/>
<strikeout fieldname="" on="0"/>
<color fieldname="" red="0" blue="0" green="0"/>
<x fieldname=""/>
<y fieldname=""/>
<offset x="0" y="0" units="pt" yfieldname="" xfieldname=""/>
<angle fieldname="" value="0" auto="0"/>
<alignment fieldname="" value="center"/>
<buffercolor fieldname="" red="255" blue="255" green="255"/>
<buffersize fieldname="" units="pt" value="1"/>
<bufferenabled fieldname="" on=""/>
<multilineenabled fieldname="" on=""/>
<selectedonly on=""/>
</labelattributes>
<SingleCategoryDiagramRenderer diagramType="Pie">
<DiagramCategory penColor="#000000" labelPlacementMethod="XHeight" penWidth="0" diagramOrientation="Up" minimumSize="0" barWidth="5" penAlpha="255" maxScaleDenominator="1e+08" backgroundColor="#ffffff" transparency="0" width="15" scaleDependency="Area" backgroundAlpha="255" angleOffset="1440" scaleBasedVisibility="0" enabled="0" height="15" sizeType="MM" minScaleDenominator="0">
<fontProperties description="Lucida Grande,13,-1,5,50,0,0,0,0,0" style=""/>
<attribute field="" color="#000000" label=""/>
</DiagramCategory>
</SingleCategoryDiagramRenderer>
<DiagramLayerSettings yPosColumn="-1" linePlacementFlags="10" placement="2" dist="0" xPosColumn="-1" priority="0" obstacle="0" showAll="1"/>
<editform>.</editform>
<editforminit/>
<featformsuppress>0</featformsuppress>
<annotationform>.</annotationform>
<editorlayout>generatedlayout</editorlayout>
<excludeAttributesWMS/>
<excludeAttributesWFS/>
<attributeactions/>
<edittypes>
<edittype widgetv2type="TextEdit" name="id">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="osm_id">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="tstamp">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="table_ident">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="action">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="members">
<widgetv2config IsMultiline="0" fieldEditable="0" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="power">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="voltage">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="cables">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="wires">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="circuits">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="frequency">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
<edittype widgetv2type="TextEdit" name="hinweis">
<widgetv2config IsMultiline="0" fieldEditable="1" UseHtml="0" labelOnTop="0"/>
</edittype>
</edittypes>
</maplayer>
</projectlayers>
<properties>
<WMSContactPerson type="QString"></WMSContactPerson>
<WMSOnlineResource type="QString"></WMSOnlineResource>
<WMSUseLayerIDs type="bool">false</WMSUseLayerIDs>
<WMSContactOrganization type="QString"></WMSContactOrganization>
<WMSKeywordList type="QStringList">
<value></value>
</WMSKeywordList>
<WFSUrl type="QString"></WFSUrl>
<Paths>
<Absolute type="bool">false</Absolute>
</Paths>
<WMSServiceTitle type="QString"></WMSServiceTitle>
<WFSLayers type="QStringList"/>
<WMSContactMail type="QString"></WMSContactMail>
<PositionPrecision>
<DecimalPlaces type="int">2</DecimalPlaces>
<Automatic type="bool">true</Automatic>
<DegreeFormat type="QString">D</DegreeFormat>
</PositionPrecision>
<WCSUrl type="QString"></WCSUrl>
<WMSContactPhone type="QString"></WMSContactPhone>
<WMSServiceCapabilities type="bool">false</WMSServiceCapabilities>
<WMSServiceAbstract type="QString"></WMSServiceAbstract>
<WMSAddWktGeometry type="bool">false</WMSAddWktGeometry>
<Measure>
<Ellipsoid type="QString">WGS84</Ellipsoid>
</Measure>
<WMSPrecision type="QString">8</WMSPrecision>
<WFSTLayers>
<Insert type="QStringList"/>
<Update type="QStringList"/>
<Delete type="QStringList"/>
</WFSTLayers>
<PAL>
<SearchMethod type="int">0</SearchMethod>
<ShowingShadowRects type="bool">false</ShowingShadowRects>
<CandidatesPolygon type="int">8</CandidatesPolygon>
<ShowingCandidates type="bool">false</ShowingCandidates>
<ShowingPartialsLabels type="bool">true</ShowingPartialsLabels>
<CandidatesLine type="int">8</CandidatesLine>
<CandidatesPoint type="int">8</CandidatesPoint>
<ShowingAllLabels type="bool">false</ShowingAllLabels>
<DrawOutlineLabels type="bool">true</DrawOutlineLabels>
</PAL>
<Gui>
<SelectionColorBluePart type="int">0</SelectionColorBluePart>
<CanvasColorGreenPart type="int">255</CanvasColorGreenPart>
<CanvasColorRedPart type="int">255</CanvasColorRedPart>
<SelectionColorRedPart type="int">255</SelectionColorRedPart>
<SelectionColorAlphaPart type="int">255</SelectionColorAlphaPart>
<SelectionColorGreenPart type="int">255</SelectionColorGreenPart>
<CanvasColorBluePart type="int">255</CanvasColorBluePart>
</Gui>
<Digitizing>
<DefaultSnapToleranceUnit type="int">1</DefaultSnapToleranceUnit>
<LayerSnappingList type="QStringList">
<value>edit_power_relations20151214185446433</value>
<value>power_ways20151204114804618</value>
<value>vw_change_log20151204114804760</value>
</LayerSnappingList>
<LayerSnappingEnabledList type="QStringList">
<value>disabled</value>
<value>disabled</value>
<value>disabled</value>
</LayerSnappingEnabledList>
<SnappingMode type="QString">current_layer</SnappingMode>
<AvoidIntersectionsList type="QStringList"/>
<LayerSnappingToleranceUnitList type="QStringList">
<value>2</value>
<value>2</value>
<value>2</value>
</LayerSnappingToleranceUnitList>
<LayerSnapToList type="QStringList">
<value>to_vertex_and_segment</value>
<value>to_vertex_and_segment</value>
<value>to_vertex_and_segment</value>
</LayerSnapToList>
<DefaultSnapType type="QString">to vertex</DefaultSnapType>
<DefaultSnapTolerance type="double">15</DefaultSnapTolerance>
<TopologicalEditing type="bool">true</TopologicalEditing>
<LayerSnappingToleranceList type="QStringList">
<value>0.000000</value>
<value>0.000000</value>
<value>0.000000</value>
</LayerSnappingToleranceList>
</Digitizing>
<Identify>
<disabledLayers type="QStringList"/>
</Identify>
<Macros>
<pythonCode type="QString"></pythonCode>
</Macros>
<WMSAccessConstraints type="QString"></WMSAccessConstraints>
<WCSLayers type="QStringList"/>
<Legend>
<filterByMap type="bool">false</filterByMap>
</Legend>
<SpatialRefSys>
<ProjectCRSProj4String type="QString">+proj=longlat +datum=WGS84 +no_defs</ProjectCRSProj4String>
<ProjectCrs type="QString">EPSG:4326</ProjectCrs>
<ProjectCRSID type="int">3452</ProjectCRSID>
<ProjectionsEnabled type="int">1</ProjectionsEnabled>
</SpatialRefSys>
<DefaultStyles>
<Fill type="QString"></Fill>
<Line type="QString"></Line>
<Marker type="QString"></Marker>
<RandomColors type="bool">true</RandomColors>
<AlphaInt type="int">255</AlphaInt>
<ColorRamp type="QString"></ColorRamp>
</DefaultStyles>
<WMSFees type="QString"></WMSFees>
<WMSImageQuality type="int">90</WMSImageQuality>
<WMSUrl type="QString"></WMSUrl>
</properties>
</qgis>
"""
filepath = qgis_proj_dir + "/" + database + "_grid_devel_project.qgs"
if os.path.exists(filepath) == False:
fh = open(filepath ,"w")
fh.write(grid_devel_project)
fh.close()
| 54.943727
| 404
| 0.498031
| 30,977
| 310,487
| 4.919811
| 0.030603
| 0.074704
| 0.140878
| 0.021909
| 0.956864
| 0.952658
| 0.950322
| 0.947336
| 0.945696
| 0.94441
| 0
| 0.05305
| 0.341032
| 310,487
| 5,651
| 405
| 54.943727
| 0.69182
| 0.003797
| 0
| 0.952847
| 0
| 0.014947
| 0.996152
| 0.213922
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000178
| false
| 0.006762
| 0.000178
| 0
| 0.000356
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2fecce968669bcb32c34baee5fdfc2437b88176c
| 519
|
py
|
Python
|
granule_ingester/granule_ingester/processors/reading_processors/__init__.py
|
skorper/incubator-sdap-ingester
|
40cc47a1c09e5809099a89322b54d0ec31b5a820
|
[
"Apache-2.0"
] | null | null | null |
granule_ingester/granule_ingester/processors/reading_processors/__init__.py
|
skorper/incubator-sdap-ingester
|
40cc47a1c09e5809099a89322b54d0ec31b5a820
|
[
"Apache-2.0"
] | 1
|
2021-05-03T22:13:11.000Z
|
2021-05-03T22:13:11.000Z
|
granule_ingester/granule_ingester/processors/reading_processors/__init__.py
|
wphyojpl/incubator-sdap-ingester
|
472ab158c5bcfd4001a92cc5ebb98c7827f146f8
|
[
"Apache-2.0"
] | null | null | null |
from granule_ingester.processors.reading_processors.EccoReadingProcessor import EccoReadingProcessor
from granule_ingester.processors.reading_processors.GridReadingProcessor import GridReadingProcessor
from granule_ingester.processors.reading_processors.SwathReadingProcessor import SwathReadingProcessor
from granule_ingester.processors.reading_processors.TileReadingProcessor import TileReadingProcessor
from granule_ingester.processors.reading_processors.TimeSeriesReadingProcessor import TimeSeriesReadingProcessor
| 86.5
| 112
| 0.932563
| 45
| 519
| 10.533333
| 0.244444
| 0.116034
| 0.200422
| 0.305907
| 0.485232
| 0.485232
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038536
| 519
| 5
| 113
| 103.8
| 0.9499
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.