hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e17e6d538fc111e2241dd17382ad291abbbbecf1
| 74
|
py
|
Python
|
storages/tests/__init__.py
|
chalkchisel/django-storages
|
333839a4a1851890ab81479e8f610e2f84b7ed8b
|
[
"BSD-3-Clause"
] | 1
|
2018-04-29T17:25:07.000Z
|
2018-04-29T17:25:07.000Z
|
storages/tests/__init__.py
|
chalkchisel/django-storages
|
333839a4a1851890ab81479e8f610e2f84b7ed8b
|
[
"BSD-3-Clause"
] | 8
|
2021-03-31T20:09:47.000Z
|
2022-03-29T22:03:38.000Z
|
storages/tests/__init__.py
|
chalkchisel/django-storages
|
333839a4a1851890ab81479e8f610e2f84b7ed8b
|
[
"BSD-3-Clause"
] | 3
|
2019-01-14T04:53:40.000Z
|
2019-01-22T01:46:31.000Z
|
from storages.tests.hashpath import *
from storages.tests.s3boto import *
| 24.666667
| 37
| 0.810811
| 10
| 74
| 6
| 0.6
| 0.4
| 0.566667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015152
| 0.108108
| 74
| 2
| 38
| 37
| 0.893939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e18ca44d364ec7fe9ef165879f4e95e266231899
| 4,877
|
py
|
Python
|
src/board_utl.py
|
jklypchak13/Connect4Minimax
|
2ea92a89d52e3c9df78d62ed01cb083a37f9df68
|
[
"MIT"
] | null | null | null |
src/board_utl.py
|
jklypchak13/Connect4Minimax
|
2ea92a89d52e3c9df78d62ed01cb083a37f9df68
|
[
"MIT"
] | null | null | null |
src/board_utl.py
|
jklypchak13/Connect4Minimax
|
2ea92a89d52e3c9df78d62ed01cb083a37f9df68
|
[
"MIT"
] | null | null | null |
"""contains functions related to scoring a connect 4 board for minimax
"""
from typing import Dict
import numpy as np
BOARD_ROWS: int = 6
BOARD_COLUMNS: int = 7
def check_vertical(data: np.ndarray, row: int, col: int) -> int:
"""check the vertical direction for a winner at the designated spot
Arguments:
data {np.ndarray} -- the board to check
row {int} -- the row of the current spot to check
col {int} -- the column of the current spot to check
Returns:
int -- the winner (or 0 is no winner)
"""
if data[row, col] == 0 or row > 2:
return 0
for diff in range(3):
if(data[row + diff, col] == data[(row + diff) + 1, col]):
pass
else:
return 0
return data[row, col]
def check_horizontal(data: np.ndarray, row: int, col: int) -> int:
"""check the horizontal direction for a winner at the designated spot
Arguments:
data {np.ndarray} -- the board to check
row {int} -- the row of the current spot to check
col {int} -- the column of the current spot to check
Returns:
int -- the winner (or 0 is no winner)
"""
if data[row, col] == 0 or col > 3:
return 0
for diff in range(3):
if(data[row, col + diff] == data[row, col + 1 + diff]):
pass
else:
return 0
return data[row, col]
def check_positive_diagonal(data: np.ndarray, row: int, col: int) -> int:
"""check the postive diagonal for a winner at the designated spot
Arguments:
data {np.ndarray} -- the board to check
row {int} -- the row of the current spot to check
col {int} -- the column of the current spot to check
Returns:
int -- the winner (or 0 is no winner)
"""
if data[row, col] == 0 or row - 3 < 0 or col + 3 >= BOARD_COLUMNS:
return 0
for diff in range(3):
if(data[row - diff, col + diff] == data[row - 1 - diff, col + 1 + diff]):
pass
else:
return 0
return data[row, col]
def check_negative_diagonal(data: np.ndarray, row: int, col: int) -> np.ndarray:
"""check the negative diagonal for a winner at the designated spot
Arguments:
data {np.ndarray} -- the board to check
row {int} -- the row of the current spot to check
col {int} -- the column of the current spot to check
Returns:
int -- the winner (or 0 is no winner)
"""
if data[row, col] == 0 or row + 3 >= BOARD_ROWS or col + 3 >= BOARD_COLUMNS:
return 0
for diff in range(3):
if(data[row + diff, col + diff] == data[row + 1 + diff, col + 1 + diff]):
pass
else:
return 0
return data[row, col]
def count_lines_of_two(board: np.ndarray) -> Dict[int, int]:
"""count of the number of two consecutive characters in the array
Arguments:
board {np.ndarray} -- the board state the check
Returns:
Dict[int, int] -- a map of integers values to the corresponding counts
"""
count = {0: 0, 1: 0, 2: 0}
for i in range(BOARD_ROWS):
for j in range(BOARD_COLUMNS):
if i + 1 < BOARD_ROWS:
if board[i, j] == board[i + 1, j]:
count[board[i, j]] += 1
if j + 1 < BOARD_COLUMNS:
if board[i, j] == board[i, j + 1]:
count[board[i, j]] += 1
if i + 1 < BOARD_ROWS and j + 1 < BOARD_COLUMNS:
if board[i, j] == board[i + 1, j + 1]:
count[board[i, j]] += 1
if i - 1 >= 0 and j + 1 < BOARD_COLUMNS:
if board[i, j] == board[i - 1, j + 1]:
count[board[i, j]] += 1
return count
def count_lines_of_three(board):
"""count of the number of three consecutive characters in the array
Arguments:
board {np.ndarray} -- the board state the check
Returns:
Dict[int, int] -- a map of integers values to the corresponding counts
"""
count = {0: 0, 1: 0, 2: 0}
for i in range(BOARD_ROWS):
for j in range(BOARD_COLUMNS):
if i + 2 < BOARD_ROWS:
if board[i, j] == board[i + 1, j] and board[i, j] == board[i + 2, j]:
count[board[i, j]] += 1
if j + 2 < BOARD_COLUMNS:
if board[i, j] == board[i, j + 1] and board[i, j] == board[i, j + 2]:
count[board[i, j]] += 1
if i + 2 < BOARD_ROWS and j + 2 < BOARD_COLUMNS:
if board[i, j] == board[i + 1, j + 1] and board[i, j] == board[i + 2, j + 2]:
count[board[i, j]] += 1
if i - 2 >= 0 and j + 2 < BOARD_COLUMNS:
if board[i, j] == board[i - 1, j + 1] and board[i, j] == board[i - 2, j + 2]:
count[board[i, j]] += 1
return count
| 33.634483
| 93
| 0.533935
| 742
| 4,877
| 3.467655
| 0.103774
| 0.074621
| 0.062573
| 0.055966
| 0.871356
| 0.849592
| 0.849592
| 0.841042
| 0.800233
| 0.800233
| 0
| 0.027699
| 0.348575
| 4,877
| 144
| 94
| 33.868056
| 0.782184
| 0.333812
| 0
| 0.540541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081081
| false
| 0.054054
| 0.027027
| 0
| 0.297297
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
e1a401783e5645412f813175ad0f4c910519a313
| 22,136
|
py
|
Python
|
tests/test_global_muscle_tracking.py
|
Amedeo123/bioptim
|
c22207a511cd0d39bd735e07dae5d53ca5a81714
|
[
"MIT"
] | 1
|
2021-04-28T03:03:55.000Z
|
2021-04-28T03:03:55.000Z
|
tests/test_global_muscle_tracking.py
|
pariterre/bioptim
|
4064138e7d3fce34e21d488df19941937ce30557
|
[
"MIT"
] | null | null | null |
tests/test_global_muscle_tracking.py
|
pariterre/bioptim
|
4064138e7d3fce34e21d488df19941937ce30557
|
[
"MIT"
] | null | null | null |
"""
Test for file IO
"""
import os
import pytest
import numpy as np
import biorbd_casadi as biorbd
from bioptim import OdeSolver
from .utils import TestUtils
@pytest.mark.parametrize("ode_solver", [OdeSolver.RK4, OdeSolver.COLLOCATION, OdeSolver.IRK])
def test_muscle_activations_and_states_tracking(ode_solver):
# Load muscle_activations_tracker
from bioptim.examples.muscle_driven_ocp import muscle_activations_tracker as ocp_module
bioptim_folder = os.path.dirname(ocp_module.__file__)
# Define the problem
model_path = bioptim_folder + "/models/arm26.bioMod"
biorbd_model = biorbd.Model(model_path)
final_time = 0.1
n_shooting = 5
use_residual_torque = True
# Generate random data to fit
np.random.seed(42)
t, markers_ref, x_ref, muscle_activations_ref = ocp_module.generate_data(
biorbd_model, final_time, n_shooting, use_residual_torque=use_residual_torque
)
biorbd_model = biorbd.Model(model_path) # To allow for non free variable, the model must be reloaded
ocp = ocp_module.prepare_ocp(
biorbd_model,
final_time,
n_shooting,
markers_ref,
muscle_activations_ref,
x_ref[: biorbd_model.nbQ(), :],
use_residual_torque=use_residual_torque,
kin_data_to_track="q",
ode_solver=ode_solver(),
)
sol = ocp.solve()
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
# Check constraints
g = np.array(sol.constraints)
if ode_solver == OdeSolver.COLLOCATION:
np.testing.assert_equal(g.shape, (20 * 5, 1))
np.testing.assert_almost_equal(g, np.zeros((20 * 5, 1)), decimal=6)
else:
np.testing.assert_equal(g.shape, (20, 1))
np.testing.assert_almost_equal(g, np.zeros((20, 1)), decimal=6)
# Check some of the results
q, qdot, tau, mus = sol.states["q"], sol.states["qdot"], sol.controls["tau"], sol.controls["muscles"]
if ode_solver == OdeSolver.IRK:
np.testing.assert_almost_equal(f[0, 0], 3.624795808383824e-08)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-1.26294409e-05, -5.94685627e-06]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.10541975, -0.48577985]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.00074118, -0.00036854]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([-4.21473881, 7.26398638]))
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([-3.19231945e-08, 1.78181204e-06]))
np.testing.assert_almost_equal(tau[:, -2], np.array([2.55285701e-06, -5.12710950e-06]))
np.testing.assert_almost_equal(
mus[:, 0], np.array([0.37451645, 0.95067812, 0.73199474, 0.59864193, 0.15601703, 0.15600089])
)
np.testing.assert_almost_equal(
mus[:, -2], np.array([0.4559321, 0.78521782, 0.19970124, 0.51419847, 0.59238012, 0.04656187])
)
elif ode_solver == OdeSolver.COLLOCATION:
np.testing.assert_almost_equal(f[0, 0], 3.6846293820760475e-08)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-1.26294409e-05, -5.94685627e-06]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.10541975, -0.48577985]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.00074233, -0.00037249]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([-4.21473503, 7.26397692]))
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([-3.19231945e-08, 1.78181204e-06]))
np.testing.assert_almost_equal(tau[:, -2], np.array([2.55285701e-06, -5.12710950e-06]))
np.testing.assert_almost_equal(
mus[:, 0], np.array([0.37451633, 0.95067815, 0.73199481, 0.5986417, 0.15601682, 0.15600081])
)
np.testing.assert_almost_equal(
mus[:, -2], np.array([0.4559318, 0.78521793, 0.19970129, 0.51419838, 0.59238004, 0.04656203])
)
elif ode_solver == OdeSolver.RK4:
np.testing.assert_almost_equal(f[0, 0], 3.624795808383824e-08)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-1.24603457e-05, -5.56567245e-06]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.10542008, -0.48578046]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.00071319, -0.00034956]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([-4.21476386, 7.26402641]))
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([7.86364319e-08, 1.43718933e-06]))
np.testing.assert_almost_equal(tau[:, -2], np.array([2.33336715e-06, -4.52483197e-06]))
np.testing.assert_almost_equal(
mus[:, 0], np.array([0.3745183, 0.9506776, 0.7319939, 0.59864459, 0.15601947, 0.15600189])
)
np.testing.assert_almost_equal(
mus[:, -2], np.array([0.45594578, 0.78521284, 0.19969902, 0.51420259, 0.5923839, 0.04655438])
)
else:
raise ValueError("Test not implemented")
# save and load
TestUtils.save_and_load(sol, ocp, False)
# simulate
TestUtils.simulate(sol, decimal_value=5)
@pytest.mark.parametrize("ode_solver", [OdeSolver.RK4, OdeSolver.COLLOCATION, OdeSolver.IRK])
def test_muscle_activation_no_residual_torque_and_markers_tracking(ode_solver):
# Load muscle_activations_tracker
from bioptim.examples.muscle_driven_ocp import muscle_activations_tracker as ocp_module
bioptim_folder = os.path.dirname(ocp_module.__file__)
# Define the problem
model_path = bioptim_folder + "/models/arm26.bioMod"
biorbd_model = biorbd.Model(model_path)
final_time = 0.1
n_shooting = 5
use_residual_torque = False
# Generate random data to fit
np.random.seed(42)
t, markers_ref, x_ref, muscle_activations_ref = ocp_module.generate_data(
biorbd_model, final_time, n_shooting, use_residual_torque=use_residual_torque
)
biorbd_model = biorbd.Model(model_path) # To allow for non free variable, the model must be reloaded
ocp = ocp_module.prepare_ocp(
biorbd_model,
final_time,
n_shooting,
markers_ref,
muscle_activations_ref,
x_ref[: biorbd_model.nbQ(), :],
use_residual_torque=use_residual_torque,
kin_data_to_track="q",
ode_solver=ode_solver(),
)
sol = ocp.solve()
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
np.testing.assert_almost_equal(f[0, 0], 3.634248634056222e-08)
# Check constraints
g = np.array(sol.constraints)
if ode_solver == OdeSolver.COLLOCATION:
np.testing.assert_equal(g.shape, (20 * 5, 1))
np.testing.assert_almost_equal(g, np.zeros((20 * 5, 1)), decimal=6)
else:
np.testing.assert_equal(g.shape, (20, 1))
np.testing.assert_almost_equal(g, np.zeros((20, 1)), decimal=6)
# Check some of the results
q, qdot, mus = sol.states["q"], sol.states["qdot"], sol.controls["muscles"]
if ode_solver == OdeSolver.IRK:
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-1.26502327e-05, -5.98498658e-06]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.10541969, -0.48577983]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.00074251, -0.00036937]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([-4.21474217, 7.26398954]))
# initial and final controls
np.testing.assert_almost_equal(
mus[:, 0], np.array([0.37451604, 0.95067823, 0.73199494, 0.59864126, 0.15601641, 0.15600064])
)
np.testing.assert_almost_equal(
mus[:, -2], np.array([0.45593194, 0.78521787, 0.19970125, 0.51419844, 0.5923801, 0.04656193])
)
elif ode_solver == OdeSolver.COLLOCATION:
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-1.26434090e-05, -5.99992755e-06]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.10541971, -0.48577986]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.00074381, -0.00037358]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([-4.21473839, 7.26398039]))
# initial and final controls
np.testing.assert_almost_equal(
mus[:, 0], np.array([0.37451604, 0.95067823, 0.73199495, 0.59864125, 0.1560164, 0.15600064])
)
np.testing.assert_almost_equal(
mus[:, -2], np.array([0.45593167, 0.78521797, 0.1997013, 0.51419836, 0.59238002, 0.04656208])
)
elif ode_solver == OdeSolver.RK4:
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-1.24679103e-05, -5.63685028e-06]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.10542003, -0.48578047]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.00071458, -0.00035055]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([-4.21476717, 7.26402945]))
# initial and final controls
np.testing.assert_almost_equal(
mus[:, 0], np.array([0.3745179, 0.95067771, 0.7319941, 0.59864394, 0.15601888, 0.15600164])
)
np.testing.assert_almost_equal(
mus[:, -2], np.array([0.45594564, 0.78521289, 0.19969903, 0.51420257, 0.59238388, 0.04655442])
)
else:
raise ValueError("Test not ready")
# save and load
TestUtils.save_and_load(sol, ocp, False)
# simulate
TestUtils.simulate(sol, decimal_value=6)
@pytest.mark.parametrize("ode_solver", [OdeSolver.RK4, OdeSolver.COLLOCATION, OdeSolver.IRK])
def test_muscle_excitation_with_torque_and_markers_tracking(ode_solver):
# Load muscle_excitations_tracker
from bioptim.examples.muscle_driven_ocp import muscle_excitations_tracker as ocp_module
bioptim_folder = os.path.dirname(ocp_module.__file__)
# Define the problem
model_path = bioptim_folder + "/models/arm26.bioMod"
biorbd_model = biorbd.Model(model_path)
final_time = 0.1
n_shooting = 5
# Generate random data to fit
np.random.seed(42)
t, markers_ref, x_ref, muscle_excitations_ref = ocp_module.generate_data(biorbd_model, final_time, n_shooting)
biorbd_model = biorbd.Model(model_path) # To allow for non free variable, the model must be reloaded
ocp = ocp_module.prepare_ocp(
biorbd_model,
final_time,
n_shooting,
markers_ref,
muscle_excitations_ref,
x_ref[: biorbd_model.nbQ(), :].T,
use_residual_torque=True,
kin_data_to_track="markers",
ode_solver=ode_solver(),
)
sol = ocp.solve()
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
# Check constraints
g = np.array(sol.constraints)
if ode_solver == OdeSolver.COLLOCATION:
np.testing.assert_equal(g.shape, (50 * 5, 1))
np.testing.assert_almost_equal(g, np.zeros((50 * 5, 1)), decimal=6)
else:
np.testing.assert_equal(g.shape, (50, 1))
np.testing.assert_almost_equal(g, np.zeros((50, 1)), decimal=6)
# Check some of the results
q, qdot, mus_states, tau, mus_controls = (
sol.states["q"],
sol.states["qdot"],
sol.states["muscles"],
sol.controls["tau"],
sol.controls["muscles"],
)
if ode_solver == OdeSolver.IRK:
np.testing.assert_almost_equal(f[0, 0], 3.9377280548492226e-05)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-0.00351782, 0.01702219]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.14352637, -0.72030433]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([1.02984019, -3.91364352]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([-3.67284629, 3.62405443]))
# initial and final muscle state
np.testing.assert_almost_equal(
mus_states[:, 0], np.array([0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864, 0.15599452])
)
np.testing.assert_almost_equal(
mus_states[:, -1], np.array([0.51285729, 0.69943619, 0.40390569, 0.48032451, 0.53752346, 0.31437668])
)
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([5.42775569e-05, -3.45713249e-04]))
np.testing.assert_almost_equal(tau[:, -2], np.array([-2.73167136e-05, -3.83494902e-05]))
np.testing.assert_almost_equal(
mus_controls[:, 0], np.array([0.37743387, 0.95055777, 0.73174428, 0.60093014, 0.15924303, 0.15866534])
)
np.testing.assert_almost_equal(
mus_controls[:, -2], np.array([0.4560975, 0.78519158, 0.19973384, 0.51408083, 0.59227422, 0.04659415])
)
elif ode_solver == OdeSolver.COLLOCATION:
np.testing.assert_almost_equal(f[0, 0], 3.9378422266498184e-05)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-0.00351729, 0.01701928]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.14352497, -0.72030059]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([1.02972633, -3.91317111]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([-3.6728683, 3.62413508]))
# initial and final muscle state
np.testing.assert_almost_equal(
mus_states[:, 0], np.array([0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864, 0.15599452])
)
np.testing.assert_almost_equal(
mus_states[:, -1], np.array([0.51285285, 0.69943161, 0.40390586, 0.48032585, 0.53752527, 0.31437738])
)
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([5.42926592e-05, -3.45716906e-04]))
np.testing.assert_almost_equal(tau[:, -2], np.array([-2.72776735e-05, -3.84479459e-05]))
np.testing.assert_almost_equal(
mus_controls[:, 0], np.array([0.37744597, 0.95044549, 0.73173082, 0.60092211, 0.15932209, 0.15869578])
)
np.testing.assert_almost_equal(
mus_controls[:, -2], np.array([0.45609644, 0.78518702, 0.19973488, 0.51408246, 0.59227441, 0.04659677])
)
elif ode_solver == OdeSolver.RK4:
np.testing.assert_almost_equal(f[0, 0], 3.9163147567423305e-05)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-0.00352334, 0.01700853]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.14350606, -0.72027301]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([1.02920952, -3.91032827]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([-3.67351448, 3.62485659]))
# initial and final muscle state
np.testing.assert_almost_equal(
mus_states[:, 0], np.array([0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864, 0.15599452])
)
np.testing.assert_almost_equal(
mus_states[:, -1], np.array([0.51283945, 0.6994339, 0.40390624, 0.48031161, 0.53750849, 0.31441088])
)
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([5.44773721e-05, -3.45454293e-04]))
np.testing.assert_almost_equal(tau[:, -2], np.array([-2.68029143e-05, -3.90467765e-05]))
np.testing.assert_almost_equal(
mus_controls[:, 0], np.array([0.37740553, 0.95056685, 0.73174651, 0.60092669, 0.15924254, 0.15856357])
)
np.testing.assert_almost_equal(
mus_controls[:, -2], np.array([0.45609247, 0.7851955, 0.19973458, 0.51407787, 0.59227145, 0.04659596])
)
else:
raise ValueError("Test not ready")
# save and load
TestUtils.save_and_load(sol, ocp, False)
# simulate
TestUtils.simulate(sol)
@pytest.mark.parametrize("ode_solver", [OdeSolver.RK4, OdeSolver.COLLOCATION, OdeSolver.IRK])
def test_muscle_excitation_no_residual_torque_and_markers_tracking(ode_solver):
# Load muscle_excitations_tracker
from bioptim.examples.muscle_driven_ocp import muscle_excitations_tracker as ocp_module
bioptim_folder = os.path.dirname(ocp_module.__file__)
# Define the problem
model_path = bioptim_folder + "/models/arm26.bioMod"
biorbd_model = biorbd.Model(model_path)
final_time = 0.1
n_shooting = 5
# Generate random data to fit
np.random.seed(42)
t, markers_ref, x_ref, muscle_excitations_ref = ocp_module.generate_data(biorbd_model, final_time, n_shooting)
biorbd_model = biorbd.Model(model_path) # To allow for non free variable, the model must be reloaded
ocp = ocp_module.prepare_ocp(
biorbd_model,
final_time,
n_shooting,
markers_ref,
muscle_excitations_ref,
x_ref[: biorbd_model.nbQ(), :].T,
use_residual_torque=False,
kin_data_to_track="markers",
ode_solver=ode_solver(),
)
sol = ocp.solve()
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
# Check constraints
g = np.array(sol.constraints)
if ode_solver == OdeSolver.COLLOCATION:
np.testing.assert_equal(g.shape, (50 * 5, 1))
np.testing.assert_almost_equal(g, np.zeros((50 * 5, 1)), decimal=6)
else:
np.testing.assert_equal(g.shape, (50, 1))
np.testing.assert_almost_equal(g, np.zeros((50, 1)), decimal=6)
# Check some of the results
q, qdot, mus_states, mus_controls = (
sol.states["q"],
sol.states["qdot"],
sol.states["muscles"],
sol.controls["muscles"],
)
if ode_solver == OdeSolver.IRK:
np.testing.assert_almost_equal(f[0, 0], 3.939617534835209e-05)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-0.00352248, 0.01703644]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.1435249, -0.7202986]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([1.03023126, -3.91481759]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([-3.67283616, 3.62412467]))
# initial and final muscle state
np.testing.assert_almost_equal(
mus_states[:, 0], np.array([0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864, 0.15599452])
)
np.testing.assert_almost_equal(
mus_states[:, -1], np.array([0.51285401, 0.69943683, 0.40390633, 0.48032393, 0.53752275, 0.31437821])
)
# initial and final controls
np.testing.assert_almost_equal(
mus_controls[:, 0], np.array([0.37743017, 0.95055919, 0.73174445, 0.60093176, 0.15924552, 0.15866818])
)
np.testing.assert_almost_equal(
mus_controls[:, -2], np.array([0.45609693, 0.78519207, 0.19973399, 0.51408032, 0.59227376, 0.04659447])
)
elif ode_solver == OdeSolver.COLLOCATION:
np.testing.assert_almost_equal(f[0, 0], 3.939731707680551e-05)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-0.00352196, 0.01703354]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.1435235, -0.72029486]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([1.03011751, -3.91434553]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([-3.6728582, 3.62420546]))
# initial and final muscle state
np.testing.assert_almost_equal(
mus_states[:, 0], np.array([0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864, 0.15599452])
)
np.testing.assert_almost_equal(
mus_states[:, -1], np.array([0.51284957, 0.69943225, 0.40390649, 0.48032527, 0.53752456, 0.31437891])
)
# initial and final controls
np.testing.assert_almost_equal(
mus_controls[:, 0], np.array([0.37744227, 0.95044691, 0.73173098, 0.60092373, 0.15932458, 0.15869862])
)
np.testing.assert_almost_equal(
mus_controls[:, -2], np.array([0.45609587, 0.78518751, 0.19973503, 0.51408194, 0.59227395, 0.04659709])
)
elif ode_solver == OdeSolver.RK4:
np.testing.assert_almost_equal(f[0, 0], 3.918210818142734e-05)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-0.00352802, 0.01702281]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.14350458, -0.72026726]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([1.02960131, -3.91150408]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([-3.67350467, 3.62492773]))
# initial and final muscle state
np.testing.assert_almost_equal(
mus_states[:, 0], np.array([0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864, 0.15599452])
)
np.testing.assert_almost_equal(
mus_states[:, -1], np.array([0.51283615, 0.69943454, 0.40390687, 0.48031102, 0.53750777, 0.31441242])
)
# initial and final controls
np.testing.assert_almost_equal(
mus_controls[:, 0], np.array([0.37740184, 0.95056827, 0.73174668, 0.60092831, 0.15924504, 0.15856629])
)
np.testing.assert_almost_equal(
mus_controls[:, -2], np.array([0.4560919, 0.785196, 0.19973472, 0.51407736, 0.59227099, 0.04659628])
)
else:
raise ValueError("Test not implemented")
# save and load
TestUtils.save_and_load(sol, ocp, False)
# simulate
TestUtils.simulate(sol)
| 43.747036
| 115
| 0.651247
| 3,057
| 22,136
| 4.540726
| 0.144259
| 0.081694
| 0.136157
| 0.172466
| 0.800879
| 0.796701
| 0.796701
| 0.796701
| 0.794539
| 0.788056
| 0
| 0.198586
| 0.207671
| 22,136
| 505
| 116
| 43.833663
| 0.59285
| 0.096811
| 0
| 0.570637
| 0
| 0
| 0.013663
| 0
| 0
| 0
| 0
| 0
| 0.34903
| 1
| 0.01108
| false
| 0
| 0.027701
| 0
| 0.038781
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e1a59b1056dc76f23a5c22df703efbdbbece8c10
| 6,545
|
py
|
Python
|
loldib/getratings/models/NA/na_graves/na_graves_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_graves/na_graves_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_graves/na_graves_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Empty Ratings subclasses: one per champion that can be matched against
# Graves in the NA jungle data set.  The classes add no behaviour of their
# own — each exists only to supply a distinct model name of the form
# ``NA_Graves_Jng_<Champion>``.  They are generated in a loop instead of
# 138 hand-written ``class`` blocks; ``type(name, (Ratings,), {})`` is
# equivalent to ``class <name>(Ratings): pass``.
_CHAMPIONS = (
    "Aatrox", "Ahri", "Akali", "Alistar", "Amumu", "Anivia", "Annie", "Ashe",
    "AurelionSol", "Azir", "Bard", "Blitzcrank", "Brand", "Braum", "Caitlyn",
    "Camille", "Cassiopeia", "Chogath", "Corki", "Darius", "Diana", "Draven",
    "DrMundo", "Ekko", "Elise", "Evelynn", "Ezreal", "Fiddlesticks", "Fiora",
    "Fizz", "Galio", "Gangplank", "Garen", "Gnar", "Gragas", "Graves",
    "Hecarim", "Heimerdinger", "Illaoi", "Irelia", "Ivern", "Janna",
    "JarvanIV", "Jax", "Jayce", "Jhin", "Jinx", "Kalista", "Karma", "Karthus",
    "Kassadin", "Katarina", "Kayle", "Kayn", "Kennen", "Khazix", "Kindred",
    "Kled", "KogMaw", "Leblanc", "LeeSin", "Leona", "Lissandra", "Lucian",
    "Lulu", "Lux", "Malphite", "Malzahar", "Maokai", "MasterYi", "MissFortune",
    "MonkeyKing", "Mordekaiser", "Morgana", "Nami", "Nasus", "Nautilus",
    "Nidalee", "Nocturne", "Nunu", "Olaf", "Orianna", "Ornn", "Pantheon",
    "Poppy", "Quinn", "Rakan", "Rammus", "RekSai", "Renekton", "Rengar",
    "Riven", "Rumble", "Ryze", "Sejuani", "Shaco", "Shen", "Shyvana", "Singed",
    "Sion", "Sivir", "Skarner", "Sona", "Soraka", "Swain", "Syndra",
    "TahmKench", "Taliyah", "Talon", "Taric", "Teemo", "Thresh", "Tristana",
    "Trundle", "Tryndamere", "TwistedFate", "Twitch", "Udyr", "Urgot", "Varus",
    "Vayne", "Veigar", "Velkoz", "Vi", "Viktor", "Vladimir", "Volibear",
    "Warwick", "Xayah", "Xerath", "XinZhao", "Yasuo", "Yorick", "Zac", "Zed",
    "Ziggs", "Zilean", "Zyra",
)
for _champion in _CHAMPIONS:
    _cls_name = "NA_Graves_Jng_" + _champion
    globals()[_cls_name] = type(_cls_name, (Ratings,), {})
# Keep the module namespace clean: only the generated classes remain public.
del _CHAMPIONS, _champion, _cls_name
| 15.695444
| 46
| 0.766692
| 972
| 6,545
| 4.736626
| 0.151235
| 0.209818
| 0.389661
| 0.479583
| 0.803432
| 0.803432
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169748
| 6,545
| 416
| 47
| 15.733173
| 0.847258
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
830cf45d0e8230d090f73dc055d7a7e963aa8d28
| 93
|
py
|
Python
|
openpype/tools/tray/__main__.py
|
jonclothcat/OpenPype
|
d1208cbebc0a7f378de0062ccd653295c6399195
|
[
"MIT"
] | 87
|
2021-05-07T08:40:46.000Z
|
2022-03-19T00:36:25.000Z
|
openpype/tools/tray/__main__.py
|
zafrs/OpenPype
|
4b8e7e1ed002fc55b31307efdea70b0feaed474f
|
[
"MIT"
] | 1,019
|
2021-04-26T06:22:56.000Z
|
2022-03-31T16:30:43.000Z
|
openpype/tools/tray/__main__.py
|
zafrs/OpenPype
|
4b8e7e1ed002fc55b31307efdea70b0feaed474f
|
[
"MIT"
] | 33
|
2021-04-29T12:35:54.000Z
|
2022-03-25T14:48:42.000Z
|
# Tray entry point (``python -m openpype.tools.tray``).
# Prefer the package-relative import; fall back to a plain top-level import
# when this file is executed directly and has no package context.
try:
    from . import pype_tray
except ImportError:
    import pype_tray
# Launch the tray application.
pype_tray.main()
| 11.625
| 27
| 0.72043
| 13
| 93
| 4.923077
| 0.615385
| 0.375
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.215054
| 93
| 7
| 28
| 13.285714
| 0.876712
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8345f4986614f20e46f5c5d1b8ae20c5bcf086a2
| 2,779
|
py
|
Python
|
orbitscalc/migrations/0033_auto_20200213_1202.py
|
Surf1sh/Orbitscalc
|
2cf2914ec2fead4ebff76115e0747cc15d2f08c9
|
[
"Apache-2.0"
] | 1
|
2021-04-20T23:39:58.000Z
|
2021-04-20T23:39:58.000Z
|
orbitscalc/migrations/0033_auto_20200213_1202.py
|
Surf1sh/Orbitscalc
|
2cf2914ec2fead4ebff76115e0747cc15d2f08c9
|
[
"Apache-2.0"
] | null | null | null |
orbitscalc/migrations/0033_auto_20200213_1202.py
|
Surf1sh/Orbitscalc
|
2cf2914ec2fead4ebff76115e0747cc15d2f08c9
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.2.6 on 2020-02-13 11:02
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add location and RF-capability columns to the ``sana_aperture`` model."""

    dependencies = [
        ('orbitscalc', '0032_ge_groundstation'),
    ]

    # Every new column is nullable, so the migration applies cleanly to rows
    # that already exist.  The (column name, field) pairs below are expanded
    # into one AddField operation each.
    operations = [
        migrations.AddField(
            model_name='sana_aperture',
            name=column_name,
            field=column_field,
        )
        for column_name, column_field in [
            ('alt', models.DecimalField(decimal_places=3, max_digits=7, null=True)),
            ('diameter_m', models.DecimalField(decimal_places=3, max_digits=7, null=True)),
            ('downlink_frequency_max_MHz', models.DecimalField(decimal_places=3, max_digits=10, null=True)),
            ('downlink_frequency_min_MHz', models.DecimalField(decimal_places=3, max_digits=10, null=True)),
            ('eirp_max_dbw', models.DecimalField(decimal_places=3, max_digits=10, null=True)),
            ('eirp_min_dbw', models.DecimalField(decimal_places=3, max_digits=10, null=True)),
            ('gsname', models.CharField(max_length=200, null=True)),
            ('gt_max_dbw_k', models.DecimalField(decimal_places=3, max_digits=10, null=True)),
            ('gt_min_dbw_k', models.DecimalField(decimal_places=3, max_digits=10, null=True)),
            ('lat', models.DecimalField(decimal_places=12, max_digits=15, null=True)),
            ('lon', models.DecimalField(decimal_places=12, max_digits=15, null=True)),
            ('uplink_frequency_max_MHz', models.DecimalField(decimal_places=3, max_digits=10, null=True)),
            ('uplink_frequency_min_MHz', models.DecimalField(decimal_places=3, max_digits=10, null=True)),
        ]
    ]
| 35.177215
| 83
| 0.593379
| 295
| 2,779
| 5.332203
| 0.2
| 0.14876
| 0.190083
| 0.223141
| 0.86014
| 0.86014
| 0.86014
| 0.832804
| 0.832804
| 0.832804
| 0
| 0.029607
| 0.29507
| 2,779
| 78
| 84
| 35.628205
| 0.773354
| 0.016193
| 0
| 0.708333
| 1
| 0
| 0.13653
| 0.04429
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013889
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
55debc078945bac7b1d98ad4c3ee7193e8baa54d
| 22,088
|
py
|
Python
|
sdk/python/pulumi_pagerduty/extension.py
|
pulumi/pulumi-pagerduty
|
1c08849cda3d5fccf5eb9f615dc004b1f8f90555
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2020-05-27T08:18:35.000Z
|
2021-07-31T08:40:03.000Z
|
sdk/python/pulumi_pagerduty/extension.py
|
pulumi/pulumi-pagerduty
|
1c08849cda3d5fccf5eb9f615dc004b1f8f90555
|
[
"ECL-2.0",
"Apache-2.0"
] | 48
|
2020-05-26T10:59:40.000Z
|
2022-03-31T15:41:54.000Z
|
sdk/python/pulumi_pagerduty/extension.py
|
pulumi/pulumi-pagerduty
|
1c08849cda3d5fccf5eb9f615dc004b1f8f90555
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-05-26T17:51:56.000Z
|
2020-05-26T17:51:56.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['ExtensionArgs', 'Extension']
@pulumi.input_type
class ExtensionArgs:
    """Input bundle used when creating an ``Extension`` resource.

    NOTE: tfgen-generated code — keep edits to comments/docstrings only.
    """
    def __init__(__self__, *,
                 extension_objects: pulumi.Input[Sequence[pulumi.Input[str]]],
                 extension_schema: pulumi.Input[str],
                 config: Optional[pulumi.Input[str]] = None,
                 endpoint_url: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Extension resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] extension_objects: This is the objects for which the extension applies (An array of service ids).
        :param pulumi.Input[str] extension_schema: This is the schema for this extension.
        :param pulumi.Input[str] config: The configuration of the service extension as string containing plain JSON-encoded data.
        :param pulumi.Input[str] endpoint_url: The url of the extension.
               **Note:** The [endpoint URL is Optional API wise](https://api-reference.pagerduty.com/#!/Extensions/post_extensions) in most cases. But in some cases it is a _Required_ parameter. For example, `getExtensionSchema` named `Generic V2 Webhook` doesn't accept `Extension` with no `endpoint_url`, but one with named `Slack` accepts.
        :param pulumi.Input[str] name: The name of the service extension.
        """
        # Required inputs are always stored; optional ones only when provided,
        # so the engine can distinguish "unset" from an explicit value.
        pulumi.set(__self__, "extension_objects", extension_objects)
        pulumi.set(__self__, "extension_schema", extension_schema)
        if config is not None:
            pulumi.set(__self__, "config", config)
        if endpoint_url is not None:
            pulumi.set(__self__, "endpoint_url", endpoint_url)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if type is not None:
            pulumi.set(__self__, "type", type)
    # Property pairs below wire each attribute into pulumi's input machinery
    # (camelCase wire names via @pulumi.getter(name=...)).
    @property
    @pulumi.getter(name="extensionObjects")
    def extension_objects(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        This is the objects for which the extension applies (An array of service ids).
        """
        return pulumi.get(self, "extension_objects")
    @extension_objects.setter
    def extension_objects(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "extension_objects", value)
    @property
    @pulumi.getter(name="extensionSchema")
    def extension_schema(self) -> pulumi.Input[str]:
        """
        This is the schema for this extension.
        """
        return pulumi.get(self, "extension_schema")
    @extension_schema.setter
    def extension_schema(self, value: pulumi.Input[str]):
        pulumi.set(self, "extension_schema", value)
    @property
    @pulumi.getter
    def config(self) -> Optional[pulumi.Input[str]]:
        """
        The configuration of the service extension as string containing plain JSON-encoded data.
        """
        return pulumi.get(self, "config")
    @config.setter
    def config(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "config", value)
    @property
    @pulumi.getter(name="endpointUrl")
    def endpoint_url(self) -> Optional[pulumi.Input[str]]:
        """
        The url of the extension.
        **Note:** The [endpoint URL is Optional API wise](https://api-reference.pagerduty.com/#!/Extensions/post_extensions) in most cases. But in some cases it is a _Required_ parameter. For example, `getExtensionSchema` named `Generic V2 Webhook` doesn't accept `Extension` with no `endpoint_url`, but one with named `Slack` accepts.
        """
        return pulumi.get(self, "endpoint_url")
    @endpoint_url.setter
    def endpoint_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint_url", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the service extension.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): upstream provides no docstring for ``type``;
        # presumably the provider-side extension type — confirm against the
        # PagerDuty provider schema before documenting as fact.
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class _ExtensionState:
    """Lookup/filter state for an existing ``Extension`` resource.

    Unlike ``ExtensionArgs`` every field (including the engine-populated
    ``html_url``) is optional here.  NOTE: tfgen-generated code — keep edits
    to comments/docstrings only.
    """
    def __init__(__self__, *,
                 config: Optional[pulumi.Input[str]] = None,
                 endpoint_url: Optional[pulumi.Input[str]] = None,
                 extension_objects: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 extension_schema: Optional[pulumi.Input[str]] = None,
                 html_url: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Extension resources.
        :param pulumi.Input[str] config: The configuration of the service extension as string containing plain JSON-encoded data.
        :param pulumi.Input[str] endpoint_url: The url of the extension.
               **Note:** The [endpoint URL is Optional API wise](https://api-reference.pagerduty.com/#!/Extensions/post_extensions) in most cases. But in some cases it is a _Required_ parameter. For example, `getExtensionSchema` named `Generic V2 Webhook` doesn't accept `Extension` with no `endpoint_url`, but one with named `Slack` accepts.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] extension_objects: This is the objects for which the extension applies (An array of service ids).
        :param pulumi.Input[str] extension_schema: This is the schema for this extension.
        :param pulumi.Input[str] html_url: URL at which the entity is uniquely displayed in the Web app
        :param pulumi.Input[str] name: The name of the service extension.
        """
        # Store only the values that were actually supplied.
        if config is not None:
            pulumi.set(__self__, "config", config)
        if endpoint_url is not None:
            pulumi.set(__self__, "endpoint_url", endpoint_url)
        if extension_objects is not None:
            pulumi.set(__self__, "extension_objects", extension_objects)
        if extension_schema is not None:
            pulumi.set(__self__, "extension_schema", extension_schema)
        if html_url is not None:
            pulumi.set(__self__, "html_url", html_url)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if type is not None:
            pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def config(self) -> Optional[pulumi.Input[str]]:
        """
        The configuration of the service extension as string containing plain JSON-encoded data.
        """
        return pulumi.get(self, "config")
    @config.setter
    def config(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "config", value)
    @property
    @pulumi.getter(name="endpointUrl")
    def endpoint_url(self) -> Optional[pulumi.Input[str]]:
        """
        The url of the extension.
        **Note:** The [endpoint URL is Optional API wise](https://api-reference.pagerduty.com/#!/Extensions/post_extensions) in most cases. But in some cases it is a _Required_ parameter. For example, `getExtensionSchema` named `Generic V2 Webhook` doesn't accept `Extension` with no `endpoint_url`, but one with named `Slack` accepts.
        """
        return pulumi.get(self, "endpoint_url")
    @endpoint_url.setter
    def endpoint_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint_url", value)
    @property
    @pulumi.getter(name="extensionObjects")
    def extension_objects(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        This is the objects for which the extension applies (An array of service ids).
        """
        return pulumi.get(self, "extension_objects")
    @extension_objects.setter
    def extension_objects(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "extension_objects", value)
    @property
    @pulumi.getter(name="extensionSchema")
    def extension_schema(self) -> Optional[pulumi.Input[str]]:
        """
        This is the schema for this extension.
        """
        return pulumi.get(self, "extension_schema")
    @extension_schema.setter
    def extension_schema(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "extension_schema", value)
    @property
    @pulumi.getter(name="htmlUrl")
    def html_url(self) -> Optional[pulumi.Input[str]]:
        """
        URL at which the entity is uniquely displayed in the Web app
        """
        return pulumi.get(self, "html_url")
    @html_url.setter
    def html_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "html_url", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the service extension.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): no upstream docstring for ``type`` — see ExtensionArgs.type.
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
class Extension(pulumi.CustomResource):
    """A PagerDuty service extension resource (tfgen-generated wrapper).

    NOTE: generated code — keep edits to comments/docstrings only.
    """
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 config: Optional[pulumi.Input[str]] = None,
                 endpoint_url: Optional[pulumi.Input[str]] = None,
                 extension_objects: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 extension_schema: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        An [extension](https://developer.pagerduty.com/api-reference/reference/REST/openapiv3.json/paths/~1extensions/post) can be associated with a service.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_pagerduty as pagerduty
        webhook = pagerduty.get_extension_schema(name="Generic V2 Webhook")
        example_user = pagerduty.User("exampleUser", email="howard.james@example.domain")
        example_escalation_policy = pagerduty.EscalationPolicy("exampleEscalationPolicy",
            num_loops=2,
            rules=[pagerduty.EscalationPolicyRuleArgs(
                escalation_delay_in_minutes=10,
                targets=[pagerduty.EscalationPolicyRuleTargetArgs(
                    type="user",
                    id=example_user.id,
                )],
            )])
        example_service = pagerduty.Service("exampleService",
            auto_resolve_timeout="14400",
            acknowledgement_timeout="600",
            escalation_policy=example_escalation_policy.id)
        slack = pagerduty.Extension("slack",
            endpoint_url="https://generic_webhook_url/XXXXXX/BBBBBB",
            extension_schema=webhook.id,
            extension_objects=[example_service.id],
            config=\"\"\"{
          "restrict": "any",
          "notify_types": {
              "resolve": false,
              "acknowledge": false,
              "assignments": false
          },
          "access_token": "XXX"
        }
        \"\"\")
        ```
        ## Import
        Extensions can be imported using the id.e.g.
        ```sh
         $ pulumi import pagerduty:index/extension:Extension main PLBP09X
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] config: The configuration of the service extension as string containing plain JSON-encoded data.
        :param pulumi.Input[str] endpoint_url: The url of the extension.
               **Note:** The [endpoint URL is Optional API wise](https://api-reference.pagerduty.com/#!/Extensions/post_extensions) in most cases. But in some cases it is a _Required_ parameter. For example, `getExtensionSchema` named `Generic V2 Webhook` doesn't accept `Extension` with no `endpoint_url`, but one with named `Slack` accepts.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] extension_objects: This is the objects for which the extension applies (An array of service ids).
        :param pulumi.Input[str] extension_schema: This is the schema for this extension.
        :param pulumi.Input[str] name: The name of the service extension.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ExtensionArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        An [extension](https://developer.pagerduty.com/api-reference/reference/REST/openapiv3.json/paths/~1extensions/post) can be associated with a service.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_pagerduty as pagerduty
        webhook = pagerduty.get_extension_schema(name="Generic V2 Webhook")
        example_user = pagerduty.User("exampleUser", email="howard.james@example.domain")
        example_escalation_policy = pagerduty.EscalationPolicy("exampleEscalationPolicy",
            num_loops=2,
            rules=[pagerduty.EscalationPolicyRuleArgs(
                escalation_delay_in_minutes=10,
                targets=[pagerduty.EscalationPolicyRuleTargetArgs(
                    type="user",
                    id=example_user.id,
                )],
            )])
        example_service = pagerduty.Service("exampleService",
            auto_resolve_timeout="14400",
            acknowledgement_timeout="600",
            escalation_policy=example_escalation_policy.id)
        slack = pagerduty.Extension("slack",
            endpoint_url="https://generic_webhook_url/XXXXXX/BBBBBB",
            extension_schema=webhook.id,
            extension_objects=[example_service.id],
            config=\"\"\"{
          "restrict": "any",
          "notify_types": {
              "resolve": false,
              "acknowledge": false,
              "assignments": false
          },
          "access_token": "XXX"
        }
        \"\"\")
        ```
        ## Import
        Extensions can be imported using the id.e.g.
        ```sh
         $ pulumi import pagerduty:index/extension:Extension main PLBP09X
        ```
        :param str resource_name: The name of the resource.
        :param ExtensionArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Real constructor behind the two @overload stubs above: dispatch to
        # _internal_init with either the bundled ExtensionArgs or raw kwargs.
        resource_args, opts = _utilities.get_resource_args_opts(ExtensionArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 config: Optional[pulumi.Input[str]] = None,
                 endpoint_url: Optional[pulumi.Input[str]] = None,
                 extension_objects: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 extension_schema: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Validate/normalize resource options, then register the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (no explicit id): build the props bag.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ExtensionArgs.__new__(ExtensionArgs)
            __props__.__dict__["config"] = config
            __props__.__dict__["endpoint_url"] = endpoint_url
            # extension_objects / extension_schema are required unless the
            # resource is being adopted via an explicit URN.
            if extension_objects is None and not opts.urn:
                raise TypeError("Missing required property 'extension_objects'")
            __props__.__dict__["extension_objects"] = extension_objects
            if extension_schema is None and not opts.urn:
                raise TypeError("Missing required property 'extension_schema'")
            __props__.__dict__["extension_schema"] = extension_schema
            __props__.__dict__["name"] = name
            __props__.__dict__["type"] = type
            # html_url is an output-only property populated by the provider.
            __props__.__dict__["html_url"] = None
        super(Extension, __self__).__init__(
            'pagerduty:index/extension:Extension',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            config: Optional[pulumi.Input[str]] = None,
            endpoint_url: Optional[pulumi.Input[str]] = None,
            extension_objects: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            extension_schema: Optional[pulumi.Input[str]] = None,
            html_url: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            type: Optional[pulumi.Input[str]] = None) -> 'Extension':
        """
        Get an existing Extension resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] config: The configuration of the service extension as string containing plain JSON-encoded data.
        :param pulumi.Input[str] endpoint_url: The url of the extension.
               **Note:** The [endpoint URL is Optional API wise](https://api-reference.pagerduty.com/#!/Extensions/post_extensions) in most cases. But in some cases it is a _Required_ parameter. For example, `getExtensionSchema` named `Generic V2 Webhook` doesn't accept `Extension` with no `endpoint_url`, but one with named `Slack` accepts.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] extension_objects: This is the objects for which the extension applies (An array of service ids).
        :param pulumi.Input[str] extension_schema: This is the schema for this extension.
        :param pulumi.Input[str] html_url: URL at which the entity is uniquely displayed in the Web app
        :param pulumi.Input[str] name: The name of the service extension.
        """
        # Adopt the existing resource identified by ``id`` into this program.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ExtensionState.__new__(_ExtensionState)
        __props__.__dict__["config"] = config
        __props__.__dict__["endpoint_url"] = endpoint_url
        __props__.__dict__["extension_objects"] = extension_objects
        __props__.__dict__["extension_schema"] = extension_schema
        __props__.__dict__["html_url"] = html_url
        __props__.__dict__["name"] = name
        __props__.__dict__["type"] = type
        return Extension(resource_name, opts=opts, __props__=__props__)
    # Read-only output properties resolved by the engine at deploy time.
    @property
    @pulumi.getter
    def config(self) -> pulumi.Output[Optional[str]]:
        """
        The configuration of the service extension as string containing plain JSON-encoded data.
        """
        return pulumi.get(self, "config")
    @property
    @pulumi.getter(name="endpointUrl")
    def endpoint_url(self) -> pulumi.Output[Optional[str]]:
        """
        The url of the extension.
        **Note:** The [endpoint URL is Optional API wise](https://api-reference.pagerduty.com/#!/Extensions/post_extensions) in most cases. But in some cases it is a _Required_ parameter. For example, `getExtensionSchema` named `Generic V2 Webhook` doesn't accept `Extension` with no `endpoint_url`, but one with named `Slack` accepts.
        """
        return pulumi.get(self, "endpoint_url")
    @property
    @pulumi.getter(name="extensionObjects")
    def extension_objects(self) -> pulumi.Output[Sequence[str]]:
        """
        This is the objects for which the extension applies (An array of service ids).
        """
        return pulumi.get(self, "extension_objects")
    @property
    @pulumi.getter(name="extensionSchema")
    def extension_schema(self) -> pulumi.Output[str]:
        """
        This is the schema for this extension.
        """
        return pulumi.get(self, "extension_schema")
    @property
    @pulumi.getter(name="htmlUrl")
    def html_url(self) -> pulumi.Output[str]:
        """
        URL at which the entity is uniquely displayed in the Web app
        """
        return pulumi.get(self, "html_url")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the service extension.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        # NOTE(review): no upstream docstring for ``type`` — see ExtensionArgs.type.
        return pulumi.get(self, "type")
| 44.442656
| 342
| 0.641253
| 2,558
| 22,088
| 5.341282
| 0.091478
| 0.078094
| 0.084023
| 0.074069
| 0.878065
| 0.864744
| 0.847252
| 0.830125
| 0.798653
| 0.785918
| 0
| 0.002421
| 0.251992
| 22,088
| 496
| 343
| 44.532258
| 0.824537
| 0.417376
| 0
| 0.716
| 1
| 0
| 0.088872
| 0.003017
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16
| false
| 0.004
| 0.02
| 0.012
| 0.276
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3600f0a884c82665449fb225eaae817016854055
| 19,542
|
py
|
Python
|
tests/test_client_ws.py
|
GeekLiB/aiohttp
|
6de60d6655d5e0837b7cc3e9f1d29419f4771d37
|
[
"Apache-2.0"
] | null | null | null |
tests/test_client_ws.py
|
GeekLiB/aiohttp
|
6de60d6655d5e0837b7cc3e9f1d29419f4771d37
|
[
"Apache-2.0"
] | null | null | null |
tests/test_client_ws.py
|
GeekLiB/aiohttp
|
6de60d6655d5e0837b7cc3e9f1d29419f4771d37
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import base64
import hashlib
import os
from unittest import mock
import pytest
import aiohttp
from aiohttp import ClientWebSocketResponse, errors, hdrs, helpers
from aiohttp._ws_impl import WS_KEY
from aiohttp.log import ws_logger
@pytest.fixture
def key_data():
    # 16 random bytes used as the raw client WebSocket key material.
    return os.urandom(16)
@pytest.fixture
def key(key_data):
    # Base64 form of ``key_data`` — the value a client sends in the
    # Sec-WebSocket-Key request header.
    return base64.b64encode(key_data)
@pytest.fixture
def ws_key(key):
    # Expected Sec-WebSocket-Accept value: base64(SHA1(key + WS_KEY)),
    # decoded to str; WS_KEY is the handshake GUID from aiohttp._ws_impl.
    return base64.b64encode(hashlib.sha1(key + WS_KEY).digest()).decode()
@asyncio.coroutine
def test_ws_connect(ws_key, loop, key_data):
    # Happy path: a 101 response with the matching accept key and a
    # negotiated subprotocol yields a ClientWebSocketResponse.
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key,
        hdrs.SEC_WEBSOCKET_PROTOCOL: 'chat'
    }
    # Patch os.urandom so the client generates exactly ``key_data`` (and so
    # expects ``ws_key`` back), and stub the session's get() with our mock
    # response wrapped in an already-resolved future.
    with mock.patch('aiohttp.client.os') as m_os:
        with mock.patch('aiohttp.client.ClientSession.get') as m_req:
            m_os.urandom.return_value = key_data
            m_req.return_value = helpers.create_future(loop)
            m_req.return_value.set_result(resp)
            res = yield from aiohttp.ws_connect(
                'http://test.org',
                protocols=('t1', 't2', 'chat'),
                loop=loop)
    assert isinstance(res, ClientWebSocketResponse)
    assert res.protocol == 'chat'
    # No Origin header may be sent unless explicitly requested.
    assert hdrs.ORIGIN not in m_req.call_args[1]["headers"]
@asyncio.coroutine
def test_ws_connect_with_origin(key_data, loop):
    # A 403 answer to a handshake that carried an Origin header must raise
    # WSServerHandshakeError — and the Origin header must have been sent.
    resp = mock.Mock()
    resp.status = 403
    with mock.patch('aiohttp.client.os') as m_os:
        with mock.patch('aiohttp.client.ClientSession.get') as m_req:
            m_os.urandom.return_value = key_data
            m_req.return_value = helpers.create_future(loop)
            m_req.return_value.set_result(resp)
            origin = 'https://example.org/page.html'
            with pytest.raises(errors.WSServerHandshakeError):
                yield from aiohttp.ws_connect('http://test.org',
                                              loop=loop,
                                              origin=origin)
    assert hdrs.ORIGIN in m_req.call_args[1]["headers"]
    assert m_req.call_args[1]["headers"][hdrs.ORIGIN] == origin
@asyncio.coroutine
def test_ws_connect_custom_response(loop, ws_key, key_data):
    """ws_response_class lets callers substitute their own response type."""
    class CustomResponse(ClientWebSocketResponse):
        # Overridden only to prove the custom class is the one instantiated.
        def read(self, decode=False):
            return 'customized!'
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key,
    }
    with mock.patch('aiohttp.client.os') as m_os:
        with mock.patch('aiohttp.client.ClientSession.get') as m_req:
            m_os.urandom.return_value = key_data
            m_req.return_value = helpers.create_future(loop)
            m_req.return_value.set_result(resp)
            res = yield from aiohttp.ws_connect(
                'http://test.org',
                ws_response_class=CustomResponse,
                loop=loop)
    assert res.read() == 'customized!'
@asyncio.coroutine
def test_ws_connect_global_loop(loop, ws_key, key_data):
    """Without an explicit loop argument, ws_connect uses the global loop."""
    asyncio.set_event_loop(loop)
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key
    }
    with mock.patch('aiohttp.client.os') as m_os:
        with mock.patch('aiohttp.client.ClientSession.get') as m_req:
            m_os.urandom.return_value = key_data
            m_req.return_value = helpers.create_future(loop)
            m_req.return_value.set_result(resp)
            # Note: no loop= kwarg here on purpose.
            resp = yield from aiohttp.ws_connect('http://test.org')
    assert resp._loop is loop
    # Restore global state so subsequent tests are unaffected.
    asyncio.set_event_loop(None)
@asyncio.coroutine
def test_ws_connect_err_status(loop, ws_key, key_data):
    """A non-101 status fails with message 'Invalid response status'."""
    resp = mock.Mock()
    resp.status = 500
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key
    }
    with mock.patch('aiohttp.client.os') as m_os:
        with mock.patch('aiohttp.client.ClientSession.get') as m_req:
            m_os.urandom.return_value = key_data
            m_req.return_value = helpers.create_future(loop)
            m_req.return_value.set_result(resp)
            with pytest.raises(errors.WSServerHandshakeError) as ctx:
                yield from aiohttp.ws_connect('http://test.org',
                                              protocols=('t1', 't2', 'chat'),
                                              loop=loop)
    assert ctx.value.message == 'Invalid response status'
@asyncio.coroutine
def test_ws_connect_err_upgrade(loop, ws_key, key_data):
    """A wrong Upgrade header fails with 'Invalid upgrade header'."""
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: 'test',  # anything other than 'websocket'
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key
    }
    with mock.patch('aiohttp.client.os') as m_os:
        with mock.patch('aiohttp.client.ClientSession.get') as m_req:
            m_os.urandom.return_value = key_data
            m_req.return_value = helpers.create_future(loop)
            m_req.return_value.set_result(resp)
            with pytest.raises(errors.WSServerHandshakeError) as ctx:
                yield from aiohttp.ws_connect('http://test.org',
                                              protocols=('t1', 't2', 'chat'),
                                              loop=loop)
    assert ctx.value.message == 'Invalid upgrade header'
@asyncio.coroutine
def test_ws_connect_err_conn(loop, ws_key, key_data):
    """A Connection header other than 'upgrade' fails the handshake."""
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: 'close',  # must be 'upgrade'
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key
    }
    with mock.patch('aiohttp.client.os') as m_os:
        with mock.patch('aiohttp.client.ClientSession.get') as m_req:
            m_os.urandom.return_value = key_data
            m_req.return_value = helpers.create_future(loop)
            m_req.return_value.set_result(resp)
            with pytest.raises(errors.WSServerHandshakeError) as ctx:
                yield from aiohttp.ws_connect('http://test.org',
                                              protocols=('t1', 't2', 'chat'),
                                              loop=loop)
    assert ctx.value.message == 'Invalid connection header'
@asyncio.coroutine
def test_ws_connect_err_challenge(loop, ws_key, key_data):
    """An accept value not matching SHA1(key + GUID) fails the handshake."""
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: 'asdfasdfasdfasdfasdfasdf'  # bogus accept
    }
    with mock.patch('aiohttp.client.os') as m_os:
        with mock.patch('aiohttp.client.ClientSession.get') as m_req:
            m_os.urandom.return_value = key_data
            m_req.return_value = helpers.create_future(loop)
            m_req.return_value.set_result(resp)
            with pytest.raises(errors.WSServerHandshakeError) as ctx:
                yield from aiohttp.ws_connect('http://test.org',
                                              protocols=('t1', 't2', 'chat'),
                                              loop=loop)
    assert ctx.value.message == 'Invalid challenge response'
@asyncio.coroutine
def test_ws_connect_common_headers(ws_key, loop, key_data):
    """Emulate a headers dict being reused for a second ws_connect.
    In this scenario, we need to ensure that the newly generated secret key
    is sent to the server, not the stale key.
    """
    headers = {}  # shared across both connections on purpose
    @asyncio.coroutine
    def test_connection():
        @asyncio.coroutine
        def mock_get(*args, **kwargs):
            # Build a valid handshake response from whatever key the client
            # actually sent, so a stale key would fail the accept check.
            resp = mock.Mock()
            resp.status = 101
            key = kwargs.get('headers').get(hdrs.SEC_WEBSOCKET_KEY)
            accept = base64.b64encode(
                hashlib.sha1(base64.b64encode(base64.b64decode(key)) + WS_KEY)
                .digest()).decode()
            resp.headers = {
                hdrs.UPGRADE: hdrs.WEBSOCKET,
                hdrs.CONNECTION: hdrs.UPGRADE,
                hdrs.SEC_WEBSOCKET_ACCEPT: accept,
                hdrs.SEC_WEBSOCKET_PROTOCOL: 'chat'
            }
            return resp
        with mock.patch('aiohttp.client.os') as m_os:
            with mock.patch('aiohttp.client.ClientSession.get',
                            side_effect=mock_get) as m_req:
                # Closure reads key_data at call time, so the rebind below
                # takes effect for the second connection.
                m_os.urandom.return_value = key_data
                res = yield from aiohttp.ClientSession(loop=loop).ws_connect(
                    'http://test.org',
                    protocols=('t1', 't2', 'chat'),
                    headers=headers)
        assert isinstance(res, ClientWebSocketResponse)
        assert res.protocol == 'chat'
        assert hdrs.ORIGIN not in m_req.call_args[1]["headers"]
    yield from test_connection()
    # Generate a new ws key
    key_data = os.urandom(16)
    yield from test_connection()
@asyncio.coroutine
def test_close(loop, ws_key, key_data):
    """close() sends a 1000 close frame, marks the response closed, and is
    idempotent (a second close() returns falsy and writes nothing)."""
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key,
    }
    with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter:
        with mock.patch('aiohttp.client.os') as m_os:
            with mock.patch('aiohttp.client.ClientSession.get') as m_req:
                m_os.urandom.return_value = key_data
                m_req.return_value = helpers.create_future(loop)
                m_req.return_value.set_result(resp)
                writer = WebSocketWriter.return_value = mock.Mock()
                reader = mock.Mock()
                resp.connection.reader.set_parser.return_value = reader
                resp = yield from aiohttp.ws_connect('http://test.org',
                                                     loop=loop)
                assert not resp.closed
                # Server acknowledges with a CLOSE frame.
                msg = aiohttp.WSMessage(aiohttp.MsgType.CLOSE, b'', b'')
                reader.read.return_value = helpers.create_future(loop)
                reader.read.return_value.set_result(msg)
                res = yield from resp.close()
                writer.close.assert_called_with(1000, b'')
                assert resp.closed
                assert res
                assert resp.exception() is None
                # idempotent
                res = yield from resp.close()
                assert not res
                assert writer.close.call_count == 1
@asyncio.coroutine
def test_close_exc(loop, ws_key, key_data):
    """A reader error during close() still closes the response and is
    retrievable via exception()."""
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key,
    }
    with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter:
        with mock.patch('aiohttp.client.os') as m_os:
            with mock.patch('aiohttp.client.ClientSession.get') as m_req:
                m_os.urandom.return_value = key_data
                m_req.return_value = helpers.create_future(loop)
                m_req.return_value.set_result(resp)
                WebSocketWriter.return_value = mock.Mock()
                reader = mock.Mock()
                resp.connection.reader.set_parser.return_value = reader
                resp = yield from aiohttp.ws_connect('http://test.org',
                                                     loop=loop)
                assert not resp.closed
                # The read awaited during close() raises instead of yielding
                # a CLOSE frame.
                exc = ValueError()
                reader.read.return_value = helpers.create_future(loop)
                reader.read.return_value.set_exception(exc)
                yield from resp.close()
                assert resp.closed
                assert resp.exception() is exc
@asyncio.coroutine
def test_close_exc2(loop, ws_key, key_data):
    """A writer error during close() is stored as the response exception,
    while CancelledError must propagate to the caller."""
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key,
    }
    with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter:
        with mock.patch('aiohttp.client.os') as m_os:
            with mock.patch('aiohttp.client.ClientSession.get') as m_req:
                m_os.urandom.return_value = key_data
                m_req.return_value = helpers.create_future(loop)
                m_req.return_value.set_result(resp)
                writer = WebSocketWriter.return_value = mock.Mock()
                resp.connection.reader.set_parser.return_value = mock.Mock()
                resp = yield from aiohttp.ws_connect('http://test.org',
                                                     loop=loop)
                assert not resp.closed
                exc = ValueError()
                writer.close.side_effect = exc
                yield from resp.close()
                assert resp.closed
                assert resp.exception() is exc
                # Reset the closed flag to force a second close attempt.
                resp._closed = False
                writer.close.side_effect = asyncio.CancelledError()
                with pytest.raises(asyncio.CancelledError):
                    yield from resp.close()
@asyncio.coroutine
def test_send_data_after_close(ws_key, key_data, loop, mocker):
    """Sends and pings after the writer starts closing only log a warning
    instead of raising."""
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key,
    }
    with mock.patch('aiohttp.client.os') as m_os:
        with mock.patch('aiohttp.client.ClientSession.get') as m_req:
            m_os.urandom.return_value = key_data
            m_req.return_value = helpers.create_future(loop)
            m_req.return_value.set_result(resp)
            resp = yield from aiohttp.ws_connect('http://test.org',
                                                 loop=loop)
            resp._writer._closing = True  # simulate close in progress
    mocker.spy(ws_logger, 'warning')
    for meth, args in ((resp.ping, ()),
                       (resp.pong, ()),
                       (resp.send_str, ('s',)),
                       (resp.send_bytes, (b'b',)),
                       (resp.send_json, ({},))):
        meth(*args)
        assert ws_logger.warning.called
        ws_logger.warning.reset_mock()
@asyncio.coroutine
def test_send_data_type_errors(ws_key, key_data, loop):
    """Type mismatches raise TypeError: bytes to send_str, str to
    send_bytes, and non-JSON-serializable objects to send_json."""
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key,
    }
    with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter:
        with mock.patch('aiohttp.client.os') as m_os:
            with mock.patch('aiohttp.client.ClientSession.get') as m_req:
                m_os.urandom.return_value = key_data
                m_req.return_value = helpers.create_future(loop)
                m_req.return_value.set_result(resp)
                WebSocketWriter.return_value = mock.Mock()
                resp = yield from aiohttp.ws_connect('http://test.org',
                                                     loop=loop)
                pytest.raises(TypeError, resp.send_str, b's')
                pytest.raises(TypeError, resp.send_bytes, 'b')
                pytest.raises(TypeError, resp.send_json, set())
@asyncio.coroutine
def test_reader_read_exception(ws_key, key_data, loop):
    """A reader failure surfaces as an ERROR message from receive() and is
    stored as the response exception (msg.tp is the legacy alias)."""
    hresp = mock.Mock()
    hresp.status = 101
    hresp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key,
    }
    with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter:
        with mock.patch('aiohttp.client.os') as m_os:
            with mock.patch('aiohttp.client.ClientSession.get') as m_req:
                m_os.urandom.return_value = key_data
                m_req.return_value = helpers.create_future(loop)
                m_req.return_value.set_result(hresp)
                WebSocketWriter.return_value = mock.Mock()
                reader = mock.Mock()
                hresp.connection.reader.set_parser.return_value = reader
                resp = yield from aiohttp.ws_connect('http://test.org',
                                                     loop=loop)
                exc = ValueError()
                reader.read.return_value = helpers.create_future(loop)
                reader.read.return_value.set_exception(exc)
                msg = yield from resp.receive()
                assert msg.type == aiohttp.MsgType.ERROR
                assert msg.type is msg.tp
                assert resp.exception() is exc
@asyncio.coroutine
def test_receive_runtime_err(loop):
    """Concurrent receive() calls are forbidden: if another coroutine is
    already waiting, a second receive() raises RuntimeError."""
    resp = ClientWebSocketResponse(
        mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock(), 10.0,
        True, True, loop)
    resp._waiting = True  # simulate a receive() already in flight
    with pytest.raises(RuntimeError):
        yield from resp.receive()
@asyncio.coroutine
def test_ws_connect_close_resp_on_err(loop, ws_key, key_data):
    """On a failed handshake the underlying HTTP response is closed, so no
    connection is leaked."""
    resp = mock.Mock()
    resp.status = 500
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key
    }
    with mock.patch('aiohttp.client.os') as m_os:
        with mock.patch('aiohttp.client.ClientSession.get') as m_req:
            m_os.urandom.return_value = key_data
            m_req.return_value = helpers.create_future(loop)
            m_req.return_value.set_result(resp)
            with pytest.raises(errors.WSServerHandshakeError):
                yield from aiohttp.ws_connect('http://test.org',
                                              protocols=('t1', 't2', 'chat'),
                                              loop=loop)
    resp.close.assert_called_with()
@asyncio.coroutine
def test_ws_connect_non_overlapped_protocols(ws_key, loop, key_data):
    """If the server offers only subprotocols the client did not request,
    the negotiated protocol is None."""
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key,
        hdrs.SEC_WEBSOCKET_PROTOCOL: 'other,another'
    }
    with mock.patch('aiohttp.client.os') as m_os:
        with mock.patch('aiohttp.client.ClientSession.get') as m_req:
            m_os.urandom.return_value = key_data
            m_req.return_value = helpers.create_future(loop)
            m_req.return_value.set_result(resp)
            res = yield from aiohttp.ws_connect(
                'http://test.org',
                protocols=('t1', 't2', 'chat'),
                loop=loop)
    assert res.protocol is None
@asyncio.coroutine
def test_ws_connect_non_overlapped_protocols_2(ws_key, loop, key_data):
    """Same as test_ws_connect_non_overlapped_protocols, but routed through
    an explicit TCPConnector with force_close enabled."""
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key,
        hdrs.SEC_WEBSOCKET_PROTOCOL: 'other,another'
    }
    with mock.patch('aiohttp.client.os') as m_os:
        with mock.patch('aiohttp.client.ClientSession.get') as m_req:
            m_os.urandom.return_value = key_data
            m_req.return_value = helpers.create_future(loop)
            m_req.return_value.set_result(resp)
            connector = aiohttp.TCPConnector(loop=loop, force_close=True)
            res = yield from aiohttp.ws_connect(
                'http://test.org',
                protocols=('t1', 't2', 'chat'),
                connector=connector,
                loop=loop)
    assert res.protocol is None
| 36.25603
| 78
| 0.595743
| 2,316
| 19,542
| 4.830743
| 0.078584
| 0.065874
| 0.04764
| 0.073293
| 0.810333
| 0.772256
| 0.745531
| 0.7298
| 0.714605
| 0.692796
| 0
| 0.008381
| 0.30391
| 19,542
| 538
| 79
| 36.32342
| 0.814085
| 0.010695
| 0
| 0.703196
| 0
| 0
| 0.08502
| 0.038834
| 0
| 0
| 0
| 0
| 0.077626
| 1
| 0.057078
| false
| 0
| 0.022831
| 0.009132
| 0.093607
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
361b9e5584cd51c71422e5ab5db28dd7f16651ec
| 138
|
py
|
Python
|
randnn/networks/__init__.py
|
jqhoogland/randnn
|
8cc428e22b8a8ee1eea28ab2b96bcd79854779d2
|
[
"MIT"
] | null | null | null |
randnn/networks/__init__.py
|
jqhoogland/randnn
|
8cc428e22b8a8ee1eea28ab2b96bcd79854779d2
|
[
"MIT"
] | null | null | null |
randnn/networks/__init__.py
|
jqhoogland/randnn
|
8cc428e22b8a8ee1eea28ab2b96bcd79854779d2
|
[
"MIT"
] | null | null | null |
# Re-export every network implementation at the package level.
# Fix: the scale_free_nn import was duplicated; a star-import is
# idempotent, so the second line was pure noise.
from .dales_law_nn import *
from .gaussian_nn import *
from .scale_free_nn import *
from .sparse_nn import *
| 23
| 28
| 0.782609
| 23
| 138
| 4.347826
| 0.391304
| 0.4
| 0.48
| 0.34
| 0.54
| 0.54
| 0.54
| 0.54
| 0
| 0
| 0
| 0
| 0.144928
| 138
| 5
| 29
| 27.6
| 0.847458
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
364e685db102e4ea0ccd14b03fae925e885a053f
| 182
|
py
|
Python
|
util/str_contains.py
|
PokemonTCGDeals/BestBuyer
|
5d0abadd7043104e4c65a05a83f078144d8b0ba3
|
[
"MIT"
] | null | null | null |
util/str_contains.py
|
PokemonTCGDeals/BestBuyer
|
5d0abadd7043104e4c65a05a83f078144d8b0ba3
|
[
"MIT"
] | null | null | null |
util/str_contains.py
|
PokemonTCGDeals/BestBuyer
|
5d0abadd7043104e4c65a05a83f078144d8b0ba3
|
[
"MIT"
] | null | null | null |
def str_contains(haystack, needle):
    """Return True if *needle* occurs anywhere in *haystack*.

    Uses the idiomatic ``in`` membership test instead of ``find() >= 0``.
    Like the original, an empty needle is always found.
    """
    return needle in haystack
def str_contains_ignore_case(haystack, needle):
    """Return True if *needle* occurs in *haystack*, ignoring case.

    Lower-cases both sides (matching the original behavior) and uses the
    idiomatic ``in`` membership test.
    """
    return needle.lower() in haystack.lower()
| 26
| 57
| 0.752747
| 24
| 182
| 5.5
| 0.458333
| 0.25
| 0.212121
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006289
| 0.126374
| 182
| 6
| 58
| 30.333333
| 0.823899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
36ceed28c7ab4deb7e85bf62b92ab340c3652e0a
| 3,381
|
py
|
Python
|
NewsCrawler/items.py
|
manashmndl/NewsParser
|
1402ad308b98155cf4d3828658196688f58060ec
|
[
"MIT"
] | 12
|
2016-11-09T17:38:20.000Z
|
2018-07-29T05:50:39.000Z
|
NewsCrawler/items.py
|
manashmndl/NewsCrawler
|
1402ad308b98155cf4d3828658196688f58060ec
|
[
"MIT"
] | 20
|
2016-11-01T08:03:37.000Z
|
2017-02-26T18:21:51.000Z
|
NewsCrawler/items.py
|
manashmandal/NewsCrawler
|
1402ad308b98155cf4d3828658196688f58060ec
|
[
"MIT"
] | 6
|
2016-11-18T20:53:54.000Z
|
2019-01-15T03:33:10.000Z
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
from scrapy import Item, Field
class DailyStarItem(Item):
    """Scrapy item for one scraped Daily Star article and its NLP metadata."""
    # define the fields for your item here like:
    # name = scrapy.Field()
    _id = Field()
    category = Field()
    newspaper_section = Field()
    reporter = Field()
    last_updated = Field()
    published_date = Field()
    article = Field()
    share_count = Field()
    comment_count = Field()
    title = Field()
    url = Field()
    breadcrumb = Field()
    images = Field()
    top_tag_line = Field()
    bottom_tag_line = Field()
    image_captions = Field()
    newspaper_name = Field()
    crawl_time = Field()
    ml_tags = Field()
    sentiment = Field()
    # NER tags [only unique values]
    ner_person = Field()
    ner_money = Field()
    ner_time = Field()
    ner_organization = Field()
    ner_location = Field()
    ner_percent = Field()
    # Ner tags [considering all ocurrances]
    ner_list_person = Field()
    ner_list_money = Field()
    ner_list_time = Field()
    ner_list_organization = Field()
    ner_list_percent = Field()
    ner_list_location = Field()
    # These items are generated using 'newspaper' python package
    generated_keywords = Field()
    generated_summary = Field()
class ProthomAloItem(Item):
    """Scrapy item for one scraped Prothom Alo article.

    Shares the NER/generated field layout with DailyStarItem, but uses
    `last_update`/`news_location` instead of `last_updated`/section fields.
    """
    _id = Field()
    category = Field()
    newspaper_name = Field()
    reporter = Field()
    news_location = Field()
    article = Field()
    title = Field()
    last_update = Field()
    published_date = Field()
    breadcrumb = Field()
    images = Field()
    ml_tags = Field()
    image_captions = Field()
    sentiment = Field()
    url = Field()
    crawl_time = Field()
    # NER tags [only unique values]
    ner_person = Field()
    ner_money = Field()
    ner_time = Field()
    ner_organization = Field()
    ner_location = Field()
    ner_percent = Field()
    # Ner tags [considering all ocurrances]
    ner_list_person = Field()
    ner_list_money = Field()
    ner_list_time = Field()
    ner_list_organization = Field()
    ner_list_percent = Field()
    ner_list_location = Field()
    # These items are generated using 'newspaper' python package
    generated_keywords = Field()
    generated_summary = Field()
class DhakaTribuneItem(Item):
    """Scrapy item for one scraped Dhaka Tribune article.

    Extends the ProthomAlo layout with Dhaka Tribune specific fields
    (excerpt, shoulder, images_credit, about_reporter).
    """
    _id = Field()
    category = Field()
    newspaper_name = Field()
    reporter = Field()
    news_location = Field()
    article = Field()
    title = Field()
    last_update = Field()
    published_date = Field()
    breadcrumb = Field()
    images = Field()
    ml_tags = Field()
    image_captions = Field()
    sentiment = Field()
    url = Field()
    excerpt = Field()
    shoulder = Field()
    images_credit = Field()
    about_reporter = Field()
    crawl_time = Field()
    # NER tags [only unique values]
    ner_person = Field()
    ner_money = Field()
    ner_time = Field()
    ner_organization = Field()
    ner_location = Field()
    ner_percent = Field()
    # Ner tags [considering all ocurrances]
    ner_list_person = Field()
    ner_list_money = Field()
    ner_list_time = Field()
    ner_list_organization = Field()
    ner_list_percent = Field()
    ner_list_location = Field()
    # These items are generated using 'newspaper' python package
    generated_keywords = Field()
    generated_summary = Field()
| 24.678832
| 64
| 0.639752
| 387
| 3,381
| 5.351421
| 0.22739
| 0.139063
| 0.086915
| 0.028972
| 0.756156
| 0.727185
| 0.727185
| 0.727185
| 0.727185
| 0.727185
| 0
| 0.000396
| 0.25318
| 3,381
| 136
| 65
| 24.860294
| 0.819802
| 0.173026
| 0
| 0.862745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009804
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
7fe481e805205ebfe08828ea7eea07e766505d11
| 146
|
py
|
Python
|
Sem-5/PIP/Hands-on Exercise 1/H1Q1.py
|
rituraj-iter/ASSIGNMENTS
|
f317a2990bac43202314320d6d97356498c44603
|
[
"MIT"
] | 10
|
2021-04-24T11:46:48.000Z
|
2022-01-17T05:14:37.000Z
|
Sem-5/PIP/Hands-on Exercise 1/H1Q1.py
|
rituraj-iter/ASSIGNMENTS
|
f317a2990bac43202314320d6d97356498c44603
|
[
"MIT"
] | 2
|
2021-06-28T11:51:50.000Z
|
2021-11-01T08:21:53.000Z
|
Sem-5/PIP/Hands-on Exercise 1/H1Q1.py
|
rituraj-iter/ASSIGNMENTS
|
f317a2990bac43202314320d6d97356498c44603
|
[
"MIT"
] | 16
|
2021-04-24T11:46:58.000Z
|
2022-03-02T05:08:19.000Z
|
# Print the multi-line greeting exercise; adjacent string literals are
# concatenated at compile time, so the output is byte-identical.
print(
    "Hello World!\nHello Again\nI like typing this.\nThis is fun.\n"
    "Yay! Printing.\nI'd much rather you 'not'.\n"
    "I \"said\" do not touch this."
)
| 73
| 145
| 0.705479
| 26
| 146
| 3.961538
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130137
| 146
| 1
| 146
| 146
| 0.811024
| 0
| 0
| 0
| 0
| 1
| 0.882759
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
3d608e1bd1f850f4859d5adaa66365d7f0cddf0b
| 1,124
|
py
|
Python
|
problema10/operaciones.py
|
aaronorduna/PC3
|
08e571430d8e301a1682a14851c1675613c1a16e
|
[
"Apache-2.0"
] | null | null | null |
problema10/operaciones.py
|
aaronorduna/PC3
|
08e571430d8e301a1682a14851c1675613c1a16e
|
[
"Apache-2.0"
] | null | null | null |
problema10/operaciones.py
|
aaronorduna/PC3
|
08e571430d8e301a1682a14851c1675613c1a16e
|
[
"Apache-2.0"
] | null | null | null |
def suma():
    """Read two numbers from stdin and print their sum.

    Prompts and error messages stay in Spanish to preserve the original UI.
    """
    try:
        a = float(input("Ingrese un numero: "))
        b = float(input("Ingrese un numero: "))
    except ValueError:
        # Only the float() conversions can fail with bad input; catching
        # ValueError specifically (instead of a bare except) avoids masking
        # unrelated errors such as KeyboardInterrupt.
        print("Error: tipo de dato inválido")
    else:
        print(f"{a} + {b} = {a + b}")

suma()
def resta():
    """Read two numbers from stdin and print their difference.

    Prompts and error messages stay in Spanish to preserve the original UI.
    """
    try:
        a = float(input("Ingrese un numero: "))
        b = float(input("Ingrese un numero: "))
    except ValueError:
        # Narrowed from a bare except: only bad numeric input is expected.
        print("Error: tipo de dato inválido")
    else:
        print(f"{a} - {b} = {a - b}")

resta()
def producto():
    """Read two numbers from stdin and print their product.

    Prompts and error messages stay in Spanish to preserve the original UI.
    """
    try:
        a = float(input("Ingrese un numero: "))
        b = float(input("Ingrese un numero: "))
    except ValueError:
        # Narrowed from a bare except: only bad numeric input is expected.
        print("Error: tipo de dato inválido")
    else:
        print(f"{a} * {b} = {a * b}")

producto()
def division():
    """Repeatedly read two numbers and print their quotient.

    Loops until a division succeeds.  Rejects non-numeric input and
    division by zero with specific (Spanish) error messages.
    """
    while True:
        try:
            a = float(input("Ingrese un numero: "))
            b = float(input("Ingrese un numero: "))
        except ValueError:
            # Narrowed from a bare except: only bad numeric input expected.
            print("Error: tipo de dato inválido")
            continue
        if b == 0:
            # Bug fix: the original printed this message but still evaluated
            # a / b, raising ZeroDivisionError and then printing the wrong
            # "tipo de dato inválido" error before looping.
            print("Error: No es posible dividir entre cero")
            continue
        print(f"{a}/{b} = {a / b}")
        break

division()
| 22.039216
| 65
| 0.459964
| 131
| 1,124
| 3.946565
| 0.251908
| 0.154739
| 0.263056
| 0.294004
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0
| 0.001443
| 0.383452
| 1,124
| 50
| 66
| 22.48
| 0.744589
| 0
| 0
| 0.540541
| 0
| 0
| 0.351679
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108108
| false
| 0
| 0
| 0
| 0.108108
| 0.243243
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43ef361faa7fcbd1cc05d0f4715057f6c20518b2
| 2,915
|
py
|
Python
|
cotidia/admin/tests/forms/admin/examplemodelone.py
|
hayden5-mwac/cotidia-admin
|
cfdd9d2677dd1098019fafbec8a6d07e1a42f9eb
|
[
"BSD-3-Clause"
] | 2
|
2019-07-20T14:43:21.000Z
|
2021-04-30T15:43:49.000Z
|
cotidia/admin/tests/forms/admin/examplemodelone.py
|
hayden5-mwac/cotidia-admin
|
cfdd9d2677dd1098019fafbec8a6d07e1a42f9eb
|
[
"BSD-3-Clause"
] | 16
|
2020-07-17T04:26:20.000Z
|
2022-03-23T14:47:31.000Z
|
cotidia/admin/tests/forms/admin/examplemodelone.py
|
hayden5-mwac/cotidia-admin
|
cfdd9d2677dd1098019fafbec8a6d07e1a42f9eb
|
[
"BSD-3-Clause"
] | 1
|
2020-05-18T20:56:45.000Z
|
2020-05-18T20:56:45.000Z
|
from django import forms
from betterforms.forms import BetterModelForm
from cotidia.admin.tests.models import ExampleModelOne
class ExampleModelOneAddForm(BetterModelForm):
    """Admin "add" form for ExampleModelOne: every example field grouped
    into a single "info" fieldset."""
    class Meta:
        model = ExampleModelOne
        # NOTE(review): `fields` and the fieldset's "fields" tuple appear to
        # duplicate each other intentionally (betterforms convention) --
        # keep both lists in sync when adding a field.
        fields = [
            "integer_field",
            "float_field",
            "decimal_field",
            "boolean_field",
            "nullboolean_field",
            "char_field",
            "text_field",
            "email_field",
            "slug_field",
            "date_field",
            "datetime_field",
            "time_field",
            "duration_field",
            "other_model",
            "many_to_many_field",
        ]
        fieldsets = (
            (
                "info",
                {
                    "fields": (
                        "integer_field",
                        "float_field",
                        "decimal_field",
                        "boolean_field",
                        "nullboolean_field",
                        "char_field",
                        "text_field",
                        "email_field",
                        "slug_field",
                        "date_field",
                        "datetime_field",
                        "time_field",
                        "duration_field",
                        "other_model",
                        "many_to_many_field",
                    ),
                    "legend": "Example Model One details",
                },
            ),
        )
class ExampleModelOneUpdateForm(BetterModelForm):
    """Admin "update" form for ExampleModelOne; identical field layout to
    the add form (a separate class so the two can diverge later)."""
    class Meta:
        model = ExampleModelOne
        # NOTE(review): `fields` and the fieldset's "fields" tuple appear to
        # duplicate each other intentionally (betterforms convention) --
        # keep both lists in sync when adding a field.
        fields = [
            "integer_field",
            "float_field",
            "decimal_field",
            "boolean_field",
            "nullboolean_field",
            "char_field",
            "text_field",
            "email_field",
            "slug_field",
            "date_field",
            "datetime_field",
            "time_field",
            "duration_field",
            "other_model",
            "many_to_many_field",
        ]
        fieldsets = (
            (
                "info",
                {
                    "fields": (
                        "integer_field",
                        "float_field",
                        "decimal_field",
                        "boolean_field",
                        "nullboolean_field",
                        "char_field",
                        "text_field",
                        "email_field",
                        "slug_field",
                        "date_field",
                        "datetime_field",
                        "time_field",
                        "duration_field",
                        "other_model",
                        "many_to_many_field",
                    ),
                    "legend": "Example Model One details",
                },
            ),
        )
| 29.15
| 58
| 0.377702
| 176
| 2,915
| 5.869318
| 0.227273
| 0.050339
| 0.0697
| 0.089061
| 0.838335
| 0.838335
| 0.838335
| 0.838335
| 0.838335
| 0.838335
| 0
| 0
| 0.537221
| 2,915
| 99
| 59
| 29.444444
| 0.765752
| 0
| 0
| 0.83871
| 0
| 0
| 0.28199
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032258
| 0
| 0.075269
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a1592a554d7b3a0dd48ca1f9ae8b3344cfc39c41
| 17,224
|
py
|
Python
|
Documentation/DataFormats/python/RecoJets_dataformats.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
Documentation/DataFormats/python/RecoJets_dataformats.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
Documentation/DataFormats/python/RecoJets_dataformats.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
'''
Created on Jun 26, 2013
@author: Mantas Stankevicius
@contact: mantas.stankevicius@cern.ch
http://cmsdoxy.web.cern.ch/cmsdoxy/dataformats/
@responsible:
'''
json = {
"full": {
"title": "RecoJets collections (in RECO and AOD)",
"data": [
{
"instance": "ak7JetExtender",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4JetID",
"container": "*",
"desc": "No documentation"
},
{
"instance": "sc5JetID",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ic5JetID",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak5JetExtender",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4JetTracksAssociatorExplicit",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak7JetTracksAssociatorAtVertex",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak7JetTracksAssociatorAtCaloFace",
"container": "*",
"desc": "No documentation"
},
{
"instance": "gk5JetID",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt4JetID",
"container": "*",
"desc": "No documentation"
},
{
"instance": "*",
"container": "recoCaloJets",
"desc": "No documentation"
},
{
"instance": "*",
"container": "recoTrackJets",
"desc": "No documentation"
},
{
"instance": "caloTowers",
"container": "*",
"desc": "No documentation"
},
{
"instance": "CastorTowerReco",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ic5JetTracksAssociatorAtVertex",
"container": "*",
"desc": "No documentation"
},
{
"instance": "iterativeCone5JetExtender",
"container": "*",
"desc": "No documentation"
},
{
"instance": "iterativeCone5JetTracksAssociatorAtCaloFace",
"container": "*",
"desc": "No documentation"
},
{
"instance": "sisCone5JetTracksAssociatorAtCaloFace",
"container": "*",
"desc": "No documentation"
},
{
"instance": "sisCone5JetTracksAssociatorAtVertex",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt4JetTracksAssociatorAtVertex",
"container": "*",
"desc": "No documentation"
},
{
"instance": "sisCone5JetExtender",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt4JetExtender",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt4JetTracksAssociatorAtCaloFace",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4JetTracksAssociatorAtCaloFace",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4JetTracksAssociatorAtVertex",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt6GenJets",
"container": "reco::GenJetCollection",
"desc": "Fastjet kT R=0.6 jets reconstructed from stable generator particles"
},
{
"instance": "ak7GenJets",
"container": "reco::GenJetCollection",
"desc": "Fastjet Anti-kT R=0.7 jets reconstructed from stable generator particles. Note that the label is antikt7GenJets for CMSSW_3_1_X (Summer09 MC production)"
},
{
"instance": "kt4GenJets",
"container": "reco::GenJetCollection",
"desc": "Fastjet kT R=0.4 jets reconstructed from stable generator particles"
},
{
"instance": "ak7CastorJetID",
"container": "reco::CastorJetIDValueMap",
"desc": "Corresponding JetID object to go with the ak7CastorJets, contains various information on how a jet in CASTOR looks, see CASTOR reconstruction page for more info"
},
{
"instance": "ak5GenJets",
"container": "reco::GenJetCollection",
"desc": "Fastjet Anti-kT R=0.5 jets reconstructed from stable generator particles. Note that the label is antikt5GenJets for CMSSW_3_1_X (Summer09 MC production)"
},
{
"instance": "kt4TrackJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak7CastorJets",
"container": "reco::CastorTowerCollection",
"desc": "Fastjet Anti-kT R=0.7 jets reconstructed from CastorTowers"
},
{
"instance": "JetPlusTrackZSPCorJetAntiKt5",
"container": "reco::JPTJetCollection",
"desc": "Fastjet Anti-kT R=0.5 jets reconstructed from CaloTowers, corrected with track response within the jet cone."
},
{
"instance": "ak4TrackJets",
"container": "reco::TrackJetCollection",
"desc": "Fastjet Anti-kT R=0.5 jets reconstructed from tracks."
},
{
"instance": "kt6PFJets",
"container": "reco::PFJetCollection",
"desc": "Fastjet kT R=0.6 jets reconstructed from PF particles"
},
{
"instance": "iterativeCone5PFJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "*",
"container": "recoPFJets",
"desc": "No documentation"
},
{
"instance": "*",
"container": "recoJPTJets",
"desc": "No documentation"
},
{
"instance": "towerMaker",
"container": "*",
"desc": "No documentation"
},
{
"instance": "trackRefsForJets",
"container": "recoRecoChargedRefCandidates",
"desc": "No documentation"
},
{
"instance": "iterativeCone5JetTracksAssociatorAtVertex",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt4PFJets",
"container": "reco::PFJetCollection",
"desc": "Fastjet kT R=0.4 jets reconstructed from PF particles"
},
{
"instance": "ak7PFJets",
"container": "reco::PFJetCollection",
"desc": "Fastjet Anti-kT R=0.7 jets reconstructed from PF particles. Note that the label is antikt7PFJets for CMSSW_3_1_X (Summer09 MC production)"
},
{
"instance": "ak4CaloJets",
"container": "reco::CaloJetCollection",
"desc": "Fastjet Anti-kT R=0.5 jets reconstructed from CaloTowers with pT>0.5 GeV. Note that the label is antikt5CaloJets for CMSSW_3_1_X (Summer09 MC production)"
},
{
"instance": "kt6CaloJets",
"container": "reco::CaloJetCollection",
"desc": "Fastjet kT R=0.6 jets reconstructed from CaloTowers with pT>0.5 GeV"
},
{
"instance": "kt4CaloJets",
"container": "reco::CaloJetCollection",
"desc": "Fastjet kT R=0.4 jets reconstructed from CaloTowers with pT>0.5 GeV"
},
{
"instance": "ca4JetID",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4PFJets",
"container": "reco::PFJetCollection",
"desc": "Fastjet Anti-kT R=0.5 jets reconstructed from PF particles. Note that the label is antikt5PFJets for CMSSW_3_1_X (Summer09 MC production)"
},
{
"instance": "iterativeCone15CaloJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "iterativeCone5CaloJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak7CaloJets",
"container": "reco::CaloJetCollection",
"desc": "Fastjet Anti-kT R=0.7 jets reconstructed from CaloTowers with pT>0.5 GeV. Note that the label is antikt7CaloJets for CMSSW_3_1_X (Summer09 MC production)"
}
]
},
"aod": {
"title": "RecoJets collections (in AOD only)",
"data": [
{
"instance": "kt6PFJetsCentralNeutral",
"container": "double",
"desc": "No documentation"
},
{
"instance": "kt6PFJetsCentralNeutralTight",
"container": "double",
"desc": "No documentation"
},
{
"instance": "fixedGridRho*",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4PFJetsCHS*",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4CaloJets",
"container": "reco::CaloJetCollection",
"desc": "Fastjet Anti-kT R=0.5 jets reconstructed from CaloTowers with pT>0.5 GeV. Note that the label is antikt5CaloJets for CMSSW_3_1_X (Summer09 MC production)"
},
{
"instance": "ak4PFJets",
"container": "reco::PFJetCollection",
"desc": "Fastjet Anti-kT R=0.5 jets reconstructed from PF particles. Note that the label is antikt5PFJets for CMSSW_3_1_X (Summer09 MC production)"
},
{
"instance": "kt6PFJets",
"container": "reco::PFJetCollection",
"desc": "Fastjet kT R=0.6 jets reconstructed from PF particles"
},
{
"instance": "ak4TrackJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak7PFJets",
"container": "reco::PFJetCollection",
"desc": "Fastjet Anti-kT R=0.7 jets reconstructed from PF particles. Note that the label is antikt7PFJets for CMSSW_3_1_X (Summer09 MC production)"
},
{
"instance": "caloTowers",
"container": "*",
"desc": "No documentation"
},
{
"instance": "trackRefsForJets",
"container": "recoRecoChargedRefCandidates",
"desc": "No documentation"
},
{
"instance": "ak4JetTracksAssociatorAtVertex",
"container": " ",
"desc": "tracks associated to all ak4CaloJets within a Cone R=0.5 at the vertex"
},
{
"instance": "CastorTowerReco",
"container": "reco::CastorTowerCollection",
"desc": "Collection of towers in CASTOR (RecHits in one phi sector summed over z)"
},
{
"instance": "ak7JetTracksAssociatorAtVertex",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4JetTracksAssociatorExplicit",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak5JetExtender",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt4JetExtender",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4JetID",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak7JetExtender",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak7CastorJetID",
"container": "reco::CastorJetIDValueMap",
"desc": "Corresponding JetID object to go with the ak7CastorJets, contains various information on how a jet in CASTOR looks, see CASTOR reconstruction page for more info"
},
{
"instance": "ak7CastorJets",
"container": "reco::CastorTowerCollection",
"desc": "Fastjet Anti-kT R=0.7 jets reconstructed from CastorTowers"
},
{
"instance": "kt6PFJetsCentralChargedPileUp",
"container": "double",
"desc": "No documentation"
},
{
"instance": "kt6CaloJetsCentral",
"container": "double",
"desc": "No documentation"
}
]
},
"reco": {
"title": "RecoJets collections (in RECO only)",
"data": [
{
"instance": "kt4JetTracksAssociatorAtCaloFace",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt4JetExtender",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4JetTracksAssociatorAtVertex",
"container": " ",
"desc": "tracks associated to all ak4CaloJets within a Cone R=0.5 at the vertex"
},
{
"instance": "ak4JetTracksAssociatorAtCaloFace",
"container": "*",
"desc": "No documentation"
},
{
"instance": "iterativeCone5JetTracksAssociatorAtVertex",
"container": "*",
"desc": "No documentation"
},
{
"instance": "iterativeCone5JetTracksAssociatorAtCaloFace",
"container": "*",
"desc": "No documentation"
},
{
"instance": "iterativeCone5JetExtender",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt4JetTracksAssociatorAtVertex",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4JetTracksAssociatorExplicit",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak5JetExtender",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt4CaloJets",
"container": "reco::CaloJetCollection",
"desc": "Fastjet kT R=0.4 jets reconstructed from CaloTowers with pT>0.5 GeV"
},
{
"instance": "ak4CaloJets",
"container": "reco::CaloJetCollection",
"desc": "Fastjet Anti-kT R=0.5 jets reconstructed from CaloTowers with pT>0.5 GeV. Note that the label is antikt5CaloJets for CMSSW_3_1_X (Summer09 MC production)"
},
{
"instance": "iterativeCone5CaloJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt4PFJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4PFJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "iterativeCone5PFJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4PFJetsCHS*",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak4TrackJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "JetPlusTrackZSPCorJetAntiKt5",
"container": "reco::JPTJetCollection",
"desc": "Fastjet Anti-kT R=0.5 jets reconstructed from CaloTowers, corrected with track response within the jet cone."
},
{
"instance": "trackRefsForJets",
"container": "recoRecoChargedRefCandidates",
"desc": "No documentation"
},
{
"instance": "kt4TrackJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "towerMaker",
"container": "*",
"desc": "No documentation"
},
{
"instance": "caloTowers",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ic5JetTracksAssociatorAtVertex",
"container": "*",
"desc": "No documentation"
},
{
"instance": "CastorTowerReco",
"container": "reco::CastorTowerCollection",
"desc": "Collection of towers in CASTOR (RecHits in one phi sector summed over z)"
},
{
"instance": "fixedGridRho*",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt6PFJetsCentralNeutral",
"container": "double",
"desc": "No documentation"
},
{
"instance": "kt6PFJetsCentralNeutralTight",
"container": "double",
"desc": "No documentation"
},
{
"instance": "kt6CaloJets",
"container": "reco::CaloJetCollection",
"desc": "Fastjet kT R=0.6 jets reconstructed from CaloTowers with pT>0.5 GeV"
},
{
"instance": "ak7CaloJets",
"container": "reco::CaloJetCollection",
"desc": "Fastjet Anti-kT R=0.7 jets reconstructed from CaloTowers with pT>0.5 GeV. Note that the label is antikt7CaloJets for CMSSW_3_1_X (Summer09 MC production)"
},
{
"instance": "iterativeCone15CaloJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt6PFJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak7PFJets",
"container": "*",
"desc": "No documentation"
},
{
"instance": "kt6PFJetsCentralChargedPileUp",
"container": "double",
"desc": "No documentation"
},
{
"instance": "kt6CaloJetsCentral",
"container": "double",
"desc": "No documentation"
},
{
"instance": "ak4JetID",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak7JetExtender",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak7JetTracksAssociatorAtCaloFace",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak7JetTracksAssociatorAtVertex",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ak7CastorJetID",
"container": "reco::CastorJetIDValueMap",
"desc": "Corresponding JetID object to go with the ak7CastorJets, contains various information on how a jet in CASTOR looks, see CASTOR reconstruction page for more info"
},
{
"instance": "ak7CastorJets",
"container": "reco::CastorTowerCollection",
"desc": "Fastjet Anti-kT R=0.7 jets reconstructed from CastorTowers"
},
{
"instance": "kt4JetID",
"container": "*",
"desc": "No documentation"
},
{
"instance": "ic5JetID",
"container": "*",
"desc": "No documentation"
}
]
}
}
| 28.097879
| 176
| 0.560091
| 1,366
| 17,224
| 7.038067
| 0.128111
| 0.052424
| 0.166008
| 0.230289
| 0.950177
| 0.923445
| 0.920429
| 0.825463
| 0.799979
| 0.774288
| 0
| 0.019601
| 0.283209
| 17,224
| 612
| 177
| 28.143791
| 0.759112
| 0.008941
| 0
| 0.553156
| 0
| 0.026578
| 0.60777
| 0.116308
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a1a284284641a21badfa6d0822028c72002fdd96
| 4,466
|
py
|
Python
|
ensemble.py
|
shijun18/Spine_Seg
|
90c41d8ee08235c43bd3a5236da5a0ee7066fced
|
[
"MIT"
] | 3
|
2021-07-26T08:01:55.000Z
|
2021-12-28T12:40:30.000Z
|
ensemble.py
|
shijun18/Spine_Seg
|
90c41d8ee08235c43bd3a5236da5a0ee7066fced
|
[
"MIT"
] | null | null | null |
ensemble.py
|
shijun18/Spine_Seg
|
90c41d8ee08235c43bd3a5236da5a0ee7066fced
|
[
"MIT"
] | null | null | null |
import sys
import os
import SimpleITK as sitk
import numpy as np
from tqdm import tqdm
def convert_to_onehot(label, classes):
    """Expand an integer label map into a stacked binary (one-hot) volume.

    Returns a uint8 array of shape ``(classes + 1,) + label.shape`` whose
    channel ``i`` is 1 exactly where ``label == i`` (channel 0 is background).
    """
    # Broadcast one comparison per class id instead of looping channel by
    # channel: channel_ids has shape (classes+1, 1, ..., 1) so it compares
    # against every voxel of `label` at once.
    channel_ids = np.arange(classes + 1).reshape((-1,) + (1,) * label.ndim)
    return (label == channel_ids).astype(np.uint8)
# Model versions whose predictions are ensembled, and the per-position
# weights used in the weighted one-hot vote below.
version_list = ['v4.3-balance','v4.3-all','v4.10-balance']
weight = [2,3,2]

# Stage 1: for each version, fuse its three cross-validation folds into a
# single weighted-vote segmentation per case.
# NOTE(review): `weight` is indexed positionally here, so in this loop it
# weights folds 1-3, while in the stage-2 loop below it weights the three
# versions — confirm this dual use is intended.
for ver in version_list:
    root_path = './post_result/Spine/origin/{}/'.format(ver)
    for part in [9,10]:
        # `part` doubles as the number of foreground classes for this dataset
        # split (Part_9 -> 9 classes, Part_10 -> 10 classes).
        num_classes = part
        # One prediction directory per fold (fold1..fold3).
        result_list = [root_path + f'Part_{part}/' + f'fold{case}' for case in range(1,4)]
        # save_folder = './result/Spine/v4.10-balance/Part_{}/fusion/'.format(part)
        save_folder = './post_result/Spine/{}/Part_{}/weighted_fusion/'.format(ver,part)
        if not os.path.exists(save_folder):
            os.makedirs(save_folder)
        # Each item is one case's image file, present in every fold directory.
        for item in tqdm(os.listdir(result_list[0])):
            img_list = [os.path.join(case, item) for case in result_list]
            # Read the first fold only to recover geometry metadata
            # (spacing/origin/direction) to stamp onto the fused output.
            data = sitk.ReadImage(img_list[0])
            label = sitk.GetArrayFromImage(data).astype(np.uint8)
            spacing = data.GetSpacing()
            origin = data.GetOrigin()
            direction = data.GetDirection()
            # Commented-out alternative: simple per-class majority vote.
            '''
            final_label = np.zeros_like(label,dtype=np.uint8)
            for z in range(num_classes):
            tmp_roi = np.zeros_like(final_label,dtype=np.uint8)
            for img_path in img_list:
            data = sitk.ReadImage(img_path)
            tmp_label = sitk.GetArrayFromImage(data).astype(np.uint8)
            tmp_roi += (tmp_label==z+1).astype(np.uint8)
            final_label[tmp_roi > len(result_list)//2] = z+1
            '''
            # Weighted soft vote: accumulate weighted one-hot masks per fold,
            # then take the argmax over the class axis.
            final_label = np.zeros((num_classes+1,) + label.shape,dtype=np.uint8)
            for i,img_path in enumerate(img_list):
                data = sitk.ReadImage(img_path)
                tmp_label = sitk.GetArrayFromImage(data).astype(np.uint8)
                final_label += convert_to_onehot(tmp_label,num_classes)*weight[i]
            final_label = np.argmax(final_label,axis=0)
            # Re-wrap as a SimpleITK image carrying the original geometry.
            sitk_data = sitk.GetImageFromArray(final_label)
            sitk_data.SetSpacing(spacing)
            sitk_data.SetOrigin(origin)
            sitk_data.SetDirection(direction)
            save_path = os.path.join(save_folder,item)
            sitk.WriteImage(sitk_data, save_path)
# Stage 2: fuse the per-version fusion results (one per entry in
# version_list) into a final ensemble, again by weighted one-hot voting.
# Inputs are the stage-1 outputs written under .../{ver}/Part_{part}/weighted_fusion/.
for part in [9,10]:
    save_folder = './post_result/Spine/final/Part_{}/weighted_fusion/'.format(part)
    # save_folder = './result/Spine/final/Part_{}/fusion/'.format(part)
    num_classes = part
    # `ver` here is the comprehension variable iterating version_list, not a
    # leftover from the stage-1 loop above.
    result_list = [ './post_result/Spine/{}/Part_{}/weighted_fusion/'.format(ver,part) for ver in version_list]
    # result_list = [ './result/Spine/{}/Part_{}/fusion/'.format(ver,part) for ver in version_list]
    # save_folder = './result/Spine/v4.10-balance/Part_{}/fusion/'.format(part)
    if not os.path.exists(save_folder):
        os.makedirs(save_folder)
    # Each item is one case's image file, present for every version.
    for item in tqdm(os.listdir(result_list[0])):
        img_list = [os.path.join(case, item) for case in result_list]
        # Read the first version only to recover geometry metadata
        # (spacing/origin/direction) to stamp onto the fused output.
        data = sitk.ReadImage(img_list[0])
        label = sitk.GetArrayFromImage(data).astype(np.uint8)
        spacing = data.GetSpacing()
        origin = data.GetOrigin()
        direction = data.GetDirection()
        # Commented-out alternative: simple per-class majority vote.
        '''
        final_label = np.zeros_like(label,dtype=np.uint8)
        for z in range(num_classes):
        tmp_roi = np.zeros_like(final_label,dtype=np.uint8)
        for img_path in img_list:
        data = sitk.ReadImage(img_path)
        tmp_label = sitk.GetArrayFromImage(data).astype(np.uint8)
        tmp_roi += (tmp_label==z+1).astype(np.uint8)
        final_label[tmp_roi > len(result_list)//2] = z+1
        '''
        # Weighted soft vote across versions: accumulate weighted one-hot
        # masks (weight[i] matches version_list[i]), then argmax per voxel.
        final_label = np.zeros((num_classes+1,) + label.shape,dtype=np.uint8)
        for i,img_path in enumerate(img_list):
            data = sitk.ReadImage(img_path)
            tmp_label = sitk.GetArrayFromImage(data).astype(np.uint8)
            final_label += convert_to_onehot(tmp_label,num_classes)*weight[i]
        final_label = np.argmax(final_label,axis=0)
        # Re-wrap as a SimpleITK image carrying the original geometry.
        sitk_data = sitk.GetImageFromArray(final_label)
        sitk_data.SetSpacing(spacing)
        sitk_data.SetOrigin(origin)
        sitk_data.SetDirection(direction)
        save_path = os.path.join(save_folder,item)
        sitk.WriteImage(sitk_data, save_path)
| 39.875
| 111
| 0.618451
| 592
| 4,466
| 4.459459
| 0.14527
| 0.07197
| 0.039394
| 0.039773
| 0.85947
| 0.801894
| 0.801894
| 0.801894
| 0.801894
| 0.734091
| 0
| 0.016546
| 0.25571
| 4,466
| 111
| 112
| 40.234234
| 0.777678
| 0.068742
| 0
| 0.71875
| 0
| 0
| 0.071429
| 0.054273
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015625
| false
| 0
| 0.078125
| 0
| 0.109375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a1a59f6213b88434f2edae67ae9a8f72c142d71b
| 4,374
|
py
|
Python
|
tests/parser/if_statement_test.py
|
OtavioHenrique/yalul
|
ce99e32365ed5607527b9f2f39705ad5d9e20ba2
|
[
"MIT"
] | 1
|
2021-04-01T20:22:36.000Z
|
2021-04-01T20:22:36.000Z
|
tests/parser/if_statement_test.py
|
OtavioHenrique/yalul
|
ce99e32365ed5607527b9f2f39705ad5d9e20ba2
|
[
"MIT"
] | 1
|
2020-11-20T22:24:38.000Z
|
2020-11-20T22:24:38.000Z
|
tests/parser/if_statement_test.py
|
OtavioHenrique/yalul
|
ce99e32365ed5607527b9f2f39705ad5d9e20ba2
|
[
"MIT"
] | null | null | null |
from yalul.lex.token import Token
from yalul.lex.token_type import TokenType
from yalul.parser import Parser
from yalul.parsers.ast.nodes.statements.expressions.grouping import Grouping
from yalul.parsers.ast.nodes.statements.expressions.binary import Binary
from yalul.parsers.ast.nodes.statements.expressions.var_assignment import VarAssignment
from yalul.parsers.ast.nodes.statements.if_statement import If
class TestIfStatements:
    """Test parser generating if statements"""

    def test_parser_run_generates_correct_ast_if_statements(self):
        """
        Validates if parser is generating a correct AST for if statements
        """
        # Token stream for:  if (42 > 1) { everything = 42; 1 + 90; }
        token_spec = [
            (TokenType.IF, 'if'),
            (TokenType.LEFT_PAREN, '('),
            (TokenType.INTEGER, '42'),
            (TokenType.GREATER, '>'),
            (TokenType.INTEGER, '1'),
            (TokenType.RIGHT_PAREN, ')'),
            (TokenType.LEFT_BRACE, '{'),
            (TokenType.IDENTIFIER, 'everything'),
            (TokenType.EQUAL, '='),
            (TokenType.INTEGER, '42'),
            (TokenType.END_STATEMENT, 'End of Statement'),
            (TokenType.INTEGER, '1'),
            (TokenType.SUM, '+'),
            (TokenType.INTEGER, '90'),
            (TokenType.END_STATEMENT, 'End of Statement'),
            (TokenType.RIGHT_BRACE, '}'),
            (TokenType.END_STATEMENT, 'End of Statement'),
            (TokenType.EOF, 'End of File'),
        ]
        tokens = [Token(token_type, value) for token_type, value in token_spec]

        if_node = Parser(tokens).parse().ast.statements[0]

        # Root node is an If whose condition is the parenthesized binary 42 > 1.
        assert type(if_node) is If
        assert type(if_node.condition) is Grouping
        assert type(if_node.condition.value) is Binary
        # Then-block holds the assignment followed by the bare binary expression;
        # with no `else` written, else_block stays None.
        assert len(if_node.then_block.statements) == 2
        assert type(if_node.then_block.statements[0]) is VarAssignment
        assert type(if_node.then_block.statements[1]) is Binary
        assert if_node.else_block is None
class TestIfElseStatements:
    """Test parser generating else statements"""

    def test_parser_run_generates_correct_ast_if_else_statements(self):
        """
        Validates if parser is generating a correct AST for if else statements
        """
        # Token stream for:
        #   if (42 > 1) { everything = 42; 1 + 90; } else { everything = 42; }
        tokens = [
            Token(TokenType.IF, 'if'),
            Token(TokenType.LEFT_PAREN, '('),
            Token(TokenType.INTEGER, '42'),
            Token(TokenType.GREATER, '>'),
            Token(TokenType.INTEGER, '1'),
            Token(TokenType.RIGHT_PAREN, ')'),
            Token(TokenType.LEFT_BRACE, '{'),
            Token(TokenType.IDENTIFIER, 'everything'),
            Token(TokenType.EQUAL, '='),
            Token(TokenType.INTEGER, '42'),
            Token(TokenType.END_STATEMENT, 'End of Statement'),
            Token(TokenType.INTEGER, '1'),
            Token(TokenType.SUM, '+'),
            Token(TokenType.INTEGER, '90'),
            Token(TokenType.END_STATEMENT, 'End of Statement'),
            Token(TokenType.RIGHT_BRACE, '}'),
            Token(TokenType.ELSE, 'else'),
            Token(TokenType.LEFT_BRACE, '{'),
            Token(TokenType.IDENTIFIER, 'everything'),
            Token(TokenType.EQUAL, '='),
            Token(TokenType.INTEGER, '42'),
            Token(TokenType.END_STATEMENT, 'End of Statement'),
            Token(TokenType.RIGHT_BRACE, '}'),
            Token(TokenType.END_STATEMENT, 'End of Statement'),
            Token(TokenType.EOF, 'End of File')
        ]

        parser_response = Parser(tokens).parse()
        first_statement_ast = parser_response.ast.statements[0]

        # Parse must succeed without errors and yield an If root node whose
        # condition is the parenthesized binary expression 42 > 1.
        assert len(parser_response.errors()) == 0
        assert type(first_statement_ast) is If
        assert type(first_statement_ast.condition) is Grouping
        assert type(first_statement_ast.condition.value) is Binary
        # Then-block: the assignment plus the bare binary expression.
        assert len(first_statement_ast.then_block.statements) == 2
        assert type(first_statement_ast.then_block.statements[0]) is VarAssignment
        assert type(first_statement_ast.then_block.statements[1]) is Binary
        # Else-block: exactly the single assignment statement.
        assert first_statement_ast.else_block is not None
        assert len(first_statement_ast.else_block.statements) == 1
        assert type(first_statement_ast.else_block.statements[0]) is VarAssignment
| 43.74
| 87
| 0.648605
| 488
| 4,374
| 5.637295
| 0.137295
| 0.21883
| 0.111232
| 0.095965
| 0.881861
| 0.869139
| 0.830607
| 0.781534
| 0.781534
| 0.747365
| 0
| 0.008745
| 0.241884
| 4,374
| 99
| 88
| 44.181818
| 0.820869
| 0.048468
| 0
| 0.759494
| 0
| 0
| 0.050426
| 0
| 0
| 0
| 0
| 0
| 0.21519
| 1
| 0.025316
| false
| 0
| 0.088608
| 0
| 0.139241
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a1dcf70c32c8a30684d4438147c20a5ea326d1de
| 92,979
|
py
|
Python
|
rmgpy/test_data/testing_database/thermo/groups/gauche.py
|
keceli/RMG-Py
|
17c7870195a4feb6e8bf8974292f9bcdca1a1d9d
|
[
"MIT"
] | null | null | null |
rmgpy/test_data/testing_database/thermo/groups/gauche.py
|
keceli/RMG-Py
|
17c7870195a4feb6e8bf8974292f9bcdca1a1d9d
|
[
"MIT"
] | null | null | null |
rmgpy/test_data/testing_database/thermo/groups/gauche.py
|
keceli/RMG-Py
|
17c7870195a4feb6e8bf8974292f9bcdca1a1d9d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
name = "Gauche Interaction Corrections"
shortDesc = u""
longDesc = u"""
"""
entry(
index = 0,
label = "CsOsCdSs",
group =
"""
1 * [Cs,Os,Cd,Ss] u0
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 1,
label = "Cs(RRRR)",
group =
"""
1 * Cs u0
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 2,
label = "Cs(CsRRR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S}
3 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 3,
label = "Cs(Cs(CsRR)RRR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 4,
label = "Cs(Cs(CsCsR)RRR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 5,
label = "Cs(Cs(CsCsCs)RRR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 6,
label = "Cs(CsCsRR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S}
3 Cs u0 {1,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 7,
label = "Cs(Cs(CsRR)CsRR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 8,
label = "Cs(Cs(CsRR)Cs(CsRR)RR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 9,
label = "Cs(Cs(CsCsR)CsRR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 10,
label = "Cs(Cs(CsCsR)Cs(CsRR)RR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 11,
label = "Cs(Cs(CsCsR)Cs(CsCsR)RR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 12,
label = "Cs(Cs(CsCsCs)CsRR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 13,
label = "Cs(Cs(CsCsCs)Cs(CsRR)RR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 14,
label = "Cs(Cs(CsCsCs)Cs(CsCsR)RR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1.2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 15,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)RR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1.6,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 16,
label = "Cs(CsCsCsR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S}
3 Cs u0 {1,S}
4 Cs u0 {1,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 17,
label = "Cs(Cs(CsRR)CsCsR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 18,
label = "Cs(Cs(CsRR)Cs(CsRR)CsR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 19,
label = "Cs(Cs(CsRR)Cs(CsRR)Cs(CsRR)R)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1.2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 20,
label = "Cs(Cs(CsCsR)CsCsR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 21,
label = "Cs(Cs(CsCsR)Cs(CsRR)CsR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1.2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
# ---------------------------------------------------------------------------
# Group-additivity thermochemistry entries (auto-generated database format).
#
# Each entry() defines:
#   * a substructure pattern ("group") written as an adjacency list: node 1
#     (marked "*") is the matched saturated carbon (Cs); each numbered node
#     lists its allowed atom types, radical count (u0), and bonds {n,S};
#   * a ThermoData correction. Cpdata and S298 are all zero here, so these
#     entries contribute only an H298 enthalpy correction (presumably a
#     steric/gauche-type correction -- TODO confirm against the parent
#     database) that grows with the number of Cs substituents on the
#     neighboring carbons.
#
# Entries 22-35 below match a center with three Cs neighbors (node 5 is
# restricted to the non-Cs set [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H]); entries 36-68
# match a center whose four neighbors are all Cs.
# ---------------------------------------------------------------------------
entry(
index = 22,
label = "Cs(Cs(CsCsR)Cs(CsRR)Cs(CsRR)R)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1.6,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 23,
label = "Cs(Cs(CsCsR)Cs(CsCsR)CsR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 24,
label = "Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsRR)R)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 25,
label = "Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsCsR)R)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (3.2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 26,
label = "Cs(Cs(CsCsCs)CsCsR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1.6,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 27,
label = "Cs(Cs(CsCsCs)Cs(CsRR)CsR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 28,
label = "Cs(Cs(CsCsCs)Cs(CsRR)Cs(CsRR)R)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 29,
label = "Cs(Cs(CsCsCs)Cs(CsCsR)CsR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 30,
label = "Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsRR)R)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 31,
label = "Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsCsR)R)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 32,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)CsR)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (3.2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 33,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsRR)R)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (3.6,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 34,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsR)R)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 35,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsCs)R)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 Cs u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (4.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
# ---------------------------------------------------------------------------
# Entries 36-68: the matched Cs center has four Cs neighbors (quaternary-like
# environment). Entry 36 is the base pattern (no constraint on the neighbors'
# own substituents, H298 = 0); subsequent entries refine it by the number of
# Cs substituents on each neighbor, with correspondingly larger H298.
# ---------------------------------------------------------------------------
entry(
index = 36,
label = "Cs(CsCsCsCs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S}
3 Cs u0 {1,S}
4 Cs u0 {1,S}
5 Cs u0 {1,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 37,
label = "Cs(Cs(CsRR)CsCsCs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 38,
label = "Cs(Cs(CsRR)Cs(CsRR)CsCs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1.6,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 39,
label = "Cs(Cs(CsRR)Cs(CsRR)Cs(CsRR)Cs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 40,
label = "Cs(Cs(CsRR)Cs(CsRR)Cs(CsRR)Cs(CsRR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 Cs u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (3.2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 41,
label = "Cs(Cs(CsCsR)CsCsCs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1.6,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 42,
label = "Cs(Cs(CsCsR)Cs(CsRR)CsCs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 43,
label = "Cs(Cs(CsCsR)Cs(CsRR)Cs(CsRR)Cs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (3.2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 44,
label = "Cs(Cs(CsCsR)Cs(CsRR)Cs(CsRR)Cs(CsRR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 Cs u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 45,
label = "Cs(Cs(CsCsR)Cs(CsCsR)CsCs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (3.2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 46,
label = "Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsRR)Cs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 47,
label = "Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsRR)Cs(CsRR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 Cs u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (4.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 48,
label = "Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsCsR)Cs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (4.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 49,
label = "Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsCsR)Cs(CsRR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 Cs u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (5.6,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 50,
label = "Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsCsR)Cs(CsCsR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 Cs u0 {5,S}
16 Cs u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (6.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 51,
label = "Cs(Cs(CsCsCs)CsCsCs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 52,
label = "Cs(Cs(CsCsCs)Cs(CsRR)CsCs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (3.2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 53,
label = "Cs(Cs(CsCsCs)Cs(CsRR)Cs(CsRR)Cs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 54,
label = "Cs(Cs(CsCsCs)Cs(CsRR)Cs(CsRR)Cs(CsRR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 Cs u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (4.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 55,
label = "Cs(Cs(CsCsCs)Cs(CsCsR)CsCs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 56,
label = "Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsRR)Cs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (4.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 57,
label = "Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsRR)Cs(CsRR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 Cs u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (5.6,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 58,
label = "Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsCsR)Cs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (5.6,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 59,
label = "Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsCsR)Cs(CsRR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 Cs u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (6.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 60,
label = "Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsCsR)Cs(CsCsR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 Cs u0 {5,S}
16 Cs u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (7.2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 61,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)CsCs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (4.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 62,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsRR)Cs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (5.6,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 63,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsRR)Cs(CsRR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 Cs u0 {4,S}
13 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 Cs u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (6.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 64,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsR)Cs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (6.4,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 65,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsR)Cs(CsRR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 Cs u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (7.2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 66,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsR)Cs(CsCsR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
15 Cs u0 {5,S}
16 Cs u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 67,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsCs)Cs)",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 Cs u0 {4,S}
15 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (7.2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 68,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsCs)Cs(CsRR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 Cs u0 {4,S}
15 Cs u0 {5,S}
16 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 69,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsR))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 Cs u0 {4,S}
15 Cs u0 {5,S}
16 Cs u0 {5,S}
17 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (8.8,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 70,
label = "Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsCs))",
group =
"""
1 * Cs u0 {2,S} {3,S} {4,S} {5,S}
2 Cs u0 {1,S} {6,S} {7,S} {8,S}
3 Cs u0 {1,S} {9,S} {10,S} {11,S}
4 Cs u0 {1,S} {12,S} {13,S} {14,S}
5 Cs u0 {1,S} {15,S} {16,S} {17,S}
6 Cs u0 {2,S}
7 Cs u0 {2,S}
8 Cs u0 {2,S}
9 Cs u0 {3,S}
10 Cs u0 {3,S}
11 Cs u0 {3,S}
12 Cs u0 {4,S}
13 Cs u0 {4,S}
14 Cs u0 {4,S}
15 Cs u0 {5,S}
16 Cs u0 {5,S}
17 Cs u0 {5,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (9.6,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 71,
label = "Os(RR)",
group =
"""
1 * Os u0
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 72,
label = "Os(CsR)",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S}
3 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 73,
label = "Os(Cs(CsRR)R)",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
4 Cs u0 {2,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
6 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 74,
label = "Os(Cs(CsCsR)R)",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
4 Cs u0 {2,S}
5 Cs u0 {2,S}
6 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 75,
label = "Os(Cs(CsCsCs)R)",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
4 Cs u0 {2,S}
5 Cs u0 {2,S}
6 Cs u0 {2,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 76,
label = "Os(CsCs)",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S}
3 Cs u0 {1,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 77,
label = "Os(Cs(CsRR)Cs)",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 Cs u0 {1,S} {7,S} {8,S} {9,S}
4 Cs u0 {2,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
6 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 78,
label = "Os(Cs(CsRR)Cs(CsRR))",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 Cs u0 {1,S} {7,S} {8,S} {9,S}
4 Cs u0 {2,S}
5 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
6 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
7 Cs u0 {3,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 79,
label = "Os(Cs(CsCsR)Cs)",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 Cs u0 {1,S} {7,S} {8,S} {9,S}
4 Cs u0 {2,S}
5 Cs u0 {2,S}
6 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0.5,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 80,
label = "Os(Cs(CsCsR)Cs(CsRR))",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 Cs u0 {1,S} {7,S} {8,S} {9,S}
4 Cs u0 {2,S}
5 Cs u0 {2,S}
6 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
7 Cs u0 {3,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0.5,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 81,
label = "Os(Cs(CsCsR)Cs(CsCsR))",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 Cs u0 {1,S} {7,S} {8,S} {9,S}
4 Cs u0 {2,S}
5 Cs u0 {2,S}
6 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {2,S}
7 Cs u0 {3,S}
8 Cs u0 {3,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 82,
label = "Os(Cs(CsCsCs)Cs)",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 Cs u0 {1,S} {7,S} {8,S} {9,S}
4 Cs u0 {2,S}
5 Cs u0 {2,S}
6 Cs u0 {2,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 83,
label = "Os(Cs(CsCsCs)Cs(CsRR))",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 Cs u0 {1,S} {7,S} {8,S} {9,S}
4 Cs u0 {2,S}
5 Cs u0 {2,S}
6 Cs u0 {2,S}
7 Cs u0 {3,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 84,
label = "Os(Cs(CsCsCs)Cs(CsCsR))",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 Cs u0 {1,S} {7,S} {8,S} {9,S}
4 Cs u0 {2,S}
5 Cs u0 {2,S}
6 Cs u0 {2,S}
7 Cs u0 {3,S}
8 Cs u0 {3,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1.5,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 85,
label = "Os(Cs(CsCsCs)Cs(CsCsCs))",
group =
"""
1 * Os u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 Cs u0 {1,S} {7,S} {8,S} {9,S}
4 Cs u0 {2,S}
5 Cs u0 {2,S}
6 Cs u0 {2,S}
7 Cs u0 {3,S}
8 Cs u0 {3,S}
9 Cs u0 {3,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 86,
label = "Cd(CsCs)",
group =
"""
1 * Cd u0 {2,D} {3,S} {4,S}
2 Cd u0 {1,D}
3 Cs u0 {1,S}
4 Cs u0 {1,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 87,
label = "Cd(Cs(CsRR)Cs)",
group =
"""
1 * Cd u0 {2,D} {3,S} {4,S}
2 Cd u0 {1,D}
3 Cs u0 {1,S} {5,S} {6,S} {7,S}
4 Cs u0 {1,S} {8,S} {9,S} {10,S}
5 Cs u0 {3,S}
6 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 88,
label = "Cd(Cs(CsRR)Cs(CsRR))",
group =
"""
1 * Cd u0 {2,D} {3,S} {4,S}
2 Cd u0 {1,D}
3 Cs u0 {1,S} {5,S} {6,S} {7,S}
4 Cs u0 {1,S} {8,S} {9,S} {10,S}
5 Cs u0 {3,S}
6 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
8 Cs u0 {4,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 89,
label = "Cd(Cs(CsCsR)Cs)",
group =
"""
1 * Cd u0 {2,D} {3,S} {4,S}
2 Cd u0 {1,D}
3 Cs u0 {1,S} {5,S} {6,S} {7,S}
4 Cs u0 {1,S} {8,S} {9,S} {10,S}
5 Cs u0 {3,S}
6 Cs u0 {3,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 90,
label = "Cd(Cs(CsCsR)Cs(CsRR))",
group =
"""
1 * Cd u0 {2,D} {3,S} {4,S}
2 Cd u0 {1,D}
3 Cs u0 {1,S} {5,S} {6,S} {7,S}
4 Cs u0 {1,S} {8,S} {9,S} {10,S}
5 Cs u0 {3,S}
6 Cs u0 {3,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
8 Cs u0 {4,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 91,
label = "Cd(Cs(CsCsR)Cs(CsCsR))",
group =
"""
1 * Cd u0 {2,D} {3,S} {4,S}
2 Cd u0 {1,D}
3 Cs u0 {1,S} {5,S} {6,S} {7,S}
4 Cs u0 {1,S} {8,S} {9,S} {10,S}
5 Cs u0 {3,S}
6 Cs u0 {3,S}
7 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {3,S}
8 Cs u0 {4,S}
9 Cs u0 {4,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 92,
label = "Cd(Cs(CsCsCs)Cs)",
group =
"""
1 * Cd u0 {2,D} {3,S} {4,S}
2 Cd u0 {1,D}
3 Cs u0 {1,S} {5,S} {6,S} {7,S}
4 Cs u0 {1,S} {8,S} {9,S} {10,S}
5 Cs u0 {3,S}
6 Cs u0 {3,S}
7 Cs u0 {3,S}
8 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 93,
label = "Cd(Cs(CsCsCs)Cs(CsRR))",
group =
"""
1 * Cd u0 {2,D} {3,S} {4,S}
2 Cd u0 {1,D}
3 Cs u0 {1,S} {5,S} {6,S} {7,S}
4 Cs u0 {1,S} {8,S} {9,S} {10,S}
5 Cs u0 {3,S}
6 Cs u0 {3,S}
7 Cs u0 {3,S}
8 Cs u0 {4,S}
9 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (1,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 94,
label = "Cd(Cs(CsCsCs)Cs(CsCsR))",
group =
"""
1 * Cd u0 {2,D} {3,S} {4,S}
2 Cd u0 {1,D}
3 Cs u0 {1,S} {5,S} {6,S} {7,S}
4 Cs u0 {1,S} {8,S} {9,S} {10,S}
5 Cs u0 {3,S}
6 Cs u0 {3,S}
7 Cs u0 {3,S}
8 Cs u0 {4,S}
9 Cs u0 {4,S}
10 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 95,
label = "Cd(Cs(CsCsCs)Cs(CsCsCs))",
group =
"""
1 * Cd u0 {2,D} {3,S} {4,S}
2 Cd u0 {1,D}
3 Cs u0 {1,S} {5,S} {6,S} {7,S}
4 Cs u0 {1,S} {8,S} {9,S} {10,S}
5 Cs u0 {3,S}
6 Cs u0 {3,S}
7 Cs u0 {3,S}
8 Cs u0 {4,S}
9 Cs u0 {4,S}
10 Cs u0 {4,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (2,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 96,
label = "Ss(RR)",
group =
"""
1 * Ss u0
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 97,
label = "Ss(CsR)",
group =
"""
1 * Ss u0 {2,S} {3,S}
2 Cs u0 {1,S}
3 [Cd,Cdd,Ct,Cb,Cbf,Os,CO,H] u0 {1,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 98,
label = "Ss(CsH)",
group =
"""
1 * Ss u0 {2,S} {3,S}
2 Cs u0 {1,S}
3 H u0 {1,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 99,
label = "Ss(Cs(CsHH)H)",
group =
"""
1 * Ss u0 {2,S} {3,S}
2 Cs u0 {1,S} {4,S} {5,S} {6,S}
3 H u0 {1,S}
4 H u0 {2,S}
5 H u0 {2,S}
6 Cs u0 {2,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0.33,0.62,0.67,0.59,0.38,0.21,-0.01],'cal/(mol*K)'),
H298 = (-0.97,'kcal/mol'),
S298 = (-1.01,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
entry(
index = 100,
label = "Ss(CsCs)",
group =
"""
1 * Ss u0 {2,S} {3,S}
2 Cs u0 {1,S}
3 Cs u0 {1,S}
""",
thermo = ThermoData(
Tdata = ([300,400,500,600,800,1000,1500],'K'),
Cpdata = ([0,0,0,0,0,0,0],'cal/(mol*K)'),
H298 = (0,'kcal/mol'),
S298 = (0,'cal/(mol*K)'),
),
shortDesc = u"""""",
longDesc =
u"""
""",
)
tree(
"""
L1: CsOsCdSs
L2: Cs(RRRR)
L3: Cs(CsRRR)
L4: Cs(Cs(CsRR)RRR)
L4: Cs(Cs(CsCsR)RRR)
L4: Cs(Cs(CsCsCs)RRR)
L3: Cs(CsCsRR)
L4: Cs(Cs(CsRR)CsRR)
L4: Cs(Cs(CsRR)Cs(CsRR)RR)
L4: Cs(Cs(CsCsR)CsRR)
L4: Cs(Cs(CsCsR)Cs(CsRR)RR)
L4: Cs(Cs(CsCsR)Cs(CsCsR)RR)
L4: Cs(Cs(CsCsCs)CsRR)
L4: Cs(Cs(CsCsCs)Cs(CsRR)RR)
L4: Cs(Cs(CsCsCs)Cs(CsCsR)RR)
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)RR)
L3: Cs(CsCsCsR)
L4: Cs(Cs(CsRR)CsCsR)
L4: Cs(Cs(CsRR)Cs(CsRR)CsR)
L4: Cs(Cs(CsRR)Cs(CsRR)Cs(CsRR)R)
L4: Cs(Cs(CsCsR)CsCsR)
L4: Cs(Cs(CsCsR)Cs(CsRR)CsR)
L4: Cs(Cs(CsCsR)Cs(CsRR)Cs(CsRR)R)
L4: Cs(Cs(CsCsR)Cs(CsCsR)CsR)
L4: Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsRR)R)
L4: Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsCsR)R)
L4: Cs(Cs(CsCsCs)CsCsR)
L4: Cs(Cs(CsCsCs)Cs(CsRR)CsR)
L4: Cs(Cs(CsCsCs)Cs(CsRR)Cs(CsRR)R)
L4: Cs(Cs(CsCsCs)Cs(CsCsR)CsR)
L4: Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsRR)R)
L4: Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsCsR)R)
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)CsR)
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsRR)R)
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsR)R)
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsCs)R)
L3: Cs(CsCsCsCs)
L4: Cs(Cs(CsRR)CsCsCs)
L4: Cs(Cs(CsRR)Cs(CsRR)CsCs)
L4: Cs(Cs(CsRR)Cs(CsRR)Cs(CsRR)Cs)
L4: Cs(Cs(CsRR)Cs(CsRR)Cs(CsRR)Cs(CsRR))
L4: Cs(Cs(CsCsR)CsCsCs)
L4: Cs(Cs(CsCsR)Cs(CsRR)CsCs)
L4: Cs(Cs(CsCsR)Cs(CsRR)Cs(CsRR)Cs)
L4: Cs(Cs(CsCsR)Cs(CsRR)Cs(CsRR)Cs(CsRR))
L4: Cs(Cs(CsCsR)Cs(CsCsR)CsCs)
L4: Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsRR)Cs)
L4: Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsRR)Cs(CsRR))
L4: Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsCsR)Cs)
L4: Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsCsR)Cs(CsRR))
L4: Cs(Cs(CsCsR)Cs(CsCsR)Cs(CsCsR)Cs(CsCsR))
L4: Cs(Cs(CsCsCs)CsCsCs)
L4: Cs(Cs(CsCsCs)Cs(CsRR)CsCs)
L4: Cs(Cs(CsCsCs)Cs(CsRR)Cs(CsRR)Cs)
L4: Cs(Cs(CsCsCs)Cs(CsRR)Cs(CsRR)Cs(CsRR))
L4: Cs(Cs(CsCsCs)Cs(CsCsR)CsCs)
L4: Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsRR)Cs)
L4: Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsRR)Cs(CsRR))
L4: Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsCsR)Cs)
L4: Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsCsR)Cs(CsRR))
L4: Cs(Cs(CsCsCs)Cs(CsCsR)Cs(CsCsR)Cs(CsCsR))
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)CsCs)
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsRR)Cs)
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsRR)Cs(CsRR))
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsR)Cs)
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsR)Cs(CsRR))
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsR)Cs(CsCsR))
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsCs)Cs)
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsCs)Cs(CsRR))
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsR))
L4: Cs(Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsCs)Cs(CsCsCs))
L2: Os(RR)
L3: Os(CsR)
L4: Os(Cs(CsRR)R)
L4: Os(Cs(CsCsR)R)
L4: Os(Cs(CsCsCs)R)
L3: Os(CsCs)
L4: Os(Cs(CsRR)Cs)
L4: Os(Cs(CsRR)Cs(CsRR))
L4: Os(Cs(CsCsR)Cs)
L4: Os(Cs(CsCsR)Cs(CsRR))
L4: Os(Cs(CsCsR)Cs(CsCsR))
L4: Os(Cs(CsCsCs)Cs)
L4: Os(Cs(CsCsCs)Cs(CsRR))
L4: Os(Cs(CsCsCs)Cs(CsCsR))
L4: Os(Cs(CsCsCs)Cs(CsCsCs))
L2: Cd(CsCs)
L3: Cd(Cs(CsRR)Cs)
L3: Cd(Cs(CsRR)Cs(CsRR))
L3: Cd(Cs(CsCsR)Cs)
L3: Cd(Cs(CsCsR)Cs(CsRR))
L3: Cd(Cs(CsCsR)Cs(CsCsR))
L3: Cd(Cs(CsCsCs)Cs)
L3: Cd(Cs(CsCsCs)Cs(CsRR))
L3: Cd(Cs(CsCsCs)Cs(CsCsR))
L3: Cd(Cs(CsCsCs)Cs(CsCsCs))
L2: Ss(RR)
L3: Ss(CsR)
L4: Ss(CsH)
L5: Ss(Cs(CsHH)H)
L3: Ss(CsCs)
"""
)
| 28.706082
| 71
| 0.37189
| 16,308
| 92,979
| 2.120309
| 0.010486
| 0.086298
| 0.04338
| 0.107756
| 0.972266
| 0.96223
| 0.943721
| 0.936896
| 0.92796
| 0.895627
| 0
| 0.173399
| 0.403876
| 92,979
| 3,238
| 72
| 28.714948
| 0.450448
| 0.000387
| 0
| 0.725838
| 0
| 0
| 0.168306
| 0.057856
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a1e36470ddfed533a1f73804cc121c884c9f17b2
| 2,241
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowIpv6PimNeighbor/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxe/tests/ShowIpv6PimNeighbor/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxe/tests/ShowIpv6PimNeighbor/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Parsed structure for "show ipv6 pim neighbor": a single IPv6 link-local
# neighbor (FE80::21A:30FF:FE47:6EC1) learned on three Port-channel2
# subinterfaces (.100, .101, .103) in the default VRF.  Each neighbor entry
# records DR priority, hold-time expiration, uptime, and the B (bidir) /
# G (GenID) capability flags.
# NOTE(review): this looks like a genieparser golden-output test fixture --
# the nesting and key names must match the parser schema exactly, so do not
# rename keys here without updating the corresponding parser/test.
expected_output = {
    "vrf": {
        "default": {
            "interfaces": {
                "Port-channel2.103": {
                    "address_family": {
                        "ipv6": {
                            "neighbors": {
                                "FE80::21A:30FF:FE47:6EC1": {
                                    "dr_priority": 1,
                                    "expiration": "00:01:31",
                                    "bidir_capable": True,
                                    "up_time": "1d13h",
                                    "interface": "Port-channel2.103",
                                    "genid_capable": True,
                                }
                            }
                        }
                    }
                },
                "Port-channel2.101": {
                    "address_family": {
                        "ipv6": {
                            "neighbors": {
                                "FE80::21A:30FF:FE47:6EC1": {
                                    "dr_priority": 1,
                                    "expiration": "00:01:31",
                                    "bidir_capable": True,
                                    "up_time": "1d13h",
                                    "interface": "Port-channel2.101",
                                    "genid_capable": True,
                                }
                            }
                        }
                    }
                },
                "Port-channel2.100": {
                    "address_family": {
                        "ipv6": {
                            "neighbors": {
                                "FE80::21A:30FF:FE47:6EC1": {
                                    "dr_priority": 1,
                                    "expiration": "00:01:36",
                                    "bidir_capable": True,
                                    "up_time": "1d13h",
                                    "interface": "Port-channel2.100",
                                    "genid_capable": True,
                                }
                            }
                        }
                    }
                },
            }
        }
    }
}
| 39.315789
| 69
| 0.228469
| 101
| 2,241
| 4.910891
| 0.346535
| 0.145161
| 0.102823
| 0.157258
| 0.834677
| 0.721774
| 0.721774
| 0.721774
| 0.721774
| 0.625
| 0
| 0.119015
| 0.673806
| 2,241
| 56
| 70
| 40.017857
| 0.559508
| 0
| 0
| 0.464286
| 0
| 0
| 0.224453
| 0.032129
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62e5abdd63b9a729d43a6f645c3c42f5d364e5b1
| 83,589
|
py
|
Python
|
sdk/python/pulumi_keycloak/ldap/user_federation.py
|
davide-talesco/pulumi-keycloak
|
08d66be6f2bf578d4292e29eb6181794375bc4e5
|
[
"ECL-2.0",
"Apache-2.0"
] | 13
|
2020-04-28T15:20:56.000Z
|
2022-03-24T18:00:17.000Z
|
sdk/python/pulumi_keycloak/ldap/user_federation.py
|
davide-talesco/pulumi-keycloak
|
08d66be6f2bf578d4292e29eb6181794375bc4e5
|
[
"ECL-2.0",
"Apache-2.0"
] | 49
|
2020-02-06T17:53:35.000Z
|
2022-03-25T19:36:08.000Z
|
sdk/python/pulumi_keycloak/ldap/user_federation.py
|
davide-talesco/pulumi-keycloak
|
08d66be6f2bf578d4292e29eb6181794375bc4e5
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-06-09T01:08:56.000Z
|
2021-12-07T15:30:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['UserFederationArgs', 'UserFederation']
@pulumi.input_type
class UserFederationArgs:
def __init__(__self__, *,
             connection_url: pulumi.Input[str],
             rdn_ldap_attribute: pulumi.Input[str],
             realm_id: pulumi.Input[str],
             user_object_classes: pulumi.Input[Sequence[pulumi.Input[str]]],
             username_ldap_attribute: pulumi.Input[str],
             users_dn: pulumi.Input[str],
             uuid_ldap_attribute: pulumi.Input[str],
             batch_size_for_sync: Optional[pulumi.Input[int]] = None,
             bind_credential: Optional[pulumi.Input[str]] = None,
             bind_dn: Optional[pulumi.Input[str]] = None,
             cache: Optional[pulumi.Input['UserFederationCacheArgs']] = None,
             changed_sync_period: Optional[pulumi.Input[int]] = None,
             connection_timeout: Optional[pulumi.Input[str]] = None,
             custom_user_search_filter: Optional[pulumi.Input[str]] = None,
             edit_mode: Optional[pulumi.Input[str]] = None,
             enabled: Optional[pulumi.Input[bool]] = None,
             full_sync_period: Optional[pulumi.Input[int]] = None,
             import_enabled: Optional[pulumi.Input[bool]] = None,
             kerberos: Optional[pulumi.Input['UserFederationKerberosArgs']] = None,
             name: Optional[pulumi.Input[str]] = None,
             pagination: Optional[pulumi.Input[bool]] = None,
             priority: Optional[pulumi.Input[int]] = None,
             read_timeout: Optional[pulumi.Input[str]] = None,
             search_scope: Optional[pulumi.Input[str]] = None,
             start_tls: Optional[pulumi.Input[bool]] = None,
             sync_registrations: Optional[pulumi.Input[bool]] = None,
             trust_email: Optional[pulumi.Input[bool]] = None,
             use_password_modify_extended_op: Optional[pulumi.Input[bool]] = None,
             use_truststore_spi: Optional[pulumi.Input[str]] = None,
             validate_password_policy: Optional[pulumi.Input[bool]] = None,
             vendor: Optional[pulumi.Input[str]] = None):
    """
    The set of arguments for constructing a UserFederation resource.

    Required:
    :param connection_url: Connection URL to the LDAP server.
    :param rdn_ldap_attribute: Name of the LDAP attribute to use as the relative distinguished name.
    :param realm_id: The realm that this provider will provide user federation for.
    :param user_object_classes: Array of all values of LDAP objectClass attribute for users in LDAP. Must contain at least one.
    :param username_ldap_attribute: Name of the LDAP attribute to use as the Keycloak username.
    :param users_dn: Full DN of LDAP tree where your users are.
    :param uuid_ldap_attribute: Name of the LDAP attribute to use as a unique object identifier for objects in LDAP.

    Optional (unset values are left to the provider's own defaults):
    :param batch_size_for_sync: Number of users to sync within a single transaction. Defaults to `1000`.
    :param bind_credential: Password of LDAP admin. Must be set if `bind_dn` is set.
    :param bind_dn: DN of LDAP admin used by Keycloak to access the LDAP server. Must be set if `bind_credential` is set.
    :param cache: A block containing the cache settings.
    :param changed_sync_period: How frequently (seconds) Keycloak should sync changed LDAP users; omit to disable.
    :param connection_timeout: LDAP connection timeout as a Go duration string.
    :param custom_user_search_filter: Additional LDAP filter for searched users. Must begin with `(` and end with `)`.
    :param edit_mode: One of `READ_ONLY`, `WRITABLE`, or `UNSYNCED`. Defaults to `READ_ONLY`.
    :param enabled: When `false`, this provider is not used when querying users. Defaults to `true`.
    :param full_sync_period: How frequently (seconds) Keycloak should sync all LDAP users; omit to disable.
    :param import_enabled: When `true`, LDAP users are imported into the Keycloak database. Defaults to `true`.
    :param kerberos: A block containing the kerberos settings.
    :param name: Display name of the provider when displayed in the console.
    :param pagination: When `true`, Keycloak assumes the LDAP server supports pagination. Defaults to `true`.
    :param priority: Priority of this provider when looking up users; lower values first. Defaults to `0`.
    :param read_timeout: LDAP read timeout as a Go duration string.
    :param search_scope: `ONE_LEVEL` (only the DN given by `user_dn`) or `SUBTREE` (entire LDAP subtree).
    :param start_tls: When `true`, encrypt the LDAP connection with STARTTLS (disables connection pooling).
    :param sync_registrations: When `true`, newly created users are synced back to LDAP. Defaults to `false`.
    :param trust_email: If enabled, email from this provider is not verified even if realm verification is on.
    :param use_password_modify_extended_op: When `true`, use the LDAPv3 Password Modify Extended Operation (RFC-3062).
    :param use_truststore_spi: One of `ALWAYS`, `ONLY_FOR_LDAPS`, or `NEVER`.
    :param validate_password_policy: When `true`, validate passwords against the realm policy before updating.
    :param vendor: One of `OTHER`, `EDIRECTORY`, `AD`, `RHDS`, or `TIVOLI`. Defaults to `OTHER`.
    """
    # Required arguments are always recorded on the instance, in the same
    # order the generated SDK records them.
    for attr, value in (
        ("connection_url", connection_url),
        ("rdn_ldap_attribute", rdn_ldap_attribute),
        ("realm_id", realm_id),
        ("user_object_classes", user_object_classes),
        ("username_ldap_attribute", username_ldap_attribute),
        ("users_dn", users_dn),
        ("uuid_ldap_attribute", uuid_ldap_attribute),
    ):
        pulumi.set(__self__, attr, value)
    # Optional arguments are recorded only when explicitly supplied; a None
    # is skipped so the provider applies its own default for that field.
    optional_args = {
        "batch_size_for_sync": batch_size_for_sync,
        "bind_credential": bind_credential,
        "bind_dn": bind_dn,
        "cache": cache,
        "changed_sync_period": changed_sync_period,
        "connection_timeout": connection_timeout,
        "custom_user_search_filter": custom_user_search_filter,
        "edit_mode": edit_mode,
        "enabled": enabled,
        "full_sync_period": full_sync_period,
        "import_enabled": import_enabled,
        "kerberos": kerberos,
        "name": name,
        "pagination": pagination,
        "priority": priority,
        "read_timeout": read_timeout,
        "search_scope": search_scope,
        "start_tls": start_tls,
        "sync_registrations": sync_registrations,
        "trust_email": trust_email,
        "use_password_modify_extended_op": use_password_modify_extended_op,
        "use_truststore_spi": use_truststore_spi,
        "validate_password_policy": validate_password_policy,
        "vendor": vendor,
    }
    for attr, value in optional_args.items():
        if value is not None:
            pulumi.set(__self__, attr, value)
@property
@pulumi.getter(name="connectionUrl")
def connection_url(self) -> pulumi.Input[str]:
    """
    Connection URL to the LDAP server.
    """
    return pulumi.get(self, "connection_url")

@connection_url.setter
def connection_url(self, value: pulumi.Input[str]):
    # Stores into the Pulumi input map; surfaced as "connectionUrl" per the getter above.
    pulumi.set(self, "connection_url", value)
@property
@pulumi.getter(name="rdnLdapAttribute")
def rdn_ldap_attribute(self) -> pulumi.Input[str]:
    """
    Name of the LDAP attribute to use as the relative distinguished name.
    """
    return pulumi.get(self, "rdn_ldap_attribute")

@rdn_ldap_attribute.setter
def rdn_ldap_attribute(self, value: pulumi.Input[str]):
    # Stores into the Pulumi input map; surfaced as "rdnLdapAttribute" per the getter above.
    pulumi.set(self, "rdn_ldap_attribute", value)
@property
@pulumi.getter(name="realmId")
def realm_id(self) -> pulumi.Input[str]:
    """
    The realm that this provider will provide user federation for.
    """
    return pulumi.get(self, "realm_id")

@realm_id.setter
def realm_id(self, value: pulumi.Input[str]):
    # Stores into the Pulumi input map; surfaced as "realmId" per the getter above.
    pulumi.set(self, "realm_id", value)
@property
@pulumi.getter(name="userObjectClasses")
def user_object_classes(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
Array of all values of LDAP objectClass attribute for users in LDAP. Must contain at least one.
"""
return pulumi.get(self, "user_object_classes")
@user_object_classes.setter
def user_object_classes(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "user_object_classes", value)
@property
@pulumi.getter(name="usernameLdapAttribute")
def username_ldap_attribute(self) -> pulumi.Input[str]:
"""
Name of the LDAP attribute to use as the Keycloak username.
"""
return pulumi.get(self, "username_ldap_attribute")
@username_ldap_attribute.setter
def username_ldap_attribute(self, value: pulumi.Input[str]):
pulumi.set(self, "username_ldap_attribute", value)
@property
@pulumi.getter(name="usersDn")
def users_dn(self) -> pulumi.Input[str]:
"""
Full DN of LDAP tree where your users are.
"""
return pulumi.get(self, "users_dn")
@users_dn.setter
def users_dn(self, value: pulumi.Input[str]):
pulumi.set(self, "users_dn", value)
@property
@pulumi.getter(name="uuidLdapAttribute")
def uuid_ldap_attribute(self) -> pulumi.Input[str]:
"""
Name of the LDAP attribute to use as a unique object identifier for objects in LDAP.
"""
return pulumi.get(self, "uuid_ldap_attribute")
@uuid_ldap_attribute.setter
def uuid_ldap_attribute(self, value: pulumi.Input[str]):
pulumi.set(self, "uuid_ldap_attribute", value)
    # --- Optional-argument accessors (batch_size_for_sync .. kerberos) -----
    # All of these are Optional: when left as None the provider/server-side
    # default applies (stated in each docstring where known).

    @property
    @pulumi.getter(name="batchSizeForSync")
    def batch_size_for_sync(self) -> Optional[pulumi.Input[int]]:
        """
        The number of users to sync within a single transaction. Defaults to `1000`.
        """
        return pulumi.get(self, "batch_size_for_sync")

    @batch_size_for_sync.setter
    def batch_size_for_sync(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "batch_size_for_sync", value)

    @property
    @pulumi.getter(name="bindCredential")
    def bind_credential(self) -> Optional[pulumi.Input[str]]:
        """
        Password of LDAP admin. This attribute must be set if `bind_dn` is set.
        """
        return pulumi.get(self, "bind_credential")

    @bind_credential.setter
    def bind_credential(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "bind_credential", value)

    @property
    @pulumi.getter(name="bindDn")
    def bind_dn(self) -> Optional[pulumi.Input[str]]:
        """
        DN of LDAP admin, which will be used by Keycloak to access LDAP server. This attribute must be set if `bind_credential` is set.
        """
        return pulumi.get(self, "bind_dn")

    @bind_dn.setter
    def bind_dn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "bind_dn", value)

    @property
    @pulumi.getter
    def cache(self) -> Optional[pulumi.Input['UserFederationCacheArgs']]:
        """
        A block containing the cache settings.
        """
        return pulumi.get(self, "cache")

    @cache.setter
    def cache(self, value: Optional[pulumi.Input['UserFederationCacheArgs']]):
        pulumi.set(self, "cache", value)

    @property
    @pulumi.getter(name="changedSyncPeriod")
    def changed_sync_period(self) -> Optional[pulumi.Input[int]]:
        """
        How frequently Keycloak should sync changed LDAP users, in seconds. Omit this property to disable periodic changed users sync.
        """
        return pulumi.get(self, "changed_sync_period")

    @changed_sync_period.setter
    def changed_sync_period(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "changed_sync_period", value)

    @property
    @pulumi.getter(name="connectionTimeout")
    def connection_timeout(self) -> Optional[pulumi.Input[str]]:
        """
        LDAP connection timeout in the format of a [Go duration string](https://golang.org/pkg/time/#Duration.String).
        """
        return pulumi.get(self, "connection_timeout")

    @connection_timeout.setter
    def connection_timeout(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "connection_timeout", value)

    @property
    @pulumi.getter(name="customUserSearchFilter")
    def custom_user_search_filter(self) -> Optional[pulumi.Input[str]]:
        """
        Additional LDAP filter for filtering searched users. Must begin with `(` and end with `)`.
        """
        return pulumi.get(self, "custom_user_search_filter")

    @custom_user_search_filter.setter
    def custom_user_search_filter(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_user_search_filter", value)

    @property
    @pulumi.getter(name="editMode")
    def edit_mode(self) -> Optional[pulumi.Input[str]]:
        """
        Can be one of `READ_ONLY`, `WRITABLE`, or `UNSYNCED`. `UNSYNCED` allows user data to be imported but not synced back to LDAP. Defaults to `READ_ONLY`.
        """
        return pulumi.get(self, "edit_mode")

    @edit_mode.setter
    def edit_mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "edit_mode", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When `false`, this provider will not be used when performing queries for users. Defaults to `true`.
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter(name="fullSyncPeriod")
    def full_sync_period(self) -> Optional[pulumi.Input[int]]:
        """
        How frequently Keycloak should sync all LDAP users, in seconds. Omit this property to disable periodic full sync.
        """
        return pulumi.get(self, "full_sync_period")

    @full_sync_period.setter
    def full_sync_period(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "full_sync_period", value)

    @property
    @pulumi.getter(name="importEnabled")
    def import_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, LDAP users will be imported into the Keycloak database. Defaults to `true`.
        """
        return pulumi.get(self, "import_enabled")

    @import_enabled.setter
    def import_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "import_enabled", value)

    @property
    @pulumi.getter
    def kerberos(self) -> Optional[pulumi.Input['UserFederationKerberosArgs']]:
        """
        A block containing the kerberos settings.
        """
        return pulumi.get(self, "kerberos")

    @kerberos.setter
    def kerberos(self, value: Optional[pulumi.Input['UserFederationKerberosArgs']]):
        pulumi.set(self, "kerberos", value)
    # --- Optional-argument accessors (name .. vendor) -----------------------
    # Same getter/setter pattern as above: pulumi.get()/pulumi.set() backed,
    # all fields Optional.

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Display name of the provider when displayed in the console.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def pagination(self) -> Optional[pulumi.Input[bool]]:
        """
        When true, Keycloak assumes the LDAP server supports pagination. Defaults to `true`.
        """
        return pulumi.get(self, "pagination")

    @pagination.setter
    def pagination(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "pagination", value)

    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        Priority of this provider when looking up users. Lower values are first. Defaults to `0`.
        """
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)

    @property
    @pulumi.getter(name="readTimeout")
    def read_timeout(self) -> Optional[pulumi.Input[str]]:
        """
        LDAP read timeout in the format of a [Go duration string](https://golang.org/pkg/time/#Duration.String).
        """
        return pulumi.get(self, "read_timeout")

    @read_timeout.setter
    def read_timeout(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "read_timeout", value)

    @property
    @pulumi.getter(name="searchScope")
    def search_scope(self) -> Optional[pulumi.Input[str]]:
        """
        Can be one of `ONE_LEVEL` or `SUBTREE`:
        - `ONE_LEVEL`: Only search for users in the DN specified by `user_dn`.
        - `SUBTREE`: Search entire LDAP subtree.
        """
        return pulumi.get(self, "search_scope")

    @search_scope.setter
    def search_scope(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "search_scope", value)

    @property
    @pulumi.getter(name="startTls")
    def start_tls(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, Keycloak will encrypt the connection to LDAP using STARTTLS, which will disable connection pooling.
        """
        return pulumi.get(self, "start_tls")

    @start_tls.setter
    def start_tls(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "start_tls", value)

    @property
    @pulumi.getter(name="syncRegistrations")
    def sync_registrations(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, newly created users will be synced back to LDAP. Defaults to `false`.
        """
        return pulumi.get(self, "sync_registrations")

    @sync_registrations.setter
    def sync_registrations(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "sync_registrations", value)

    @property
    @pulumi.getter(name="trustEmail")
    def trust_email(self) -> Optional[pulumi.Input[bool]]:
        """
        If enabled, email provided by this provider is not verified even if verification is enabled for the realm.
        """
        return pulumi.get(self, "trust_email")

    @trust_email.setter
    def trust_email(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "trust_email", value)

    @property
    @pulumi.getter(name="usePasswordModifyExtendedOp")
    def use_password_modify_extended_op(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, use the LDAPv3 Password Modify Extended Operation (RFC-3062).
        """
        return pulumi.get(self, "use_password_modify_extended_op")

    @use_password_modify_extended_op.setter
    def use_password_modify_extended_op(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "use_password_modify_extended_op", value)

    @property
    @pulumi.getter(name="useTruststoreSpi")
    def use_truststore_spi(self) -> Optional[pulumi.Input[str]]:
        """
        Can be one of `ALWAYS`, `ONLY_FOR_LDAPS`, or `NEVER`:
        """
        return pulumi.get(self, "use_truststore_spi")

    @use_truststore_spi.setter
    def use_truststore_spi(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "use_truststore_spi", value)

    @property
    @pulumi.getter(name="validatePasswordPolicy")
    def validate_password_policy(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, Keycloak will validate passwords using the realm policy before updating it.
        """
        return pulumi.get(self, "validate_password_policy")

    @validate_password_policy.setter
    def validate_password_policy(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "validate_password_policy", value)

    @property
    @pulumi.getter
    def vendor(self) -> Optional[pulumi.Input[str]]:
        """
        Can be one of `OTHER`, `EDIRECTORY`, `AD`, `RHDS`, or `TIVOLI`. When this is selected in the GUI, it provides reasonable defaults for other fields. When used with the Keycloak API, this attribute does nothing, but is still required. Defaults to `OTHER`.
        """
        return pulumi.get(self, "vendor")

    @vendor.setter
    def vendor(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vendor", value)
@pulumi.input_type
class _UserFederationState:
    """
    Input properties used for looking up and filtering UserFederation
    resources. Unlike the args class, every field here is optional, so
    partially-known state can be represented.
    """

    def __init__(__self__, *,
                 batch_size_for_sync: Optional[pulumi.Input[int]] = None,
                 bind_credential: Optional[pulumi.Input[str]] = None,
                 bind_dn: Optional[pulumi.Input[str]] = None,
                 cache: Optional[pulumi.Input['UserFederationCacheArgs']] = None,
                 changed_sync_period: Optional[pulumi.Input[int]] = None,
                 connection_timeout: Optional[pulumi.Input[str]] = None,
                 connection_url: Optional[pulumi.Input[str]] = None,
                 custom_user_search_filter: Optional[pulumi.Input[str]] = None,
                 edit_mode: Optional[pulumi.Input[str]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 full_sync_period: Optional[pulumi.Input[int]] = None,
                 import_enabled: Optional[pulumi.Input[bool]] = None,
                 kerberos: Optional[pulumi.Input['UserFederationKerberosArgs']] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 pagination: Optional[pulumi.Input[bool]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 rdn_ldap_attribute: Optional[pulumi.Input[str]] = None,
                 read_timeout: Optional[pulumi.Input[str]] = None,
                 realm_id: Optional[pulumi.Input[str]] = None,
                 search_scope: Optional[pulumi.Input[str]] = None,
                 start_tls: Optional[pulumi.Input[bool]] = None,
                 sync_registrations: Optional[pulumi.Input[bool]] = None,
                 trust_email: Optional[pulumi.Input[bool]] = None,
                 use_password_modify_extended_op: Optional[pulumi.Input[bool]] = None,
                 use_truststore_spi: Optional[pulumi.Input[str]] = None,
                 user_object_classes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 username_ldap_attribute: Optional[pulumi.Input[str]] = None,
                 users_dn: Optional[pulumi.Input[str]] = None,
                 uuid_ldap_attribute: Optional[pulumi.Input[str]] = None,
                 validate_password_policy: Optional[pulumi.Input[bool]] = None,
                 vendor: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering UserFederation resources.
        :param pulumi.Input[int] batch_size_for_sync: The number of users to sync within a single transaction. Defaults to `1000`.
        :param pulumi.Input[str] bind_credential: Password of LDAP admin. This attribute must be set if `bind_dn` is set.
        :param pulumi.Input[str] bind_dn: DN of LDAP admin, which will be used by Keycloak to access LDAP server. This attribute must be set if `bind_credential` is set.
        :param pulumi.Input['UserFederationCacheArgs'] cache: A block containing the cache settings.
        :param pulumi.Input[int] changed_sync_period: How frequently Keycloak should sync changed LDAP users, in seconds. Omit this property to disable periodic changed users sync.
        :param pulumi.Input[str] connection_timeout: LDAP connection timeout in the format of a [Go duration string](https://golang.org/pkg/time/#Duration.String).
        :param pulumi.Input[str] connection_url: Connection URL to the LDAP server.
        :param pulumi.Input[str] custom_user_search_filter: Additional LDAP filter for filtering searched users. Must begin with `(` and end with `)`.
        :param pulumi.Input[str] edit_mode: Can be one of `READ_ONLY`, `WRITABLE`, or `UNSYNCED`. `UNSYNCED` allows user data to be imported but not synced back to LDAP. Defaults to `READ_ONLY`.
        :param pulumi.Input[bool] enabled: When `false`, this provider will not be used when performing queries for users. Defaults to `true`.
        :param pulumi.Input[int] full_sync_period: How frequently Keycloak should sync all LDAP users, in seconds. Omit this property to disable periodic full sync.
        :param pulumi.Input[bool] import_enabled: When `true`, LDAP users will be imported into the Keycloak database. Defaults to `true`.
        :param pulumi.Input['UserFederationKerberosArgs'] kerberos: A block containing the kerberos settings.
        :param pulumi.Input[str] name: Display name of the provider when displayed in the console.
        :param pulumi.Input[bool] pagination: When true, Keycloak assumes the LDAP server supports pagination. Defaults to `true`.
        :param pulumi.Input[int] priority: Priority of this provider when looking up users. Lower values are first. Defaults to `0`.
        :param pulumi.Input[str] rdn_ldap_attribute: Name of the LDAP attribute to use as the relative distinguished name.
        :param pulumi.Input[str] read_timeout: LDAP read timeout in the format of a [Go duration string](https://golang.org/pkg/time/#Duration.String).
        :param pulumi.Input[str] realm_id: The realm that this provider will provide user federation for.
        :param pulumi.Input[str] search_scope: Can be one of `ONE_LEVEL` or `SUBTREE`:
               - `ONE_LEVEL`: Only search for users in the DN specified by `user_dn`.
               - `SUBTREE`: Search entire LDAP subtree.
        :param pulumi.Input[bool] start_tls: When `true`, Keycloak will encrypt the connection to LDAP using STARTTLS, which will disable connection pooling.
        :param pulumi.Input[bool] sync_registrations: When `true`, newly created users will be synced back to LDAP. Defaults to `false`.
        :param pulumi.Input[bool] trust_email: If enabled, email provided by this provider is not verified even if verification is enabled for the realm.
        :param pulumi.Input[bool] use_password_modify_extended_op: When `true`, use the LDAPv3 Password Modify Extended Operation (RFC-3062).
        :param pulumi.Input[str] use_truststore_spi: Can be one of `ALWAYS`, `ONLY_FOR_LDAPS`, or `NEVER`:
        :param pulumi.Input[Sequence[pulumi.Input[str]]] user_object_classes: Array of all values of LDAP objectClass attribute for users in LDAP. Must contain at least one.
        :param pulumi.Input[str] username_ldap_attribute: Name of the LDAP attribute to use as the Keycloak username.
        :param pulumi.Input[str] users_dn: Full DN of LDAP tree where your users are.
        :param pulumi.Input[str] uuid_ldap_attribute: Name of the LDAP attribute to use as a unique object identifier for objects in LDAP.
        :param pulumi.Input[bool] validate_password_policy: When `true`, Keycloak will validate passwords using the realm policy before updating it.
        :param pulumi.Input[str] vendor: Can be one of `OTHER`, `EDIRECTORY`, `AD`, `RHDS`, or `TIVOLI`. When this is selected in the GUI, it provides reasonable defaults for other fields. When used with the Keycloak API, this attribute does nothing, but is still required. Defaults to `OTHER`.
        """
        # Only store values the caller actually supplied; fields left as None
        # stay absent from the state object.
        if batch_size_for_sync is not None:
            pulumi.set(__self__, "batch_size_for_sync", batch_size_for_sync)
        if bind_credential is not None:
            pulumi.set(__self__, "bind_credential", bind_credential)
        if bind_dn is not None:
            pulumi.set(__self__, "bind_dn", bind_dn)
        if cache is not None:
            pulumi.set(__self__, "cache", cache)
        if changed_sync_period is not None:
            pulumi.set(__self__, "changed_sync_period", changed_sync_period)
        if connection_timeout is not None:
            pulumi.set(__self__, "connection_timeout", connection_timeout)
        if connection_url is not None:
            pulumi.set(__self__, "connection_url", connection_url)
        if custom_user_search_filter is not None:
            pulumi.set(__self__, "custom_user_search_filter", custom_user_search_filter)
        if edit_mode is not None:
            pulumi.set(__self__, "edit_mode", edit_mode)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if full_sync_period is not None:
            pulumi.set(__self__, "full_sync_period", full_sync_period)
        if import_enabled is not None:
            pulumi.set(__self__, "import_enabled", import_enabled)
        if kerberos is not None:
            pulumi.set(__self__, "kerberos", kerberos)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if pagination is not None:
            pulumi.set(__self__, "pagination", pagination)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)
        if rdn_ldap_attribute is not None:
            pulumi.set(__self__, "rdn_ldap_attribute", rdn_ldap_attribute)
        if read_timeout is not None:
            pulumi.set(__self__, "read_timeout", read_timeout)
        if realm_id is not None:
            pulumi.set(__self__, "realm_id", realm_id)
        if search_scope is not None:
            pulumi.set(__self__, "search_scope", search_scope)
        if start_tls is not None:
            pulumi.set(__self__, "start_tls", start_tls)
        if sync_registrations is not None:
            pulumi.set(__self__, "sync_registrations", sync_registrations)
        if trust_email is not None:
            pulumi.set(__self__, "trust_email", trust_email)
        if use_password_modify_extended_op is not None:
            pulumi.set(__self__, "use_password_modify_extended_op", use_password_modify_extended_op)
        if use_truststore_spi is not None:
            pulumi.set(__self__, "use_truststore_spi", use_truststore_spi)
        if user_object_classes is not None:
            pulumi.set(__self__, "user_object_classes", user_object_classes)
        if username_ldap_attribute is not None:
            pulumi.set(__self__, "username_ldap_attribute", username_ldap_attribute)
        if users_dn is not None:
            pulumi.set(__self__, "users_dn", users_dn)
        if uuid_ldap_attribute is not None:
            pulumi.set(__self__, "uuid_ldap_attribute", uuid_ldap_attribute)
        if validate_password_policy is not None:
            pulumi.set(__self__, "validate_password_policy", validate_password_policy)
        if vendor is not None:
            pulumi.set(__self__, "vendor", vendor)

    # Accessors: each property reads via pulumi.get() and its setter writes
    # via pulumi.set(); @pulumi.getter(name=...) records the camelCase wire
    # name for the snake_case attribute.

    @property
    @pulumi.getter(name="batchSizeForSync")
    def batch_size_for_sync(self) -> Optional[pulumi.Input[int]]:
        """
        The number of users to sync within a single transaction. Defaults to `1000`.
        """
        return pulumi.get(self, "batch_size_for_sync")

    @batch_size_for_sync.setter
    def batch_size_for_sync(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "batch_size_for_sync", value)

    @property
    @pulumi.getter(name="bindCredential")
    def bind_credential(self) -> Optional[pulumi.Input[str]]:
        """
        Password of LDAP admin. This attribute must be set if `bind_dn` is set.
        """
        return pulumi.get(self, "bind_credential")

    @bind_credential.setter
    def bind_credential(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "bind_credential", value)

    @property
    @pulumi.getter(name="bindDn")
    def bind_dn(self) -> Optional[pulumi.Input[str]]:
        """
        DN of LDAP admin, which will be used by Keycloak to access LDAP server. This attribute must be set if `bind_credential` is set.
        """
        return pulumi.get(self, "bind_dn")

    @bind_dn.setter
    def bind_dn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "bind_dn", value)

    @property
    @pulumi.getter
    def cache(self) -> Optional[pulumi.Input['UserFederationCacheArgs']]:
        """
        A block containing the cache settings.
        """
        return pulumi.get(self, "cache")

    @cache.setter
    def cache(self, value: Optional[pulumi.Input['UserFederationCacheArgs']]):
        pulumi.set(self, "cache", value)

    @property
    @pulumi.getter(name="changedSyncPeriod")
    def changed_sync_period(self) -> Optional[pulumi.Input[int]]:
        """
        How frequently Keycloak should sync changed LDAP users, in seconds. Omit this property to disable periodic changed users sync.
        """
        return pulumi.get(self, "changed_sync_period")

    @changed_sync_period.setter
    def changed_sync_period(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "changed_sync_period", value)

    @property
    @pulumi.getter(name="connectionTimeout")
    def connection_timeout(self) -> Optional[pulumi.Input[str]]:
        """
        LDAP connection timeout in the format of a [Go duration string](https://golang.org/pkg/time/#Duration.String).
        """
        return pulumi.get(self, "connection_timeout")

    @connection_timeout.setter
    def connection_timeout(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "connection_timeout", value)

    @property
    @pulumi.getter(name="connectionUrl")
    def connection_url(self) -> Optional[pulumi.Input[str]]:
        """
        Connection URL to the LDAP server.
        """
        return pulumi.get(self, "connection_url")

    @connection_url.setter
    def connection_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "connection_url", value)

    @property
    @pulumi.getter(name="customUserSearchFilter")
    def custom_user_search_filter(self) -> Optional[pulumi.Input[str]]:
        """
        Additional LDAP filter for filtering searched users. Must begin with `(` and end with `)`.
        """
        return pulumi.get(self, "custom_user_search_filter")

    @custom_user_search_filter.setter
    def custom_user_search_filter(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_user_search_filter", value)

    @property
    @pulumi.getter(name="editMode")
    def edit_mode(self) -> Optional[pulumi.Input[str]]:
        """
        Can be one of `READ_ONLY`, `WRITABLE`, or `UNSYNCED`. `UNSYNCED` allows user data to be imported but not synced back to LDAP. Defaults to `READ_ONLY`.
        """
        return pulumi.get(self, "edit_mode")

    @edit_mode.setter
    def edit_mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "edit_mode", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When `false`, this provider will not be used when performing queries for users. Defaults to `true`.
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter(name="fullSyncPeriod")
    def full_sync_period(self) -> Optional[pulumi.Input[int]]:
        """
        How frequently Keycloak should sync all LDAP users, in seconds. Omit this property to disable periodic full sync.
        """
        return pulumi.get(self, "full_sync_period")

    @full_sync_period.setter
    def full_sync_period(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "full_sync_period", value)

    @property
    @pulumi.getter(name="importEnabled")
    def import_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, LDAP users will be imported into the Keycloak database. Defaults to `true`.
        """
        return pulumi.get(self, "import_enabled")

    @import_enabled.setter
    def import_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "import_enabled", value)

    @property
    @pulumi.getter
    def kerberos(self) -> Optional[pulumi.Input['UserFederationKerberosArgs']]:
        """
        A block containing the kerberos settings.
        """
        return pulumi.get(self, "kerberos")

    @kerberos.setter
    def kerberos(self, value: Optional[pulumi.Input['UserFederationKerberosArgs']]):
        pulumi.set(self, "kerberos", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Display name of the provider when displayed in the console.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def pagination(self) -> Optional[pulumi.Input[bool]]:
        """
        When true, Keycloak assumes the LDAP server supports pagination. Defaults to `true`.
        """
        return pulumi.get(self, "pagination")

    @pagination.setter
    def pagination(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "pagination", value)

    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        Priority of this provider when looking up users. Lower values are first. Defaults to `0`.
        """
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)

    @property
    @pulumi.getter(name="rdnLdapAttribute")
    def rdn_ldap_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the LDAP attribute to use as the relative distinguished name.
        """
        return pulumi.get(self, "rdn_ldap_attribute")

    @rdn_ldap_attribute.setter
    def rdn_ldap_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "rdn_ldap_attribute", value)

    @property
    @pulumi.getter(name="readTimeout")
    def read_timeout(self) -> Optional[pulumi.Input[str]]:
        """
        LDAP read timeout in the format of a [Go duration string](https://golang.org/pkg/time/#Duration.String).
        """
        return pulumi.get(self, "read_timeout")

    @read_timeout.setter
    def read_timeout(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "read_timeout", value)

    @property
    @pulumi.getter(name="realmId")
    def realm_id(self) -> Optional[pulumi.Input[str]]:
        """
        The realm that this provider will provide user federation for.
        """
        return pulumi.get(self, "realm_id")

    @realm_id.setter
    def realm_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "realm_id", value)

    @property
    @pulumi.getter(name="searchScope")
    def search_scope(self) -> Optional[pulumi.Input[str]]:
        """
        Can be one of `ONE_LEVEL` or `SUBTREE`:
        - `ONE_LEVEL`: Only search for users in the DN specified by `user_dn`.
        - `SUBTREE`: Search entire LDAP subtree.
        """
        return pulumi.get(self, "search_scope")

    @search_scope.setter
    def search_scope(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "search_scope", value)

    @property
    @pulumi.getter(name="startTls")
    def start_tls(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, Keycloak will encrypt the connection to LDAP using STARTTLS, which will disable connection pooling.
        """
        return pulumi.get(self, "start_tls")

    @start_tls.setter
    def start_tls(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "start_tls", value)

    @property
    @pulumi.getter(name="syncRegistrations")
    def sync_registrations(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, newly created users will be synced back to LDAP. Defaults to `false`.
        """
        return pulumi.get(self, "sync_registrations")

    @sync_registrations.setter
    def sync_registrations(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "sync_registrations", value)

    @property
    @pulumi.getter(name="trustEmail")
    def trust_email(self) -> Optional[pulumi.Input[bool]]:
        """
        If enabled, email provided by this provider is not verified even if verification is enabled for the realm.
        """
        return pulumi.get(self, "trust_email")

    @trust_email.setter
    def trust_email(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "trust_email", value)

    @property
    @pulumi.getter(name="usePasswordModifyExtendedOp")
    def use_password_modify_extended_op(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, use the LDAPv3 Password Modify Extended Operation (RFC-3062).
        """
        return pulumi.get(self, "use_password_modify_extended_op")

    @use_password_modify_extended_op.setter
    def use_password_modify_extended_op(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "use_password_modify_extended_op", value)

    @property
    @pulumi.getter(name="useTruststoreSpi")
    def use_truststore_spi(self) -> Optional[pulumi.Input[str]]:
        """
        Can be one of `ALWAYS`, `ONLY_FOR_LDAPS`, or `NEVER`:
        """
        return pulumi.get(self, "use_truststore_spi")

    @use_truststore_spi.setter
    def use_truststore_spi(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "use_truststore_spi", value)

    @property
    @pulumi.getter(name="userObjectClasses")
    def user_object_classes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Array of all values of LDAP objectClass attribute for users in LDAP. Must contain at least one.
        """
        return pulumi.get(self, "user_object_classes")

    @user_object_classes.setter
    def user_object_classes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "user_object_classes", value)

    @property
    @pulumi.getter(name="usernameLdapAttribute")
    def username_ldap_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the LDAP attribute to use as the Keycloak username.
        """
        return pulumi.get(self, "username_ldap_attribute")

    @username_ldap_attribute.setter
    def username_ldap_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "username_ldap_attribute", value)

    @property
    @pulumi.getter(name="usersDn")
    def users_dn(self) -> Optional[pulumi.Input[str]]:
        """
        Full DN of LDAP tree where your users are.
        """
        return pulumi.get(self, "users_dn")

    @users_dn.setter
    def users_dn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "users_dn", value)

    @property
    @pulumi.getter(name="uuidLdapAttribute")
    def uuid_ldap_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the LDAP attribute to use as a unique object identifier for objects in LDAP.
        """
        return pulumi.get(self, "uuid_ldap_attribute")

    @uuid_ldap_attribute.setter
    def uuid_ldap_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "uuid_ldap_attribute", value)

    @property
    @pulumi.getter(name="validatePasswordPolicy")
    def validate_password_policy(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, Keycloak will validate passwords using the realm policy before updating it.
        """
        return pulumi.get(self, "validate_password_policy")

    @validate_password_policy.setter
    def validate_password_policy(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "validate_password_policy", value)

    @property
    @pulumi.getter
    def vendor(self) -> Optional[pulumi.Input[str]]:
        """
        Can be one of `OTHER`, `EDIRECTORY`, `AD`, `RHDS`, or `TIVOLI`. When this is selected in the GUI, it provides reasonable defaults for other fields. When used with the Keycloak API, this attribute does nothing, but is still required. Defaults to `OTHER`.
        """
        return pulumi.get(self, "vendor")

    @vendor.setter
    def vendor(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vendor", value)
class UserFederation(pulumi.CustomResource):
@overload
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             batch_size_for_sync: Optional[pulumi.Input[int]] = None,
             bind_credential: Optional[pulumi.Input[str]] = None,
             bind_dn: Optional[pulumi.Input[str]] = None,
             cache: Optional[pulumi.Input[pulumi.InputType['UserFederationCacheArgs']]] = None,
             changed_sync_period: Optional[pulumi.Input[int]] = None,
             connection_timeout: Optional[pulumi.Input[str]] = None,
             connection_url: Optional[pulumi.Input[str]] = None,
             custom_user_search_filter: Optional[pulumi.Input[str]] = None,
             edit_mode: Optional[pulumi.Input[str]] = None,
             enabled: Optional[pulumi.Input[bool]] = None,
             full_sync_period: Optional[pulumi.Input[int]] = None,
             import_enabled: Optional[pulumi.Input[bool]] = None,
             kerberos: Optional[pulumi.Input[pulumi.InputType['UserFederationKerberosArgs']]] = None,
             name: Optional[pulumi.Input[str]] = None,
             pagination: Optional[pulumi.Input[bool]] = None,
             priority: Optional[pulumi.Input[int]] = None,
             rdn_ldap_attribute: Optional[pulumi.Input[str]] = None,
             read_timeout: Optional[pulumi.Input[str]] = None,
             realm_id: Optional[pulumi.Input[str]] = None,
             search_scope: Optional[pulumi.Input[str]] = None,
             start_tls: Optional[pulumi.Input[bool]] = None,
             sync_registrations: Optional[pulumi.Input[bool]] = None,
             trust_email: Optional[pulumi.Input[bool]] = None,
             use_password_modify_extended_op: Optional[pulumi.Input[bool]] = None,
             use_truststore_spi: Optional[pulumi.Input[str]] = None,
             user_object_classes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             username_ldap_attribute: Optional[pulumi.Input[str]] = None,
             users_dn: Optional[pulumi.Input[str]] = None,
             uuid_ldap_attribute: Optional[pulumi.Input[str]] = None,
             validate_password_policy: Optional[pulumi.Input[bool]] = None,
             vendor: Optional[pulumi.Input[str]] = None,
             __props__=None):
    """
    Allows for creating and managing LDAP user federation providers within Keycloak.
    Keycloak can use an LDAP user federation provider to federate users to Keycloak
    from a directory system such as LDAP or Active Directory. Federated users
    will exist within the realm and will be able to log in to clients. Federated
    users can have their attributes defined using mappers.
    ## Example Usage
    ```python
    import pulumi
    import pulumi_keycloak as keycloak
    realm = keycloak.Realm("realm",
        realm="my-realm",
        enabled=True)
    ldap_user_federation = keycloak.ldap.UserFederation("ldapUserFederation",
        realm_id=realm.id,
        enabled=True,
        username_ldap_attribute="cn",
        rdn_ldap_attribute="cn",
        uuid_ldap_attribute="entryDN",
        user_object_classes=[
            "simpleSecurityObject",
            "organizationalRole",
        ],
        connection_url="ldap://openldap",
        users_dn="dc=example,dc=org",
        bind_dn="cn=admin,dc=example,dc=org",
        bind_credential="admin",
        connection_timeout="5s",
        read_timeout="10s",
        kerberos=keycloak.ldap.UserFederationKerberosArgs(
            kerberos_realm="FOO.LOCAL",
            server_principal="HTTP/host.foo.com@FOO.LOCAL",
            key_tab="/etc/host.keytab",
        ))
    ```
    ## Import
    LDAP user federation providers can be imported using the format `{{realm_id}}/{{ldap_user_federation_id}}`. The ID of the LDAP user federation provider can be found within the Keycloak GUI and is typically a GUID.
    ```sh
    $ pulumi import keycloak:ldap/userFederation:UserFederation ldap_user_federation my-realm/af2a6ca3-e4d7-49c3-b08b-1b3c70b4b860
    ```
    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[int] batch_size_for_sync: The number of users to sync within a single transaction. Defaults to `1000`.
    :param pulumi.Input[str] bind_credential: Password of LDAP admin. This attribute must be set if `bind_dn` is set.
    :param pulumi.Input[str] bind_dn: DN of LDAP admin, which will be used by Keycloak to access LDAP server. This attribute must be set if `bind_credential` is set.
    :param pulumi.Input[pulumi.InputType['UserFederationCacheArgs']] cache: A block containing the cache settings.
    :param pulumi.Input[int] changed_sync_period: How frequently Keycloak should sync changed LDAP users, in seconds. Omit this property to disable periodic changed users sync.
    :param pulumi.Input[str] connection_timeout: LDAP connection timeout in the format of a [Go duration string](https://golang.org/pkg/time/#Duration.String).
    :param pulumi.Input[str] connection_url: Connection URL to the LDAP server.
    :param pulumi.Input[str] custom_user_search_filter: Additional LDAP filter for filtering searched users. Must begin with `(` and end with `)`.
    :param pulumi.Input[str] edit_mode: Can be one of `READ_ONLY`, `WRITABLE`, or `UNSYNCED`. `UNSYNCED` allows user data to be imported but not synced back to LDAP. Defaults to `READ_ONLY`.
    :param pulumi.Input[bool] enabled: When `false`, this provider will not be used when performing queries for users. Defaults to `true`.
    :param pulumi.Input[int] full_sync_period: How frequently Keycloak should sync all LDAP users, in seconds. Omit this property to disable periodic full sync.
    :param pulumi.Input[bool] import_enabled: When `true`, LDAP users will be imported into the Keycloak database. Defaults to `true`.
    :param pulumi.Input[pulumi.InputType['UserFederationKerberosArgs']] kerberos: A block containing the kerberos settings.
    :param pulumi.Input[str] name: Display name of the provider when displayed in the console.
    :param pulumi.Input[bool] pagination: When true, Keycloak assumes the LDAP server supports pagination. Defaults to `true`.
    :param pulumi.Input[int] priority: Priority of this provider when looking up users. Lower values are first. Defaults to `0`.
    :param pulumi.Input[str] rdn_ldap_attribute: Name of the LDAP attribute to use as the relative distinguished name.
    :param pulumi.Input[str] read_timeout: LDAP read timeout in the format of a [Go duration string](https://golang.org/pkg/time/#Duration.String).
    :param pulumi.Input[str] realm_id: The realm that this provider will provide user federation for.
    :param pulumi.Input[str] search_scope: Can be one of `ONE_LEVEL` or `SUBTREE`:
           - `ONE_LEVEL`: Only search for users in the DN specified by `user_dn`.
           - `SUBTREE`: Search entire LDAP subtree.
    :param pulumi.Input[bool] start_tls: When `true`, Keycloak will encrypt the connection to LDAP using STARTTLS, which will disable connection pooling.
    :param pulumi.Input[bool] sync_registrations: When `true`, newly created users will be synced back to LDAP. Defaults to `false`.
    :param pulumi.Input[bool] trust_email: If enabled, email provided by this provider is not verified even if verification is enabled for the realm.
    :param pulumi.Input[bool] use_password_modify_extended_op: When `true`, use the LDAPv3 Password Modify Extended Operation (RFC-3062).
    :param pulumi.Input[str] use_truststore_spi: Can be one of `ALWAYS`, `ONLY_FOR_LDAPS`, or `NEVER`:
    :param pulumi.Input[Sequence[pulumi.Input[str]]] user_object_classes: Array of all values of LDAP objectClass attribute for users in LDAP. Must contain at least one.
    :param pulumi.Input[str] username_ldap_attribute: Name of the LDAP attribute to use as the Keycloak username.
    :param pulumi.Input[str] users_dn: Full DN of LDAP tree where your users are.
    :param pulumi.Input[str] uuid_ldap_attribute: Name of the LDAP attribute to use as a unique object identifier for objects in LDAP.
    :param pulumi.Input[bool] validate_password_policy: When `true`, Keycloak will validate passwords using the realm policy before updating it.
    :param pulumi.Input[str] vendor: Can be one of `OTHER`, `EDIRECTORY`, `AD`, `RHDS`, or `TIVOLI`. When this is selected in the GUI, it provides reasonable defaults for other fields. When used with the Keycloak API, this attribute does nothing, but is still required. Defaults to `OTHER`.
    """
    ...
@overload
def __init__(__self__,
             resource_name: str,
             args: UserFederationArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Allows for creating and managing LDAP user federation providers within Keycloak.
    Keycloak can use an LDAP user federation provider to federate users to Keycloak
    from a directory system such as LDAP or Active Directory. Federated users
    will exist within the realm and will be able to log in to clients. Federated
    users can have their attributes defined using mappers.
    ## Example Usage
    ```python
    import pulumi
    import pulumi_keycloak as keycloak
    realm = keycloak.Realm("realm",
        realm="my-realm",
        enabled=True)
    ldap_user_federation = keycloak.ldap.UserFederation("ldapUserFederation",
        realm_id=realm.id,
        enabled=True,
        username_ldap_attribute="cn",
        rdn_ldap_attribute="cn",
        uuid_ldap_attribute="entryDN",
        user_object_classes=[
            "simpleSecurityObject",
            "organizationalRole",
        ],
        connection_url="ldap://openldap",
        users_dn="dc=example,dc=org",
        bind_dn="cn=admin,dc=example,dc=org",
        bind_credential="admin",
        connection_timeout="5s",
        read_timeout="10s",
        kerberos=keycloak.ldap.UserFederationKerberosArgs(
            kerberos_realm="FOO.LOCAL",
            server_principal="HTTP/host.foo.com@FOO.LOCAL",
            key_tab="/etc/host.keytab",
        ))
    ```
    ## Import
    LDAP user federation providers can be imported using the format `{{realm_id}}/{{ldap_user_federation_id}}`. The ID of the LDAP user federation provider can be found within the Keycloak GUI and is typically a GUID.
    ```sh
    $ pulumi import keycloak:ldap/userFederation:UserFederation ldap_user_federation my-realm/af2a6ca3-e4d7-49c3-b08b-1b3c70b4b860
    ```
    :param str resource_name: The name of the resource.
    :param UserFederationArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    # Runtime dispatcher between the two typed overloads above: accepts either
    # a single UserFederationArgs object or individual keyword arguments.
    resource_args, opts = _utilities.get_resource_args_opts(UserFederationArgs, pulumi.ResourceOptions, *args, **kwargs)
    if resource_args is not None:
        # Args-object form: expand the args instance into keyword arguments.
        __self__._internal_init(resource_name, opts, **resource_args.__dict__)
    else:
        # Keyword form: forward positional/keyword arguments unchanged.
        __self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   batch_size_for_sync: Optional[pulumi.Input[int]] = None,
                   bind_credential: Optional[pulumi.Input[str]] = None,
                   bind_dn: Optional[pulumi.Input[str]] = None,
                   cache: Optional[pulumi.Input[pulumi.InputType['UserFederationCacheArgs']]] = None,
                   changed_sync_period: Optional[pulumi.Input[int]] = None,
                   connection_timeout: Optional[pulumi.Input[str]] = None,
                   connection_url: Optional[pulumi.Input[str]] = None,
                   custom_user_search_filter: Optional[pulumi.Input[str]] = None,
                   edit_mode: Optional[pulumi.Input[str]] = None,
                   enabled: Optional[pulumi.Input[bool]] = None,
                   full_sync_period: Optional[pulumi.Input[int]] = None,
                   import_enabled: Optional[pulumi.Input[bool]] = None,
                   kerberos: Optional[pulumi.Input[pulumi.InputType['UserFederationKerberosArgs']]] = None,
                   name: Optional[pulumi.Input[str]] = None,
                   pagination: Optional[pulumi.Input[bool]] = None,
                   priority: Optional[pulumi.Input[int]] = None,
                   rdn_ldap_attribute: Optional[pulumi.Input[str]] = None,
                   read_timeout: Optional[pulumi.Input[str]] = None,
                   realm_id: Optional[pulumi.Input[str]] = None,
                   search_scope: Optional[pulumi.Input[str]] = None,
                   start_tls: Optional[pulumi.Input[bool]] = None,
                   sync_registrations: Optional[pulumi.Input[bool]] = None,
                   trust_email: Optional[pulumi.Input[bool]] = None,
                   use_password_modify_extended_op: Optional[pulumi.Input[bool]] = None,
                   use_truststore_spi: Optional[pulumi.Input[str]] = None,
                   user_object_classes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   username_ldap_attribute: Optional[pulumi.Input[str]] = None,
                   users_dn: Optional[pulumi.Input[str]] = None,
                   uuid_ldap_attribute: Optional[pulumi.Input[str]] = None,
                   validate_password_policy: Optional[pulumi.Input[bool]] = None,
                   vendor: Optional[pulumi.Input[str]] = None,
                   __props__=None):
    # Shared initializer behind both __init__ overloads: validates options,
    # builds the input property bag, and registers the resource with the engine.
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        # Pin the provider plugin version when the caller did not specify one.
        opts.version = _utilities.get_version()
    if opts.id is None:
        # No id => we are creating a new resource, so the property bag is
        # assembled here. (__props__ is reserved for engine-internal lookups.)
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = UserFederationArgs.__new__(UserFederationArgs)
        __props__.__dict__["batch_size_for_sync"] = batch_size_for_sync
        __props__.__dict__["bind_credential"] = bind_credential
        __props__.__dict__["bind_dn"] = bind_dn
        __props__.__dict__["cache"] = cache
        __props__.__dict__["changed_sync_period"] = changed_sync_period
        __props__.__dict__["connection_timeout"] = connection_timeout
        # Required properties are only enforced when not rehydrating by URN.
        if connection_url is None and not opts.urn:
            raise TypeError("Missing required property 'connection_url'")
        __props__.__dict__["connection_url"] = connection_url
        __props__.__dict__["custom_user_search_filter"] = custom_user_search_filter
        __props__.__dict__["edit_mode"] = edit_mode
        __props__.__dict__["enabled"] = enabled
        __props__.__dict__["full_sync_period"] = full_sync_period
        __props__.__dict__["import_enabled"] = import_enabled
        __props__.__dict__["kerberos"] = kerberos
        __props__.__dict__["name"] = name
        __props__.__dict__["pagination"] = pagination
        __props__.__dict__["priority"] = priority
        if rdn_ldap_attribute is None and not opts.urn:
            raise TypeError("Missing required property 'rdn_ldap_attribute'")
        __props__.__dict__["rdn_ldap_attribute"] = rdn_ldap_attribute
        __props__.__dict__["read_timeout"] = read_timeout
        if realm_id is None and not opts.urn:
            raise TypeError("Missing required property 'realm_id'")
        __props__.__dict__["realm_id"] = realm_id
        __props__.__dict__["search_scope"] = search_scope
        __props__.__dict__["start_tls"] = start_tls
        __props__.__dict__["sync_registrations"] = sync_registrations
        __props__.__dict__["trust_email"] = trust_email
        __props__.__dict__["use_password_modify_extended_op"] = use_password_modify_extended_op
        __props__.__dict__["use_truststore_spi"] = use_truststore_spi
        if user_object_classes is None and not opts.urn:
            raise TypeError("Missing required property 'user_object_classes'")
        __props__.__dict__["user_object_classes"] = user_object_classes
        if username_ldap_attribute is None and not opts.urn:
            raise TypeError("Missing required property 'username_ldap_attribute'")
        __props__.__dict__["username_ldap_attribute"] = username_ldap_attribute
        if users_dn is None and not opts.urn:
            raise TypeError("Missing required property 'users_dn'")
        __props__.__dict__["users_dn"] = users_dn
        if uuid_ldap_attribute is None and not opts.urn:
            raise TypeError("Missing required property 'uuid_ldap_attribute'")
        __props__.__dict__["uuid_ldap_attribute"] = uuid_ldap_attribute
        __props__.__dict__["validate_password_policy"] = validate_password_policy
        __props__.__dict__["vendor"] = vendor
    # Register the resource with the Pulumi engine under its fully-qualified type token.
    super(UserFederation, __self__).__init__(
        'keycloak:ldap/userFederation:UserFederation',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        batch_size_for_sync: Optional[pulumi.Input[int]] = None,
        bind_credential: Optional[pulumi.Input[str]] = None,
        bind_dn: Optional[pulumi.Input[str]] = None,
        cache: Optional[pulumi.Input[pulumi.InputType['UserFederationCacheArgs']]] = None,
        changed_sync_period: Optional[pulumi.Input[int]] = None,
        connection_timeout: Optional[pulumi.Input[str]] = None,
        connection_url: Optional[pulumi.Input[str]] = None,
        custom_user_search_filter: Optional[pulumi.Input[str]] = None,
        edit_mode: Optional[pulumi.Input[str]] = None,
        enabled: Optional[pulumi.Input[bool]] = None,
        full_sync_period: Optional[pulumi.Input[int]] = None,
        import_enabled: Optional[pulumi.Input[bool]] = None,
        kerberos: Optional[pulumi.Input[pulumi.InputType['UserFederationKerberosArgs']]] = None,
        name: Optional[pulumi.Input[str]] = None,
        pagination: Optional[pulumi.Input[bool]] = None,
        priority: Optional[pulumi.Input[int]] = None,
        rdn_ldap_attribute: Optional[pulumi.Input[str]] = None,
        read_timeout: Optional[pulumi.Input[str]] = None,
        realm_id: Optional[pulumi.Input[str]] = None,
        search_scope: Optional[pulumi.Input[str]] = None,
        start_tls: Optional[pulumi.Input[bool]] = None,
        sync_registrations: Optional[pulumi.Input[bool]] = None,
        trust_email: Optional[pulumi.Input[bool]] = None,
        use_password_modify_extended_op: Optional[pulumi.Input[bool]] = None,
        use_truststore_spi: Optional[pulumi.Input[str]] = None,
        user_object_classes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        username_ldap_attribute: Optional[pulumi.Input[str]] = None,
        users_dn: Optional[pulumi.Input[str]] = None,
        uuid_ldap_attribute: Optional[pulumi.Input[str]] = None,
        validate_password_policy: Optional[pulumi.Input[bool]] = None,
        vendor: Optional[pulumi.Input[str]] = None) -> 'UserFederation':
    """
    Get an existing UserFederation resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.
    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[int] batch_size_for_sync: The number of users to sync within a single transaction. Defaults to `1000`.
    :param pulumi.Input[str] bind_credential: Password of LDAP admin. This attribute must be set if `bind_dn` is set.
    :param pulumi.Input[str] bind_dn: DN of LDAP admin, which will be used by Keycloak to access LDAP server. This attribute must be set if `bind_credential` is set.
    :param pulumi.Input[pulumi.InputType['UserFederationCacheArgs']] cache: A block containing the cache settings.
    :param pulumi.Input[int] changed_sync_period: How frequently Keycloak should sync changed LDAP users, in seconds. Omit this property to disable periodic changed users sync.
    :param pulumi.Input[str] connection_timeout: LDAP connection timeout in the format of a [Go duration string](https://golang.org/pkg/time/#Duration.String).
    :param pulumi.Input[str] connection_url: Connection URL to the LDAP server.
    :param pulumi.Input[str] custom_user_search_filter: Additional LDAP filter for filtering searched users. Must begin with `(` and end with `)`.
    :param pulumi.Input[str] edit_mode: Can be one of `READ_ONLY`, `WRITABLE`, or `UNSYNCED`. `UNSYNCED` allows user data to be imported but not synced back to LDAP. Defaults to `READ_ONLY`.
    :param pulumi.Input[bool] enabled: When `false`, this provider will not be used when performing queries for users. Defaults to `true`.
    :param pulumi.Input[int] full_sync_period: How frequently Keycloak should sync all LDAP users, in seconds. Omit this property to disable periodic full sync.
    :param pulumi.Input[bool] import_enabled: When `true`, LDAP users will be imported into the Keycloak database. Defaults to `true`.
    :param pulumi.Input[pulumi.InputType['UserFederationKerberosArgs']] kerberos: A block containing the kerberos settings.
    :param pulumi.Input[str] name: Display name of the provider when displayed in the console.
    :param pulumi.Input[bool] pagination: When true, Keycloak assumes the LDAP server supports pagination. Defaults to `true`.
    :param pulumi.Input[int] priority: Priority of this provider when looking up users. Lower values are first. Defaults to `0`.
    :param pulumi.Input[str] rdn_ldap_attribute: Name of the LDAP attribute to use as the relative distinguished name.
    :param pulumi.Input[str] read_timeout: LDAP read timeout in the format of a [Go duration string](https://golang.org/pkg/time/#Duration.String).
    :param pulumi.Input[str] realm_id: The realm that this provider will provide user federation for.
    :param pulumi.Input[str] search_scope: Can be one of `ONE_LEVEL` or `SUBTREE`:
           - `ONE_LEVEL`: Only search for users in the DN specified by `user_dn`.
           - `SUBTREE`: Search entire LDAP subtree.
    :param pulumi.Input[bool] start_tls: When `true`, Keycloak will encrypt the connection to LDAP using STARTTLS, which will disable connection pooling.
    :param pulumi.Input[bool] sync_registrations: When `true`, newly created users will be synced back to LDAP. Defaults to `false`.
    :param pulumi.Input[bool] trust_email: If enabled, email provided by this provider is not verified even if verification is enabled for the realm.
    :param pulumi.Input[bool] use_password_modify_extended_op: When `true`, use the LDAPv3 Password Modify Extended Operation (RFC-3062).
    :param pulumi.Input[str] use_truststore_spi: Can be one of `ALWAYS`, `ONLY_FOR_LDAPS`, or `NEVER`:
    :param pulumi.Input[Sequence[pulumi.Input[str]]] user_object_classes: Array of all values of LDAP objectClass attribute for users in LDAP. Must contain at least one.
    :param pulumi.Input[str] username_ldap_attribute: Name of the LDAP attribute to use as the Keycloak username.
    :param pulumi.Input[str] users_dn: Full DN of LDAP tree where your users are.
    :param pulumi.Input[str] uuid_ldap_attribute: Name of the LDAP attribute to use as a unique object identifier for objects in LDAP.
    :param pulumi.Input[bool] validate_password_policy: When `true`, Keycloak will validate passwords using the realm policy before updating it.
    :param pulumi.Input[str] vendor: Can be one of `OTHER`, `EDIRECTORY`, `AD`, `RHDS`, or `TIVOLI`. When this is selected in the GUI, it provides reasonable defaults for other fields. When used with the Keycloak API, this attribute does nothing, but is still required. Defaults to `OTHER`.
    """
    # Attach the provider-assigned id so the engine performs a read instead of a create.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    # Build a state bag; any property left as None is read back from the provider.
    __props__ = _UserFederationState.__new__(_UserFederationState)
    __props__.__dict__["batch_size_for_sync"] = batch_size_for_sync
    __props__.__dict__["bind_credential"] = bind_credential
    __props__.__dict__["bind_dn"] = bind_dn
    __props__.__dict__["cache"] = cache
    __props__.__dict__["changed_sync_period"] = changed_sync_period
    __props__.__dict__["connection_timeout"] = connection_timeout
    __props__.__dict__["connection_url"] = connection_url
    __props__.__dict__["custom_user_search_filter"] = custom_user_search_filter
    __props__.__dict__["edit_mode"] = edit_mode
    __props__.__dict__["enabled"] = enabled
    __props__.__dict__["full_sync_period"] = full_sync_period
    __props__.__dict__["import_enabled"] = import_enabled
    __props__.__dict__["kerberos"] = kerberos
    __props__.__dict__["name"] = name
    __props__.__dict__["pagination"] = pagination
    __props__.__dict__["priority"] = priority
    __props__.__dict__["rdn_ldap_attribute"] = rdn_ldap_attribute
    __props__.__dict__["read_timeout"] = read_timeout
    __props__.__dict__["realm_id"] = realm_id
    __props__.__dict__["search_scope"] = search_scope
    __props__.__dict__["start_tls"] = start_tls
    __props__.__dict__["sync_registrations"] = sync_registrations
    __props__.__dict__["trust_email"] = trust_email
    __props__.__dict__["use_password_modify_extended_op"] = use_password_modify_extended_op
    __props__.__dict__["use_truststore_spi"] = use_truststore_spi
    __props__.__dict__["user_object_classes"] = user_object_classes
    __props__.__dict__["username_ldap_attribute"] = username_ldap_attribute
    __props__.__dict__["users_dn"] = users_dn
    __props__.__dict__["uuid_ldap_attribute"] = uuid_ldap_attribute
    __props__.__dict__["validate_password_policy"] = validate_password_policy
    __props__.__dict__["vendor"] = vendor
    return UserFederation(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="batchSizeForSync")
def batch_size_for_sync(self) -> pulumi.Output[Optional[int]]:
"""
The number of users to sync within a single transaction. Defaults to `1000`.
"""
return pulumi.get(self, "batch_size_for_sync")
@property
@pulumi.getter(name="bindCredential")
def bind_credential(self) -> pulumi.Output[Optional[str]]:
"""
Password of LDAP admin. This attribute must be set if `bind_dn` is set.
"""
return pulumi.get(self, "bind_credential")
@property
@pulumi.getter(name="bindDn")
def bind_dn(self) -> pulumi.Output[Optional[str]]:
"""
DN of LDAP admin, which will be used by Keycloak to access LDAP server. This attribute must be set if `bind_credential` is set.
"""
return pulumi.get(self, "bind_dn")
@property
@pulumi.getter
def cache(self) -> pulumi.Output[Optional['outputs.UserFederationCache']]:
"""
A block containing the cache settings.
"""
return pulumi.get(self, "cache")
@property
@pulumi.getter(name="changedSyncPeriod")
def changed_sync_period(self) -> pulumi.Output[Optional[int]]:
"""
How frequently Keycloak should sync changed LDAP users, in seconds. Omit this property to disable periodic changed users sync.
"""
return pulumi.get(self, "changed_sync_period")
@property
@pulumi.getter(name="connectionTimeout")
def connection_timeout(self) -> pulumi.Output[Optional[str]]:
"""
LDAP connection timeout in the format of a [Go duration string](https://golang.org/pkg/time/#Duration.String).
"""
return pulumi.get(self, "connection_timeout")
@property
@pulumi.getter(name="connectionUrl")
def connection_url(self) -> pulumi.Output[str]:
"""
Connection URL to the LDAP server.
"""
return pulumi.get(self, "connection_url")
@property
@pulumi.getter(name="customUserSearchFilter")
def custom_user_search_filter(self) -> pulumi.Output[Optional[str]]:
"""
Additional LDAP filter for filtering searched users. Must begin with `(` and end with `)`.
"""
return pulumi.get(self, "custom_user_search_filter")
@property
@pulumi.getter(name="editMode")
def edit_mode(self) -> pulumi.Output[Optional[str]]:
"""
Can be one of `READ_ONLY`, `WRITABLE`, or `UNSYNCED`. `UNSYNCED` allows user data to be imported but not synced back to LDAP. Defaults to `READ_ONLY`.
"""
return pulumi.get(self, "edit_mode")
@property
@pulumi.getter
def enabled(self) -> pulumi.Output[Optional[bool]]:
"""
When `false`, this provider will not be used when performing queries for users. Defaults to `true`.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter(name="fullSyncPeriod")
def full_sync_period(self) -> pulumi.Output[Optional[int]]:
"""
How frequently Keycloak should sync all LDAP users, in seconds. Omit this property to disable periodic full sync.
"""
return pulumi.get(self, "full_sync_period")
@property
@pulumi.getter(name="importEnabled")
def import_enabled(self) -> pulumi.Output[Optional[bool]]:
"""
When `true`, LDAP users will be imported into the Keycloak database. Defaults to `true`.
"""
return pulumi.get(self, "import_enabled")
@property
@pulumi.getter
def kerberos(self) -> pulumi.Output[Optional['outputs.UserFederationKerberos']]:
"""
A block containing the kerberos settings.
"""
return pulumi.get(self, "kerberos")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Display name of the provider when displayed in the console.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def pagination(self) -> pulumi.Output[Optional[bool]]:
"""
When true, Keycloak assumes the LDAP server supports pagination. Defaults to `true`.
"""
return pulumi.get(self, "pagination")
@property
@pulumi.getter
def priority(self) -> pulumi.Output[Optional[int]]:
"""
Priority of this provider when looking up users. Lower values are first. Defaults to `0`.
"""
return pulumi.get(self, "priority")
@property
@pulumi.getter(name="rdnLdapAttribute")
def rdn_ldap_attribute(self) -> pulumi.Output[str]:
"""
Name of the LDAP attribute to use as the relative distinguished name.
"""
return pulumi.get(self, "rdn_ldap_attribute")
@property
@pulumi.getter(name="readTimeout")
def read_timeout(self) -> pulumi.Output[Optional[str]]:
"""
LDAP read timeout in the format of a [Go duration string](https://golang.org/pkg/time/#Duration.String).
"""
return pulumi.get(self, "read_timeout")
@property
@pulumi.getter(name="realmId")
def realm_id(self) -> pulumi.Output[str]:
"""
The realm that this provider will provide user federation for.
"""
return pulumi.get(self, "realm_id")
@property
@pulumi.getter(name="searchScope")
def search_scope(self) -> pulumi.Output[Optional[str]]:
"""
Can be one of `ONE_LEVEL` or `SUBTREE`:
- `ONE_LEVEL`: Only search for users in the DN specified by `user_dn`.
- `SUBTREE`: Search entire LDAP subtree.
"""
return pulumi.get(self, "search_scope")
@property
@pulumi.getter(name="startTls")
def start_tls(self) -> pulumi.Output[Optional[bool]]:
"""
When `true`, Keycloak will encrypt the connection to LDAP using STARTTLS, which will disable connection pooling.
"""
return pulumi.get(self, "start_tls")
@property
@pulumi.getter(name="syncRegistrations")
def sync_registrations(self) -> pulumi.Output[Optional[bool]]:
"""
When `true`, newly created users will be synced back to LDAP. Defaults to `false`.
"""
return pulumi.get(self, "sync_registrations")
@property
@pulumi.getter(name="trustEmail")
def trust_email(self) -> pulumi.Output[Optional[bool]]:
"""
If enabled, email provided by this provider is not verified even if verification is enabled for the realm.
"""
return pulumi.get(self, "trust_email")
@property
@pulumi.getter(name="usePasswordModifyExtendedOp")
def use_password_modify_extended_op(self) -> pulumi.Output[Optional[bool]]:
"""
When `true`, use the LDAPv3 Password Modify Extended Operation (RFC-3062).
"""
return pulumi.get(self, "use_password_modify_extended_op")
@property
@pulumi.getter(name="useTruststoreSpi")
def use_truststore_spi(self) -> pulumi.Output[Optional[str]]:
"""
Can be one of `ALWAYS`, `ONLY_FOR_LDAPS`, or `NEVER`:
"""
return pulumi.get(self, "use_truststore_spi")
@property
@pulumi.getter(name="userObjectClasses")
def user_object_classes(self) -> pulumi.Output[Sequence[str]]:
"""
Array of all values of LDAP objectClass attribute for users in LDAP. Must contain at least one.
"""
return pulumi.get(self, "user_object_classes")
@property
@pulumi.getter(name="usernameLdapAttribute")
def username_ldap_attribute(self) -> pulumi.Output[str]:
"""
Name of the LDAP attribute to use as the Keycloak username.
"""
return pulumi.get(self, "username_ldap_attribute")
@property
@pulumi.getter(name="usersDn")
def users_dn(self) -> pulumi.Output[str]:
"""
Full DN of LDAP tree where your users are.
"""
return pulumi.get(self, "users_dn")
@property
@pulumi.getter(name="uuidLdapAttribute")
def uuid_ldap_attribute(self) -> pulumi.Output[str]:
"""
Name of the LDAP attribute to use as a unique object identifier for objects in LDAP.
"""
return pulumi.get(self, "uuid_ldap_attribute")
@property
@pulumi.getter(name="validatePasswordPolicy")
def validate_password_policy(self) -> pulumi.Output[Optional[bool]]:
"""
When `true`, Keycloak will validate passwords using the realm policy before updating it.
"""
return pulumi.get(self, "validate_password_policy")
@property
@pulumi.getter
def vendor(self) -> pulumi.Output[Optional[str]]:
"""
Can be one of `OTHER`, `EDIRECTORY`, `AD`, `RHDS`, or `TIVOLI`. When this is selected in the GUI, it provides reasonable defaults for other fields. When used with the Keycloak API, this attribute does nothing, but is still required. Defaults to `OTHER`.
"""
return pulumi.get(self, "vendor")
| 50.023339
| 294
| 0.665411
| 10,320
| 83,589
| 5.173837
| 0.035659
| 0.086527
| 0.091808
| 0.051916
| 0.966738
| 0.957973
| 0.950144
| 0.942278
| 0.936997
| 0.91868
| 0
| 0.00176
| 0.231849
| 83,589
| 1,670
| 295
| 50.053293
| 0.829806
| 0.355346
| 0
| 0.866321
| 1
| 0
| 0.120295
| 0.036333
| 0
| 0
| 0
| 0
| 0
| 1
| 0.167876
| false
| 0.054922
| 0.034197
| 0
| 0.302591
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
1a0dd2a399ab6b26cf3585152f508b1ab260e7e2
| 3,385
|
py
|
Python
|
src/model/player_pair_test.py
|
cpatrasciuc/schnapsen-card-game
|
e5131ae91c71d341968d682fd625aff3f97cc516
|
[
"BSD-3-Clause"
] | null | null | null |
src/model/player_pair_test.py
|
cpatrasciuc/schnapsen-card-game
|
e5131ae91c71d341968d682fd625aff3f97cc516
|
[
"BSD-3-Clause"
] | 18
|
2021-06-03T01:54:51.000Z
|
2022-03-30T06:12:08.000Z
|
src/model/player_pair_test.py
|
cpatrasciuc/schnapsen-card-game
|
e5131ae91c71d341968d682fd625aff3f97cc516
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2021 Cristian Patrasciuc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from typing import List
from model.player_id import PlayerId
from model.player_pair import PlayerPair
class PlayerPairTest(unittest.TestCase):
    """Unit tests for the PlayerPair container."""

    def test_pair_of_ints(self):
        """Int fields default to None and are independent across instances."""
        pair: PlayerPair[int] = PlayerPair()
        self.assertIsNone(pair.one)
        self.assertIsNone(pair.two)

        pair.one = 100
        self.assertEqual(100, pair.one)
        self.assertIsNone(pair.two)

        pair.two = 200
        self.assertEqual(100, pair.one)
        self.assertEqual(200, pair.two)

        # A freshly constructed pair must start empty and share no state
        # with the already-populated one.
        other: PlayerPair[int] = PlayerPair()
        self.assertIsNone(other.one)
        self.assertIsNone(other.two)
        self.assertEqual(100, pair.one)
        self.assertEqual(200, pair.two)

        other.one = 300
        self.assertEqual(300, other.one)
        self.assertIsNone(other.two)
        self.assertEqual(100, pair.one)
        self.assertEqual(200, pair.two)

    def test_pair_of_lists(self):
        """Same independence checks, but with mutable list payloads."""
        pair: PlayerPair[List[int]] = PlayerPair()
        self.assertIsNone(pair.one)
        self.assertIsNone(pair.two)

        pair.one = [1, 2, 3]
        self.assertEqual([1, 2, 3], pair.one)
        self.assertIsNone(pair.two)

        pair.two = [2, 3, 4]
        self.assertEqual([1, 2, 3], pair.one)
        self.assertEqual([2, 3, 4], pair.two)

        # A second pair starts empty and leaves the first one untouched.
        other: PlayerPair[List[int]] = PlayerPair()
        self.assertIsNone(other.one)
        self.assertIsNone(other.two)
        self.assertEqual([1, 2, 3], pair.one)
        self.assertEqual([2, 3, 4], pair.two)

        other.one = [10, 20, 30]
        self.assertEqual([10, 20, 30], other.one)
        self.assertIsNone(other.two)
        self.assertEqual([1, 2, 3], pair.one)
        self.assertEqual([2, 3, 4], pair.two)

    def test_index_by_player_id(self):
        """pair[PlayerId.X] reads and writes the same slot as the attribute."""
        pair: PlayerPair[int] = PlayerPair(123, 345)
        self.assertEqual(123, pair.one)
        self.assertEqual(123, pair[PlayerId.ONE])
        self.assertEqual(345, pair.two)
        self.assertEqual(345, pair[PlayerId.TWO])

        pair[PlayerId.ONE] = 678
        self.assertEqual(678, pair.one)
        self.assertEqual(678, pair[PlayerId.ONE])
        self.assertEqual(345, pair.two)
        self.assertEqual(345, pair[PlayerId.TWO])

        pair[PlayerId.TWO] = 90
        self.assertEqual(678, pair.one)
        self.assertEqual(678, pair[PlayerId.ONE])
        self.assertEqual(90, pair.two)
        self.assertEqual(90, pair[PlayerId.TWO])

    def test_cannot_index_by_other_types(self):
        """Both __getitem__ and __setitem__ reject non-PlayerId keys."""
        pair: PlayerPair[int] = PlayerPair(123, 345)
        for bad_key in (1, "string key"):
            with self.assertRaisesRegex(TypeError, "Keys must be of type PlayerId"):
                # noinspection PyTypeChecker
                print(pair[bad_key])
            with self.assertRaisesRegex(TypeError, "Keys must be of type PlayerId"):
                # noinspection PyTypeChecker
                pair[bad_key] = 100
| 39.360465
| 76
| 0.725554
| 481
| 3,385
| 4.925156
| 0.168399
| 0.202617
| 0.09751
| 0.10764
| 0.811735
| 0.769945
| 0.768257
| 0.746306
| 0.696496
| 0.688898
| 0
| 0.047552
| 0.1613
| 3,385
| 85
| 77
| 39.823529
| 0.786897
| 0.079173
| 0
| 0.56338
| 0
| 0
| 0.043758
| 0
| 0
| 0
| 0
| 0
| 0.619718
| 1
| 0.056338
| false
| 0
| 0.056338
| 0
| 0.126761
| 0.028169
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1a14fcd8fd2dae52cb3841657c7c0270129cf37b
| 24,854
|
py
|
Python
|
domonit/stats.py
|
riley022/DoMonit
|
4ba0e9b145db03db579e38a126929c29f6f64e6c
|
[
"MIT"
] | null | null | null |
domonit/stats.py
|
riley022/DoMonit
|
4ba0e9b145db03db579e38a126929c29f6f64e6c
|
[
"MIT"
] | 1
|
2018-05-22T09:50:35.000Z
|
2018-05-22T09:50:35.000Z
|
domonit/stats.py
|
riley022/DoMonit
|
4ba0e9b145db03db579e38a126929c29f6f64e6c
|
[
"MIT"
] | null | null | null |
import requests_unixsocket
import json
from utils.utils import Utils
# Module-level shared Utils instance; every Stats accessor below uses it to
# validate the cached HTTP response via u.check_resp(...).
u = Utils()
#https://docs.docker.com/engine/reference/api/docker_remote_api_v1.24/
class Stats():
    """Read-only accessors over the Docker remote API container stats endpoint.

    One GET request is issued at construction time and cached on ``self.resp``;
    every accessor re-parses that cached response and returns the requested
    field formatted as a string via ``'{}'.format(...)``.
    See https://docs.docker.com/engine/reference/api/docker_remote_api_v1.24/
    """

    def __init__(self, container_id, stream="0"):
        """Fetch ``/containers/<container_id>/stats?stream=<stream>`` over the Docker Unix socket.

        container_id -- ID (or name) of the container to query.
        stream       -- forwarded as the ``stream`` query parameter; the
                        default "0" asks the daemon for a single snapshot.
        """
        self.container_id = container_id
        self.stream = stream
        self.base = "http+unix://%2Fvar%2Frun%2Fdocker.sock"
        self.url = "/containers/%s/stats?stream=%s" % (self.container_id, self.stream)
        self.session = requests_unixsocket.Session()
        try:
            self.resp = self.session.get(self.base + self.url)
        except Exception as ex:
            template = "An exception of type {0} occured. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            # Fix: the original used the Python-2-only statement `print message`,
            # which is a SyntaxError on Python 3. The parenthesized form behaves
            # identically on Python 2 (prints one object) and Python 3.
            print(message)

    # ------------------------------------------------------------------
    # Internal helpers, factored out of ~70 near-identical accessors.
    # ------------------------------------------------------------------

    def _json(self):
        """Validate the cached response with Utils and return its parsed JSON body."""
        u.check_resp(self.resp.status_code, self.url)
        return self.resp.json()

    def _value(self, *keys):
        """Walk *keys* into the stats document and return the value as a string."""
        value = self._json()
        for key in keys:
            value = value[key]
        return '{}'.format(value)

    def _memory_stat(self, field):
        """Return ``memory_stats.stats.<field>`` as a string."""
        return self._value("memory_stats", "stats", field)

    def _network_value(self, interface, field):
        """Shared body of the per-interface rx_*/tx_* accessors.

        NOTE(review): NoSuchContainerError and ServerErrorError are neither
        defined nor imported anywhere in this module, so reaching either
        ``raise`` produces a NameError at runtime. Kept as-is to preserve the
        original behavior -- confirm where these exception types should come
        from.
        """
        self.interface = interface  # side effect preserved from the original code
        status = self.resp.status_code
        if status == 404:
            raise NoSuchContainerError('GET ' + self.url + ' {} '.format(status))
        elif status == 500:
            raise ServerErrorError('GET ' + self.url + ' {} '.format(status))
        return '{}'.format(self.resp.json()["networks"][interface][field])

    # ------------------------------------------------------------------
    # Top-level documents.
    # ------------------------------------------------------------------

    def stats(self):
        """Return the whole stats document as a parsed JSON object (dict)."""
        return self._json()

    def read(self):
        """Return the sample's "read" timestamp field as a string.

        Fix: the original had an unreachable ``return "test"`` after the real
        return; that dead statement has been removed.
        """
        return self._value("read")

    def pids_stats_current(self):
        return self._value("pids_stats", "current")

    # for multi networking inside a container:
    # https://github.com/docker/docker/issues/17750
    def networks(self):
        return self._value("networks")

    def interfaces(self):
        """Return the network interface names (str of the dict keys view)."""
        return '{}'.format(self._json()["networks"].keys())

    # Per-interface network counters.

    def rx_bytes(self, interface):
        return self._network_value(interface, "rx_bytes")

    def rx_dropped(self, interface):
        return self._network_value(interface, "rx_dropped")

    def rx_errors(self, interface):
        return self._network_value(interface, "rx_errors")

    def rx_packets(self, interface):
        return self._network_value(interface, "rx_packets")

    def tx_bytes(self, interface):
        return self._network_value(interface, "tx_bytes")

    def tx_dropped(self, interface):
        return self._network_value(interface, "tx_dropped")

    def tx_errors(self, interface):
        return self._network_value(interface, "tx_errors")

    def tx_packets(self, interface):
        return self._network_value(interface, "tx_packets")

    # Memory Stats

    def memory_stats(self):
        return self._value("memory_stats")

    def memory_stats_stats_unevictable(self):
        return self._memory_stat("unevictable")

    def memory_stats_stats_total_inactive_file(self):
        return self._memory_stat("total_inactive_file")

    def memory_stats_stats_total_rss_huge(self):
        return self._memory_stat("total_rss_huge")

    def memory_stats_stats_writeback(self):
        return self._memory_stat("writeback")

    def memory_stats_stats_total_cache(self):
        return self._memory_stat("total_cache")

    def memory_stats_stats_total_mapped_file(self):
        return self._memory_stat("total_mapped_file")

    def memory_stats_stats_mapped_file(self):
        return self._memory_stat("mapped_file")

    def memory_stats_stats_pgfault(self):
        return self._memory_stat("pgfault")

    def memory_stats_stats_total_writeback(self):
        return self._memory_stat("total_writeback")

    def memory_stats_stats_hierarchical_memory_limit(self):
        return self._memory_stat("hierarchical_memory_limit")

    def memory_stats_stats_total_active_file(self):
        return self._memory_stat("total_active_file")

    def memory_stats_stats_rss_huge(self):
        return self._memory_stat("rss_huge")

    def memory_stats_stats_cache(self):
        return self._memory_stat("cache")

    def memory_stats_stats_active_anon(self):
        return self._memory_stat("active_anon")

    def memory_stats_stats_pgmajfault(self):
        return self._memory_stat("pgmajfault")

    def memory_stats_stats_total_pgpgout(self):
        return self._memory_stat("total_pgpgout")

    def memory_stats_stats_pgpgout(self):
        return self._memory_stat("pgpgout")

    def memory_stats_stats_total_active_anon(self):
        return self._memory_stat("total_active_anon")

    # Fix: the original defined the next four methods twice with identical
    # bodies; the second definition merely rebound the same name, so the
    # redundant duplicates have been removed.

    def memory_stats_stats_total_unevictable(self):
        return self._memory_stat("total_unevictable")

    def memory_stats_stats_total_pgfault(self):
        return self._memory_stat("total_pgfault")

    def memory_stats_stats_total_pgmajfault(self):
        return self._memory_stat("total_pgmajfault")

    def memory_stats_stats_total_inactive_anon(self):
        return self._memory_stat("total_inactive_anon")

    def memory_stats_stats_inactive_file(self):
        return self._memory_stat("inactive_file")

    def memory_stats_stats_pgpgin(self):
        return self._memory_stat("pgpgin")

    def memory_stats_stats_total_pgpgin(self):
        return self._memory_stat("total_pgpgin")

    def memory_stats_stats_rss(self):
        return self._memory_stat("rss")

    def memory_stats_stats_active_file(self):
        return self._memory_stat("active_file")

    def memory_stats_stats_inactive_anon(self):
        return self._memory_stat("inactive_anon")

    def memory_stats_stats_total_rss(self):
        return self._memory_stat("total_rss")

    def memory_stats_stats_max_usage(self):
        # Despite the method name, this value lives directly under
        # memory_stats, not memory_stats.stats (matches the original path).
        return self._value("memory_stats", "max_usage")

    def memory_stats_usage(self):
        return self._value("memory_stats", "usage")

    def memory_stats_failcnt(self):
        return self._value("memory_stats", "failcnt")

    def memory_stats_limit(self):
        return self._value("memory_stats", "limit")

    # blkio_stats ToDo: io_service_time_recursive sectors_recursive
    # io_service_bytes_recursive io_time_recursive io_queue_recursive
    # io_merged_recursive io_wait_time_recursive
    def blkio_stats(self):
        return self._value("blkio_stats")

    # CPU

    def cpu_stats_cpu_stats(self):
        return self._value("cpu_stats")

    def cpu_stats_usage_in_usermode(self):
        """Time spent by tasks of the cgroup in user mode. Units: nanoseconds."""
        return self._value("cpu_stats", "cpu_usage", "usage_in_usermode")

    def cpu_stats_total_usage(self):
        """Total CPU time consumed. Units: nanoseconds."""
        return self._value("cpu_stats", "cpu_usage", "total_usage")

    def cpu_stats_percpu_usage(self):
        """Total CPU time consumed per core. Units: nanoseconds."""
        return self._value("cpu_stats", "cpu_usage", "percpu_usage")

    def cpu_stats_usage_in_kernelmode(self):
        """Time spent by tasks of the cgroup in kernel mode. Units: nanoseconds."""
        return self._value("cpu_stats", "cpu_usage", "usage_in_kernelmode")

    def cpu_stats_system_cpu_usage(self):
        """Return the host's cumulative CPU usage (user, system, idle, etc.) in nanoseconds."""
        return self._value("cpu_stats", "system_cpu_usage")

    def cpu_stats_throttling_data(self):
        return self._value("cpu_stats", "throttling_data")

    def cpu_stats_period(self):
        """Number of periods with throttling active."""
        return self._value("cpu_stats", "throttling_data", "periods")

    def cpu_stats_throttled_periods(self):
        """Number of periods when the container hits its throttling limit."""
        return self._value("cpu_stats", "throttling_data", "throttled_periods")

    def cpu_stats_throttled_time(self):
        """Aggregate time the container was throttled for in nanoseconds."""
        return self._value("cpu_stats", "throttling_data", "throttled_time")

    # Per CPU

    def percpu_stats(self):
        # NOTE(review): this accessor reads the "precpu_stats" key (the
        # previous-sample CPU block in the Docker stats payload) while every
        # other percpu_* accessor below reads "percpu_stats". One of the two
        # spellings is likely wrong -- confirm against the daemon's actual
        # payload. Both paths preserved from the original code.
        return self._value("precpu_stats")

    def percpu_usage_in_usermode(self):
        return self._value("percpu_stats", "cpu_usage", "usage_in_usermode")

    def percpu_total_usage(self):
        return self._value("percpu_stats", "cpu_usage", "total_usage")

    def percpu_percpu_usage(self):
        return self._value("percpu_stats", "cpu_usage", "percpu_usage")

    def percpu_usage_in_kernelmode(self):
        return self._value("percpu_stats", "cpu_usage", "usage_in_kernelmode")

    def percpu_system_cpu_usage(self):
        return self._value("percpu_stats", "system_cpu_usage")

    def percpu_throttling_data(self):
        return self._value("percpu_stats", "throttling_data")

    def percpu_period(self):
        return self._value("percpu_stats", "throttling_data", "periods")

    def percpu_throttled_periods(self):
        return self._value("percpu_stats", "throttling_data", "throttled_periods")

    def percpu_throttled_time(self):
        return self._value("percpu_stats", "throttling_data", "throttled_time")
| 29.413018
| 174
| 0.588718
| 2,975
| 24,854
| 4.651092
| 0.053782
| 0.161885
| 0.226639
| 0.088458
| 0.894269
| 0.866228
| 0.824167
| 0.818675
| 0.818675
| 0.813905
| 0
| 0.003491
| 0.285427
| 24,854
| 844
| 175
| 29.447867
| 0.775619
| 0.014082
| 0
| 0.728625
| 0
| 0
| 0.09528
| 0.003888
| 0
| 0
| 0
| 0.001185
| 0
| 0
| null | null | 0
| 0.005576
| null | null | 0.001859
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c530495d4213db437a804d74c6fe4ade8f791660
| 80,370
|
py
|
Python
|
alibabacloud/clients/cdn_20141111.py
|
wallisyan/alibabacloud-python-sdk-v2
|
6e024c97cded2403025a7dd8fea8261e41872156
|
[
"Apache-2.0"
] | 21
|
2018-12-20T07:34:13.000Z
|
2020-03-05T14:32:08.000Z
|
alibabacloud/clients/cdn_20141111.py
|
wallisyan/alibabacloud-python-sdk-v2
|
6e024c97cded2403025a7dd8fea8261e41872156
|
[
"Apache-2.0"
] | 22
|
2018-12-21T13:22:33.000Z
|
2020-06-29T08:37:09.000Z
|
alibabacloud/clients/cdn_20141111.py
|
wallisyan/alibabacloud-python-sdk-v2
|
6e024c97cded2403025a7dd8fea8261e41872156
|
[
"Apache-2.0"
] | 12
|
2018-12-29T05:45:55.000Z
|
2022-01-05T09:59:30.000Z
|
# Copyright 2019 Alibaba Cloud Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from alibabacloud.client import AlibabaCloudClient
from alibabacloud.request import APIRequest
class CdnClient(AlibabaCloudClient):
    def __init__(self, client_config, credentials_provider=None, retry_policy=None,
                 endpoint_resolver=None):
        """Create a client for the CDN product ('2014-11-11' API version).

        All constructor arguments are forwarded unchanged to
        AlibabaCloudClient.__init__; this subclass only pins the product
        metadata used for endpoint resolution and request signing.
        """
        AlibabaCloudClient.__init__(self, client_config,
                                    credentials_provider=credentials_provider,
                                    retry_policy=retry_policy,
                                    endpoint_resolver=endpoint_resolver)
        self.product_code = 'Cdn'
        self.api_version = '2014-11-11'
        # 'None' (the string) is deliberate: it is the value the resolver expects.
        self.location_service_code = 'None'
        self.location_endpoint_type = 'openAPI'
def describe_l2_vips_by_dynamic_domain(self, domain_name=None, owner_id=None):
api_request = APIRequest('DescribeL2VipsByDynamicDomain', 'GET', 'http', 'RPC', 'query')
api_request._params = {"DomainName": domain_name, "OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_domain_real_time_src_traffic_data(
self,
start_time=None,
domain_name=None,
end_time=None,
owner_id=None):
api_request = APIRequest('DescribeDomainRealTimeSrcTrafficData',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"StartTime": start_time,
"DomainName": domain_name,
"EndTime": end_time,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_domain_real_time_src_bps_data(
self,
start_time=None,
domain_name=None,
end_time=None,
owner_id=None):
api_request = APIRequest('DescribeDomainRealTimeSrcBpsData', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"StartTime": start_time,
"DomainName": domain_name,
"EndTime": end_time,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_domain_real_time_http_code_data(
self,
location_name_en=None,
start_time=None,
isp_name_en=None,
domain_name=None,
end_time=None,
owner_id=None):
api_request = APIRequest('DescribeDomainRealTimeHttpCodeData',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"LocationNameEn": location_name_en,
"StartTime": start_time,
"IspNameEn": isp_name_en,
"DomainName": domain_name,
"EndTime": end_time,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_cdn_user_quota(self, security_token=None, owner_id=None):
api_request = APIRequest('DescribeCdnUserQuota', 'GET', 'http', 'RPC', 'query')
api_request._params = {"SecurityToken": security_token, "OwnerId": owner_id}
return self._handle_request(api_request).result
def update_fc_trigger(
self,
notes=None,
trigger_arn=None,
source_arn=None,
owner_id=None,
role_arn=None,
function_arn=None):
api_request = APIRequest('UpdateFCTrigger', 'GET', 'http', 'RPC', 'body')
api_request._params = {
"Notes": notes,
"TriggerARN": trigger_arn,
"SourceARN": source_arn,
"OwnerId": owner_id,
"RoleARN": role_arn,
"FunctionARN": function_arn}
return self._handle_request(api_request).result
def describe_fc_trigger(self, trigger_arn=None, owner_id=None):
api_request = APIRequest('DescribeFCTrigger', 'GET', 'http', 'RPC', 'query')
api_request._params = {"TriggerARN": trigger_arn, "OwnerId": owner_id}
return self._handle_request(api_request).result
def delete_fc_trigger(self, trigger_arn=None, owner_id=None):
api_request = APIRequest('DeleteFCTrigger', 'GET', 'http', 'RPC', 'query')
api_request._params = {"TriggerARN": trigger_arn, "OwnerId": owner_id}
return self._handle_request(api_request).result
def add_fc_trigger(
self,
notes=None,
event_meta_version=None,
trigger_arn=None,
source_arn=None,
owner_id=None,
role_arn=None,
event_meta_name=None,
function_arn=None):
api_request = APIRequest('AddFCTrigger', 'GET', 'http', 'RPC', 'body')
api_request._params = {
"Notes": notes,
"EventMetaVersion": event_meta_version,
"TriggerARN": trigger_arn,
"SourceARN": source_arn,
"OwnerId": owner_id,
"RoleARN": role_arn,
"EventMetaName": event_meta_name,
"FunctionARN": function_arn}
return self._handle_request(api_request).result
def describe_domain_certificate_info(self, domain_name=None, owner_id=None):
api_request = APIRequest('DescribeDomainCertificateInfo', 'GET', 'http', 'RPC', 'query')
api_request._params = {"DomainName": domain_name, "OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_cdn_domain_configs(
self,
function_names=None,
security_token=None,
domain_name=None,
owner_id=None):
api_request = APIRequest('DescribeCdnDomainConfigs', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"FunctionNames": function_names,
"SecurityToken": security_token,
"DomainName": domain_name,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_domain_cname(self, domain_name=None, owner_id=None):
api_request = APIRequest('DescribeDomainCname', 'GET', 'http', 'RPC', 'query')
api_request._params = {"DomainName": domain_name, "OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_user_custom_log_config(self, owner_id=None):
api_request = APIRequest('DescribeUserCustomLogConfig', 'GET', 'http', 'RPC', 'query')
api_request._params = {"OwnerId": owner_id}
return self._handle_request(api_request).result
def set_waiting_room_config(
self,
wait_url=None,
wait_uri=None,
max_time_wait=None,
domain_name=None,
allow_pct=None,
gap_time=None,
owner_id=None):
api_request = APIRequest('SetWaitingRoomConfig', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"WaitUrl": wait_url,
"WaitUri": wait_uri,
"MaxTimeWait": max_time_wait,
"DomainName": domain_name,
"AllowPct": allow_pct,
"GapTime": gap_time,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_domain_real_time_req_hit_rate_data(
self,
start_time=None,
domain_name=None,
end_time=None,
owner_id=None):
api_request = APIRequest('DescribeDomainRealTimeReqHitRateData',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"StartTime": start_time,
"DomainName": domain_name,
"EndTime": end_time,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_domain_real_time_qps_data(
self,
location_name_en=None,
isp_name_en=None,
start_time=None,
domain_name=None,
end_time=None,
owner_id=None):
api_request = APIRequest('DescribeDomainRealTimeQpsData', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"LocationNameEn": location_name_en,
"IspNameEn": isp_name_en,
"StartTime": start_time,
"DomainName": domain_name,
"EndTime": end_time,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_domain_real_time_byte_hit_rate_data(
self,
start_time=None,
domain_name=None,
end_time=None,
owner_id=None):
api_request = APIRequest('DescribeDomainRealTimeByteHitRateData',
'GET', 'http', 'RPC', 'query')
api_request._params = {
"StartTime": start_time,
"DomainName": domain_name,
"EndTime": end_time,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_domain_real_time_bps_data(self, location_name_en=None, isp_name_en=None,
                                       start_time=None, domain_name=None, end_time=None,
                                       owner_id=None):
    """Invoke the ``DescribeDomainRealTimeBpsData`` RPC action and return its result."""
    params = {
        "LocationNameEn": location_name_en,
        "IspNameEn": isp_name_en,
        "StartTime": start_time,
        "DomainName": domain_name,
        "EndTime": end_time,
        "OwnerId": owner_id,
    }
    request = APIRequest('DescribeDomainRealTimeBpsData', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_domain_slow_ratio(self, start_time=None, page_number=None, page_size=None,
                               domain_name=None, end_time=None, owner_id=None):
    """Invoke the ``DescribeDomainSlowRatio`` RPC action and return its result."""
    params = {
        "StartTime": start_time,
        "PageNumber": page_number,
        "PageSize": page_size,
        "DomainName": domain_name,
        "EndTime": end_time,
        "OwnerId": owner_id,
    }
    request = APIRequest('DescribeDomainSlowRatio', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def modify_user_custom_log_config(self, owner_id=None, config_id=None, tag=None):
    """Invoke the ``ModifyUserCustomLogConfig`` RPC action and return its result."""
    params = {"OwnerId": owner_id, "ConfigId": config_id, "Tag": tag}
    request = APIRequest('ModifyUserCustomLogConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def modify_domain_custom_log_config(self, domain_name=None, owner_id=None, config_id=None):
    """Invoke the ``ModifyDomainCustomLogConfig`` RPC action and return its result."""
    params = {
        "DomainName": domain_name,
        "OwnerId": owner_id,
        "ConfigId": config_id,
    }
    request = APIRequest('ModifyDomainCustomLogConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def list_domains_by_log_config_id(self, owner_id=None, config_id=None):
    """Invoke the ``ListDomainsByLogConfigId`` RPC action and return its result."""
    params = {"OwnerId": owner_id, "ConfigId": config_id}
    request = APIRequest('ListDomainsByLogConfigId', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_domain_custom_log_config(self, domain_name=None, owner_id=None):
    """Invoke the ``DescribeDomainCustomLogConfig`` RPC action and return its result."""
    params = {"DomainName": domain_name, "OwnerId": owner_id}
    request = APIRequest('DescribeDomainCustomLogConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_custom_log_config(self, owner_id=None, config_id=None):
    """Invoke the ``DescribeCustomLogConfig`` RPC action and return its result."""
    params = {"OwnerId": owner_id, "ConfigId": config_id}
    request = APIRequest('DescribeCustomLogConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_ip_allow_list_config(self, allow_ips=None, security_token=None,
                             domain_name=None, owner_id=None):
    """Invoke the ``SetIpAllowListConfig`` RPC action and return its result."""
    params = {
        "AllowIps": allow_ips,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetIpAllowListConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_user_green_manager_config(self, security_token=None, quota=None,
                                  owner_id=None, ratio=None):
    """Invoke the ``SetUserGreenManagerConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "Quota": quota,
        "OwnerId": owner_id,
        "Ratio": ratio,
    }
    request = APIRequest('SetUserGreenManagerConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_domain_green_manager_config(self, domain_name=None, owner_id=None, enable=None):
    """Invoke the ``SetDomainGreenManagerConfig`` RPC action and return its result."""
    params = {"DomainName": domain_name, "OwnerId": owner_id, "Enable": enable}
    request = APIRequest('SetDomainGreenManagerConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_https_option_config(self, security_token=None, domain_name=None,
                            http2=None, owner_id=None):
    """Invoke the ``SetHttpsOptionConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "Http2": http2,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetHttpsOptionConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_l2_oss_key_config(self, security_token=None, domain_name=None,
                          owner_id=None, private_oss_auth=None):
    """Invoke the ``SetL2OssKeyConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OwnerId": owner_id,
        "PrivateOssAuth": private_oss_auth,
    }
    request = APIRequest('SetL2OssKeyConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_live_stream_bit_rate_data(self, app_name=None, security_token=None,
                                       domain_name=None, end_time=None, start_time=None,
                                       owner_id=None, stream_name=None):
    """Invoke the ``DescribeLiveStreamBitRateData`` RPC action and return its result."""
    params = {
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "EndTime": end_time,
        "StartTime": start_time,
        "OwnerId": owner_id,
        "StreamName": stream_name,
    }
    request = APIRequest('DescribeLiveStreamBitRateData', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_domain_average_response_time(self, location_name_en=None, start_time=None,
                                          isp_name_en=None, domain_type=None, out_string=None,
                                          time_merge=None, domain_name=None, end_time=None,
                                          owner_id=None, interval=None):
    """Invoke the ``DescribeDomainAverageResponseTime`` RPC action and return its result."""
    params = {
        "LocationNameEn": location_name_en,
        "StartTime": start_time,
        "IspNameEn": isp_name_en,
        "DomainType": domain_type,
        "OutString": out_string,
        "TimeMerge": time_merge,
        "DomainName": domain_name,
        "EndTime": end_time,
        "OwnerId": owner_id,
        "Interval": interval,
    }
    request = APIRequest('DescribeDomainAverageResponseTime', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_ip_black_list_config(self, security_token=None, domain_name=None,
                             owner_id=None, block_ips=None):
    """Invoke the ``SetIpBlackListConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OwnerId": owner_id,
        "BlockIps": block_ips,
    }
    request = APIRequest('SetIpBlackListConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_domain_path_data(self, start_time=None, page_number=None, path=None,
                              page_size=None, domain_name=None, end_time=None, owner_id=None):
    """Invoke the ``DescribeDomainPathData`` RPC action and return its result."""
    params = {
        "StartTime": start_time,
        "PageNumber": page_number,
        "Path": path,
        "PageSize": page_size,
        "DomainName": domain_name,
        "EndTime": end_time,
        "OwnerId": owner_id,
    }
    request = APIRequest('DescribeDomainPathData', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_remove_query_string_config(self, keep_oss_args=None, security_token=None,
                                   domain_name=None, ali_remove_args=None, owner_id=None):
    """Invoke the ``SetRemoveQueryStringConfig`` RPC action and return its result."""
    params = {
        "KeepOssArgs": keep_oss_args,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "AliRemoveArgs": ali_remove_args,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetRemoveQueryStringConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_user_customer_labels(self, uid=None, security_token=None, owner_id=None):
    """Invoke the ``DescribeUserCustomerLabels`` RPC action and return its result."""
    params = {"Uid": uid, "SecurityToken": security_token, "OwnerId": owner_id}
    request = APIRequest('DescribeUserCustomerLabels', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_dynamic_config(self, dynamic_origin=None, static_type=None, security_token=None,
                       static_uri=None, domain_name=None, static_path=None,
                       dynamic_cache_control=None, owner_id=None):
    """Invoke the ``SetDynamicConfig`` RPC action and return its result."""
    params = {
        "DynamicOrigin": dynamic_origin,
        "StaticType": static_type,
        "SecurityToken": security_token,
        "StaticUri": static_uri,
        "DomainName": domain_name,
        "StaticPath": static_path,
        "DynamicCacheControl": dynamic_cache_control,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetDynamicConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_req_header_config(self, security_token=None, config_id=None, domain_name=None,
                          owner_id=None, value=None, key=None):
    """Invoke the ``SetReqHeaderConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "ConfigId": config_id,
        "DomainName": domain_name,
        "OwnerId": owner_id,
        "Value": value,
        "Key": key,
    }
    request = APIRequest('SetReqHeaderConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_live_pull_stream_config(self, security_token=None, domain_name=None,
                                     owner_id=None):
    """Invoke the ``DescribeLivePullStreamConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('DescribeLivePullStreamConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_domains_by_source(self, sources=None, security_token=None, owner_id=None):
    """Invoke the ``DescribeDomainsBySource`` RPC action and return its result."""
    params = {
        "Sources": sources,
        "SecurityToken": security_token,
        "OwnerId": owner_id,
    }
    request = APIRequest('DescribeDomainsBySource', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def delete_live_domain_mapping(self, pull_domain=None, security_token=None,
                               push_domain=None, owner_id=None):
    """Invoke the ``DeleteLiveDomainMapping`` RPC action and return its result."""
    params = {
        "PullDomain": pull_domain,
        "SecurityToken": security_token,
        "PushDomain": push_domain,
        "OwnerId": owner_id,
    }
    request = APIRequest('DeleteLiveDomainMapping', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def add_live_domain_mapping(self, pull_domain=None, security_token=None,
                            push_domain=None, owner_id=None):
    """Invoke the ``AddLiveDomainMapping`` RPC action and return its result."""
    params = {
        "PullDomain": pull_domain,
        "SecurityToken": security_token,
        "PushDomain": push_domain,
        "OwnerId": owner_id,
    }
    request = APIRequest('AddLiveDomainMapping', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_l2_vips_by_domain(self, security_token=None, domain_name=None, owner_id=None):
    """Invoke the ``DescribeL2VipsByDomain`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('DescribeL2VipsByDomain', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_http_error_page_config(self, security_token=None, domain_name=None,
                               page_url=None, owner_id=None, error_code=None):
    """Invoke the ``SetHttpErrorPageConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "PageUrl": page_url,
        "OwnerId": owner_id,
        "ErrorCode": error_code,
    }
    request = APIRequest('SetHttpErrorPageConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def delete_specific_config(self, security_token=None, function_name=None,
                           config_id=None, domain_name=None, owner_id=None):
    """Invoke the ``DeleteSpecificConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "FunctionName": function_name,
        "ConfigId": config_id,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('DeleteSpecificConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def batch_set_cdn_domain_config(self, functions=None, security_token=None,
                                domain_names=None, owner_account=None, owner_id=None):
    """Invoke the ``BatchSetCdnDomainConfig`` RPC action and return its result."""
    params = {
        "Functions": functions,
        "SecurityToken": security_token,
        "DomainNames": domain_names,
        "OwnerAccount": owner_account,
        "OwnerId": owner_id,
    }
    request = APIRequest('BatchSetCdnDomainConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def batch_delete_cdn_domain_config(self, function_names=None, security_token=None,
                                   domain_names=None, owner_account=None, owner_id=None):
    """Invoke the ``BatchDeleteCdnDomainConfig`` RPC action and return its result."""
    params = {
        "FunctionNames": function_names,
        "SecurityToken": security_token,
        "DomainNames": domain_names,
        "OwnerAccount": owner_account,
        "OwnerId": owner_id,
    }
    request = APIRequest('BatchDeleteCdnDomainConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_range_data_by_locate_and_isp_service(self, domain_names=None, location_names=None,
                                                  start_time=None, isp_names=None,
                                                  end_time=None, owner_id=None):
    """Invoke the ``DescribeRangeDataByLocateAndIspService`` RPC action and return its result."""
    params = {
        "DomainNames": domain_names,
        "LocationNames": location_names,
        "StartTime": start_time,
        "IspNames": isp_names,
        "EndTime": end_time,
        "OwnerId": owner_id,
    }
    request = APIRequest('DescribeRangeDataByLocateAndIspService', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def update_live_app_snapshot_config(self, time_interval=None, oss_bucket=None, app_name=None,
                                    security_token=None, domain_name=None, oss_endpoint=None,
                                    sequence_oss_object=None, overwrite_oss_object=None,
                                    owner_id=None):
    """Invoke the ``UpdateLiveAppSnapshotConfig`` RPC action and return its result."""
    params = {
        "TimeInterval": time_interval,
        "OssBucket": oss_bucket,
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OssEndpoint": oss_endpoint,
        "SequenceOssObject": sequence_oss_object,
        "OverwriteOssObject": overwrite_oss_object,
        "OwnerId": owner_id,
    }
    request = APIRequest('UpdateLiveAppSnapshotConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_live_stream_snapshot_info(self, app_name=None, security_token=None,
                                       domain_name=None, limit=None, end_time=None,
                                       start_time=None, owner_id=None, stream_name=None):
    """Invoke the ``DescribeLiveStreamSnapshotInfo`` RPC action and return its result."""
    params = {
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "Limit": limit,
        "EndTime": end_time,
        "StartTime": start_time,
        "OwnerId": owner_id,
        "StreamName": stream_name,
    }
    request = APIRequest('DescribeLiveStreamSnapshotInfo', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_live_snapshot_config(self, app_name=None, security_token=None, domain_name=None,
                                  page_size=None, owner_id=None, page_num=None,
                                  stream_name=None, order=None):
    """Invoke the ``DescribeLiveSnapshotConfig`` RPC action and return its result."""
    params = {
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "PageSize": page_size,
        "OwnerId": owner_id,
        "PageNum": page_num,
        "StreamName": stream_name,
        "Order": order,
    }
    request = APIRequest('DescribeLiveSnapshotConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def delete_live_app_snapshot_config(self, app_name=None, security_token=None,
                                    domain_name=None, owner_id=None):
    """Invoke the ``DeleteLiveAppSnapshotConfig`` RPC action and return its result."""
    params = {
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('DeleteLiveAppSnapshotConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def add_live_app_snapshot_config(self, time_interval=None, oss_bucket=None, app_name=None,
                                 security_token=None, domain_name=None, oss_endpoint=None,
                                 sequence_oss_object=None, overwrite_oss_object=None,
                                 owner_id=None):
    """Invoke the ``AddLiveAppSnapshotConfig`` RPC action and return its result."""
    params = {
        "TimeInterval": time_interval,
        "OssBucket": oss_bucket,
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OssEndpoint": oss_endpoint,
        "SequenceOssObject": sequence_oss_object,
        "OverwriteOssObject": overwrite_oss_object,
        "OwnerId": owner_id,
    }
    request = APIRequest('AddLiveAppSnapshotConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def stop_mix_streams_service(self, security_token=None, main_domain_name=None,
                             mix_stream_name=None, mix_domain_name=None, owner_id=None,
                             main_app_name=None, mix_app_name=None, main_stream_name=None):
    """Invoke the ``StopMixStreamsService`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "MainDomainName": main_domain_name,
        "MixStreamName": mix_stream_name,
        "MixDomainName": mix_domain_name,
        "OwnerId": owner_id,
        "MainAppName": main_app_name,
        "MixAppName": mix_app_name,
        "MainStreamName": main_stream_name,
    }
    request = APIRequest('StopMixStreamsService', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def start_mix_streams_service(self, mix_type=None, security_token=None, main_domain_name=None,
                              mix_stream_name=None, mix_template=None, mix_domain_name=None,
                              owner_id=None, main_app_name=None, mix_app_name=None,
                              main_stream_name=None):
    """Invoke the ``StartMixStreamsService`` RPC action and return its result."""
    params = {
        "MixType": mix_type,
        "SecurityToken": security_token,
        "MainDomainName": main_domain_name,
        "MixStreamName": mix_stream_name,
        "MixTemplate": mix_template,
        "MixDomainName": mix_domain_name,
        "OwnerId": owner_id,
        "MainAppName": main_app_name,
        "MixAppName": mix_app_name,
        "MainStreamName": main_stream_name,
    }
    request = APIRequest('StartMixStreamsService', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_domain_bps_data_by_time_stamp(self, location_names=None, isp_names=None,
                                           domain_name=None, owner_id=None, time_point=None):
    """Invoke the ``DescribeDomainBpsDataByTimeStamp`` RPC action and return its result."""
    params = {
        "LocationNames": location_names,
        "IspNames": isp_names,
        "DomainName": domain_name,
        "OwnerId": owner_id,
        "TimePoint": time_point,
    }
    request = APIRequest('DescribeDomainBpsDataByTimeStamp', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_domain_max95_bps_data(self, start_time=None, domain_name=None,
                                   end_time=None, owner_id=None):
    """Invoke the ``DescribeDomainMax95BpsData`` RPC action and return its result."""
    params = {
        "StartTime": start_time,
        "DomainName": domain_name,
        "EndTime": end_time,
        "OwnerId": owner_id,
    }
    request = APIRequest('DescribeDomainMax95BpsData', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_live_streams_frame_rate_and_bit_rate_data(self, app_name=None,
                                                       security_token=None, domain_name=None,
                                                       end_time=None, start_time=None,
                                                       owner_id=None, stream_name=None):
    """Invoke the ``DescribeLiveStreamsFrameRateAndBitRateData`` RPC action and return its result."""
    params = {
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "EndTime": end_time,
        "StartTime": start_time,
        "OwnerId": owner_id,
        "StreamName": stream_name,
    }
    request = APIRequest(
        'DescribeLiveStreamsFrameRateAndBitRateData', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_live_stream_record_index_files(self, app_name=None, security_token=None,
                                            domain_name=None, end_time=None, start_time=None,
                                            owner_id=None, stream_name=None):
    """Invoke the ``DescribeLiveStreamRecordIndexFiles`` RPC action and return its result."""
    params = {
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "EndTime": end_time,
        "StartTime": start_time,
        "OwnerId": owner_id,
        "StreamName": stream_name,
    }
    request = APIRequest('DescribeLiveStreamRecordIndexFiles', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_live_stream_record_index_file(self, record_id=None, app_name=None,
                                           security_token=None, domain_name=None,
                                           owner_id=None, stream_name=None):
    """Invoke the ``DescribeLiveStreamRecordIndexFile`` RPC action and return its result."""
    params = {
        "RecordId": record_id,
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OwnerId": owner_id,
        "StreamName": stream_name,
    }
    request = APIRequest('DescribeLiveStreamRecordIndexFile', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_live_stream_record_content(self, app_name=None, security_token=None,
                                        domain_name=None, end_time=None, start_time=None,
                                        owner_id=None, stream_name=None):
    """Invoke the ``DescribeLiveStreamRecordContent`` RPC action and return its result."""
    params = {
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "EndTime": end_time,
        "StartTime": start_time,
        "OwnerId": owner_id,
        "StreamName": stream_name,
    }
    request = APIRequest('DescribeLiveStreamRecordContent', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_live_record_config(self, security_token=None, domain_name=None, owner_id=None):
    """Invoke the ``DescribeLiveRecordConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('DescribeLiveRecordConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def delete_live_app_record_config(self, app_name=None, security_token=None,
                                  domain_name=None, owner_id=None):
    """Invoke the ``DeleteLiveAppRecordConfig`` RPC action and return its result."""
    params = {
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('DeleteLiveAppRecordConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def create_live_stream_record_index_files(self, oss_bucket=None, app_name=None,
                                          security_token=None, domain_name=None,
                                          oss_endpoint=None, end_time=None, start_time=None,
                                          owner_id=None, stream_name=None, oss_object=None):
    """Invoke the ``CreateLiveStreamRecordIndexFiles`` RPC action and return its result."""
    params = {
        "OssBucket": oss_bucket,
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OssEndpoint": oss_endpoint,
        "EndTime": end_time,
        "StartTime": start_time,
        "OwnerId": owner_id,
        "StreamName": stream_name,
        "OssObject": oss_object,
    }
    request = APIRequest('CreateLiveStreamRecordIndexFiles', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def add_live_app_record_config(self, oss_bucket=None, app_name=None, security_token=None,
                               domain_name=None, oss_endpoint=None, oss_object_prefix=None,
                               owner_id=None):
    """Invoke the ``AddLiveAppRecordConfig`` RPC action and return its result."""
    params = {
        "OssBucket": oss_bucket,
        "AppName": app_name,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OssEndpoint": oss_endpoint,
        "OssObjectPrefix": oss_object_prefix,
        "OwnerId": owner_id,
    }
    request = APIRequest('AddLiveAppRecordConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_forward_scheme_config(self, scheme_origin=None, scheme_origin_port=None,
                              security_token=None, enable=None, domain_name=None,
                              owner_id=None):
    """Invoke the ``SetForwardSchemeConfig`` RPC action and return its result."""
    params = {
        "SchemeOrigin": scheme_origin,
        "SchemeOriginPort": scheme_origin_port,
        "SecurityToken": security_token,
        "Enable": enable,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetForwardSchemeConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_user_configs(self, security_token=None, owner_id=None, config=None):
    """Invoke the ``DescribeUserConfigs`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "OwnerId": owner_id,
        "Config": config,
    }
    request = APIRequest('DescribeUserConfigs', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_domain_req_hit_rate_data(self, start_time=None, domain_name=None,
                                      end_time=None, owner_id=None, interval=None):
    """Invoke the ``DescribeDomainReqHitRateData`` RPC action and return its result."""
    params = {
        "StartTime": start_time,
        "DomainName": domain_name,
        "EndTime": end_time,
        "OwnerId": owner_id,
        "Interval": interval,
    }
    request = APIRequest('DescribeDomainReqHitRateData', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_cdn_region_and_isp(self, security_token=None, owner_id=None):
    """Invoke the ``DescribeCdnRegionAndIsp`` RPC action and return its result."""
    params = {"SecurityToken": security_token, "OwnerId": owner_id}
    request = APIRequest('DescribeCdnRegionAndIsp', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_live_stream_transcode_info(self, security_token=None, owner_id=None,
                                        domain_transcode_name=None):
    """Invoke the ``DescribeLiveStreamTranscodeInfo`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "OwnerId": owner_id,
        "DomainTranscodeName": domain_transcode_name,
    }
    request = APIRequest('DescribeLiveStreamTranscodeInfo', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def delete_live_stream_transcode(self, template=None, app=None, security_token=None,
                                 owner_account=None, domain=None, owner_id=None):
    """Invoke the ``DeleteLiveStreamTranscode`` RPC action and return its result."""
    params = {
        "Template": template,
        "App": app,
        "SecurityToken": security_token,
        "OwnerAccount": owner_account,
        "Domain": domain,
        "OwnerId": owner_id,
    }
    request = APIRequest('DeleteLiveStreamTranscode', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def add_live_stream_transcode(self, template=None, app=None, security_token=None,
                              owner_account=None, domain=None, record=None,
                              owner_id=None, snapshot=None):
    """Invoke the ``AddLiveStreamTranscode`` RPC action and return its result."""
    params = {
        "Template": template,
        "App": app,
        "SecurityToken": security_token,
        "OwnerAccount": owner_account,
        "Domain": domain,
        "Record": record,
        "OwnerId": owner_id,
        "Snapshot": snapshot,
    }
    request = APIRequest('AddLiveStreamTranscode', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_force_redirect_config(self, security_token=None, domain_name=None,
                              redirect_type=None, owner_id=None):
    """Invoke the ``SetForceRedirectConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "RedirectType": redirect_type,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetForceRedirectConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_top_domains_by_flow(self, start_time=None, limit=None, product=None,
                                 end_time=None, owner_id=None):
    """Invoke the ``DescribeTopDomainsByFlow`` RPC action and return its result."""
    params = {
        "StartTime": start_time,
        "Limit": limit,
        "Product": product,
        "EndTime": end_time,
        "OwnerId": owner_id,
    }
    request = APIRequest('DescribeTopDomainsByFlow', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_domains_usage_by_day(self, start_time=None, domain_name=None,
                                  end_time=None, owner_id=None):
    """Invoke the ``DescribeDomainsUsageByDay`` RPC action and return its result."""
    params = {
        "StartTime": start_time,
        "DomainName": domain_name,
        "EndTime": end_time,
        "OwnerId": owner_id,
    }
    request = APIRequest('DescribeDomainsUsageByDay', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def describe_cdn_types(self, security_token=None, owner_account=None, owner_id=None):
    """Invoke the ``DescribeCdnTypes`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "OwnerAccount": owner_account,
        "OwnerId": owner_id,
    }
    request = APIRequest('DescribeCdnTypes', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_domain_server_certificate(self, private_key=None, force_set=None,
                                  server_certificate_status=None, server_certificate=None,
                                  security_token=None, cert_type=None, cert_name=None,
                                  domain_name=None, owner_id=None, region=None):
    """Invoke the ``SetDomainServerCertificate`` RPC action and return its result."""
    params = {
        "PrivateKey": private_key,
        "ForceSet": force_set,
        "ServerCertificateStatus": server_certificate_status,
        "ServerCertificate": server_certificate,
        "SecurityToken": security_token,
        "CertType": cert_type,
        "CertName": cert_name,
        "DomainName": domain_name,
        "OwnerId": owner_id,
        "Region": region,
    }
    request = APIRequest('SetDomainServerCertificate', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def modify_cdn_domain(self, top_level_domain=None, source_port=None, resource_group_id=None,
                      priorities=None, sources=None, security_token=None, domain_name=None,
                      source_type=None, owner_id=None):
    """Invoke the ``ModifyCdnDomain`` RPC action and return its result."""
    params = {
        "TopLevelDomain": top_level_domain,
        "SourcePort": source_port,
        "ResourceGroupId": resource_group_id,
        "Priorities": priorities,
        "Sources": sources,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "SourceType": source_type,
        "OwnerId": owner_id,
    }
    request = APIRequest('ModifyCdnDomain', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_video_seek_config(self, security_token=None, enable=None,
                          domain_name=None, owner_id=None):
    """Invoke the ``SetVideoSeekConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "Enable": enable,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetVideoSeekConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_source_host_config(self, security_token=None, enable=None, domain_name=None,
                           owner_id=None, back_src_domain=None):
    """Invoke the ``SetSourceHostConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "Enable": enable,
        "DomainName": domain_name,
        "OwnerId": owner_id,
        "BackSrcDomain": back_src_domain,
    }
    request = APIRequest('SetSourceHostConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_req_auth_config(self, key1=None, key2=None, auth_remote_desc=None,
                        security_token=None, domain_name=None, owner_id=None,
                        time_out=None, auth_type=None):
    """Invoke the ``SetReqAuthConfig`` RPC action and return its result."""
    params = {
        "Key1": key1,
        "Key2": key2,
        "AuthRemoteDesc": auth_remote_desc,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OwnerId": owner_id,
        "TimeOut": time_out,
        "AuthType": auth_type,
    }
    request = APIRequest('SetReqAuthConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_referer_config(self, refer_list=None, security_token=None, domain_name=None,
                       refer_type=None, disable_ast=None, owner_id=None, allow_empty=None):
    """Invoke the ``SetRefererConfig`` RPC action and return its result."""
    params = {
        "ReferList": refer_list,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "ReferType": refer_type,
        "DisableAst": disable_ast,
        "OwnerId": owner_id,
        "AllowEmpty": allow_empty,
    }
    request = APIRequest('SetRefererConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_range_config(self, security_token=None, enable=None, domain_name=None, owner_id=None):
    """Invoke the ``SetRangeConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "Enable": enable,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetRangeConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_path_cache_expired_config(self, security_token=None, domain_name=None, weight=None,
                                  cache_content=None, owner_id=None, ttl=None):
    """Invoke the ``SetPathCacheExpiredConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "Weight": weight,
        "CacheContent": cache_content,
        "OwnerId": owner_id,
        "TTL": ttl,
    }
    request = APIRequest('SetPathCacheExpiredConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_page_compress_config(self, security_token=None, enable=None,
                             domain_name=None, owner_id=None):
    """Invoke the ``SetPageCompressConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "Enable": enable,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetPageCompressConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_optimize_config(self, security_token=None, enable=None,
                        domain_name=None, owner_id=None):
    """Invoke the ``SetOptimizeConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "Enable": enable,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetOptimizeConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_ignore_query_string_config(self, keep_oss_args=None, hash_key_args=None,
                                   security_token=None, enable=None, domain_name=None,
                                   owner_id=None):
    """Invoke the ``SetIgnoreQueryStringConfig`` RPC action and return its result."""
    params = {
        "KeepOssArgs": keep_oss_args,
        "HashKeyArgs": hash_key_args,
        "SecurityToken": security_token,
        "Enable": enable,
        "DomainName": domain_name,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetIgnoreQueryStringConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_http_header_config(self, header_value=None, security_token=None, config_id=None,
                           domain_name=None, header_key=None, owner_id=None):
    """Invoke the ``SetHttpHeaderConfig`` RPC action and return its result."""
    params = {
        "HeaderValue": header_value,
        "SecurityToken": security_token,
        "ConfigId": config_id,
        "DomainName": domain_name,
        "HeaderKey": header_key,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetHttpHeaderConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_file_cache_expired_config(self, security_token=None, domain_name=None, weight=None,
                                  cache_content=None, owner_id=None, ttl=None):
    """Invoke the ``SetFileCacheExpiredConfig`` RPC action and return its result."""
    params = {
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "Weight": weight,
        "CacheContent": cache_content,
        "OwnerId": owner_id,
        "TTL": ttl,
    }
    request = APIRequest('SetFileCacheExpiredConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_error_page_config(self, page_type=None, security_token=None, domain_name=None,
                          custom_page_url=None, owner_id=None):
    """Invoke the ``SetErrorPageConfig`` RPC action and return its result."""
    params = {
        "PageType": page_type,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "CustomPageUrl": custom_page_url,
        "OwnerId": owner_id,
    }
    request = APIRequest('SetErrorPageConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def set_cc_config(self, allow_ips=None, security_token=None, domain_name=None,
                  owner_id=None, block_ips=None):
    """Invoke the ``SetCcConfig`` RPC action and return its result."""
    params = {
        "AllowIps": allow_ips,
        "SecurityToken": security_token,
        "DomainName": domain_name,
        "OwnerId": owner_id,
        "BlockIps": block_ips,
    }
    request = APIRequest('SetCcConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def modify_path_cache_expired_config(self, security_token=None, config_id=None,
                                     domain_name=None, weight=None, cache_content=None,
                                     owner_id=None, ttl=None):
    """Invoke the ``ModifyPathCacheExpiredConfig`` RPC action and return its result."""
    # NOTE: this API spells the key "ConfigID" (capital D), unlike the Set* APIs.
    params = {
        "SecurityToken": security_token,
        "ConfigID": config_id,
        "DomainName": domain_name,
        "Weight": weight,
        "CacheContent": cache_content,
        "OwnerId": owner_id,
        "TTL": ttl,
    }
    request = APIRequest('ModifyPathCacheExpiredConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def modify_http_header_config(self, header_value=None, security_token=None, config_id=None,
                              domain_name=None, header_key=None, owner_id=None):
    """Invoke the ``ModifyHttpHeaderConfig`` RPC action and return its result."""
    # NOTE: this API spells the key "ConfigID" (capital D), unlike the Set* APIs.
    params = {
        "HeaderValue": header_value,
        "SecurityToken": security_token,
        "ConfigID": config_id,
        "DomainName": domain_name,
        "HeaderKey": header_key,
        "OwnerId": owner_id,
    }
    request = APIRequest('ModifyHttpHeaderConfig', 'GET', 'http', 'RPC', 'query')
    request._params = params
    return self._handle_request(request).result
def modify_file_cache_expired_config(
        self,
        security_token=None,
        config_id=None,
        domain_name=None,
        weight=None,
        cache_content=None,
        owner_id=None,
        ttl=None):
    """Invoke the ModifyFileCacheExpiredConfig RPC action and return its result."""
    request = APIRequest('ModifyFileCacheExpiredConfig', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        ConfigID=config_id,
        DomainName=domain_name,
        Weight=weight,
        CacheContent=cache_content,
        OwnerId=owner_id,
        TTL=ttl)
    return self._handle_request(request).result
def describe_refresh_quota(self, security_token=None, owner_id=None):
    """Invoke the DescribeRefreshQuota RPC action and return its result."""
    request = APIRequest('DescribeRefreshQuota', 'GET', 'http', 'RPC', 'query')
    request._params = dict(SecurityToken=security_token, OwnerId=owner_id)
    return self._handle_request(request).result
def describe_ip_info(self, security_token=None, ip=None, owner_id=None):
    """Invoke the DescribeIpInfo RPC action and return its result."""
    request = APIRequest('DescribeIpInfo', 'GET', 'http', 'RPC', 'query')
    request._params = dict(SecurityToken=security_token, IP=ip, OwnerId=owner_id)
    return self._handle_request(request).result
def describe_domain_uv_data(
        self,
        security_token=None,
        domain_name=None,
        end_time=None,
        start_time=None,
        owner_id=None):
    """Invoke the DescribeDomainUvData RPC action and return its result."""
    request = APIRequest('DescribeDomainUvData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        EndTime=end_time,
        StartTime=start_time,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_domain_top_url_visit(
        self,
        security_token=None,
        domain_name=None,
        sort_by=None,
        start_time=None,
        owner_id=None):
    """Invoke the DescribeDomainTopUrlVisit RPC action and return its result."""
    request = APIRequest('DescribeDomainTopUrlVisit', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        SortBy=sort_by,
        StartTime=start_time,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_domain_top_refer_visit(
        self,
        security_token=None,
        domain_name=None,
        sort_by=None,
        start_time=None,
        owner_id=None):
    """Invoke the DescribeDomainTopReferVisit RPC action and return its result."""
    request = APIRequest('DescribeDomainTopReferVisit', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        SortBy=sort_by,
        StartTime=start_time,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_domain_src_flow_data(
        self,
        start_time=None,
        fix_time_gap=None,
        time_merge=None,
        domain_name=None,
        end_time=None,
        owner_id=None,
        interval=None):
    """Invoke the DescribeDomainSrcFlowData RPC action and return its result."""
    request = APIRequest('DescribeDomainSrcFlowData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        StartTime=start_time,
        FixTimeGap=fix_time_gap,
        TimeMerge=time_merge,
        DomainName=domain_name,
        EndTime=end_time,
        OwnerId=owner_id,
        Interval=interval)
    return self._handle_request(request).result
def describe_domain_src_bps_data(
        self,
        start_time=None,
        fix_time_gap=None,
        time_merge=None,
        domain_name=None,
        end_time=None,
        owner_id=None,
        interval=None):
    """Invoke the DescribeDomainSrcBpsData RPC action and return its result."""
    request = APIRequest('DescribeDomainSrcBpsData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        StartTime=start_time,
        FixTimeGap=fix_time_gap,
        TimeMerge=time_merge,
        DomainName=domain_name,
        EndTime=end_time,
        OwnerId=owner_id,
        Interval=interval)
    return self._handle_request(request).result
def describe_domain_region_data(
        self,
        start_time=None,
        domain_name=None,
        end_time=None,
        owner_id=None):
    """Invoke the DescribeDomainRegionData RPC action and return its result."""
    request = APIRequest('DescribeDomainRegionData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        StartTime=start_time,
        DomainName=domain_name,
        EndTime=end_time,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_domain_qps_data(
        self,
        location_name_en=None,
        start_time=None,
        isp_name_en=None,
        domain_type=None,
        time_merge=None,
        domain_name=None,
        end_time=None,
        owner_id=None,
        interval=None):
    """Invoke the DescribeDomainQpsData RPC action and return its result."""
    request = APIRequest('DescribeDomainQpsData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        LocationNameEn=location_name_en,
        StartTime=start_time,
        IspNameEn=isp_name_en,
        DomainType=domain_type,
        TimeMerge=time_merge,
        DomainName=domain_name,
        EndTime=end_time,
        OwnerId=owner_id,
        Interval=interval)
    return self._handle_request(request).result
def describe_domain_pv_data(
        self,
        security_token=None,
        domain_name=None,
        end_time=None,
        start_time=None,
        owner_id=None):
    """Invoke the DescribeDomainPvData RPC action and return its result."""
    request = APIRequest('DescribeDomainPvData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        EndTime=end_time,
        StartTime=start_time,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_domain_isp_data(
        self,
        start_time=None,
        domain_name=None,
        end_time=None,
        owner_id=None):
    """Invoke the DescribeDomainISPData RPC action and return its result."""
    request = APIRequest('DescribeDomainISPData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        StartTime=start_time,
        DomainName=domain_name,
        EndTime=end_time,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_domain_http_code_data(
        self,
        location_name_en=None,
        start_time=None,
        isp_name_en=None,
        time_merge=None,
        domain_name=None,
        end_time=None,
        owner_id=None,
        interval=None):
    """Invoke the DescribeDomainHttpCodeData RPC action and return its result."""
    request = APIRequest('DescribeDomainHttpCodeData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        LocationNameEn=location_name_en,
        StartTime=start_time,
        IspNameEn=isp_name_en,
        TimeMerge=time_merge,
        DomainName=domain_name,
        EndTime=end_time,
        OwnerId=owner_id,
        Interval=interval)
    return self._handle_request(request).result
def describe_domain_hit_rate_data(
        self,
        start_time=None,
        domain_name=None,
        end_time=None,
        owner_id=None,
        interval=None):
    """Invoke the DescribeDomainHitRateData RPC action and return its result."""
    request = APIRequest('DescribeDomainHitRateData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        StartTime=start_time,
        DomainName=domain_name,
        EndTime=end_time,
        OwnerId=owner_id,
        Interval=interval)
    return self._handle_request(request).result
def describe_domain_flow_data(
        self,
        location_name_en=None,
        start_time=None,
        isp_name_en=None,
        domain_type=None,
        time_merge=None,
        domain_name=None,
        end_time=None,
        owner_id=None,
        interval=None):
    """Invoke the DescribeDomainFlowData RPC action and return its result."""
    request = APIRequest('DescribeDomainFlowData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        LocationNameEn=location_name_en,
        StartTime=start_time,
        IspNameEn=isp_name_en,
        DomainType=domain_type,
        TimeMerge=time_merge,
        DomainName=domain_name,
        EndTime=end_time,
        OwnerId=owner_id,
        Interval=interval)
    return self._handle_request(request).result
def describe_domain_file_size_proportion_data(
        self,
        security_token=None,
        domain_name=None,
        end_time=None,
        start_time=None,
        owner_id=None):
    """Invoke the DescribeDomainFileSizeProportionData RPC action and return its result."""
    request = APIRequest(
        'DescribeDomainFileSizeProportionData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        EndTime=end_time,
        StartTime=start_time,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_domain_configs(
        self,
        security_token=None,
        domain_name=None,
        config_list=None,
        owner_id=None):
    """Invoke the DescribeDomainConfigs RPC action and return its result."""
    request = APIRequest('DescribeDomainConfigs', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        ConfigList=config_list,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_domain_cc_data(
        self,
        security_token=None,
        domain_name=None,
        end_time=None,
        start_time=None,
        owner_id=None):
    """Invoke the DescribeDomainCCData RPC action and return its result."""
    request = APIRequest('DescribeDomainCCData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        EndTime=end_time,
        StartTime=start_time,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_domain_cc_attack_info(
        self,
        security_token=None,
        domain_name=None,
        end_time=None,
        start_time=None,
        owner_id=None):
    """Invoke the DescribeDomainCCAttackInfo RPC action and return its result."""
    request = APIRequest('DescribeDomainCCAttackInfo', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        EndTime=end_time,
        StartTime=start_time,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_domain_bps_data(
        self,
        location_name_en=None,
        start_time=None,
        isp_name_en=None,
        domain_type=None,
        time_merge=None,
        domain_name=None,
        end_time=None,
        owner_id=None,
        interval=None):
    """Invoke the DescribeDomainBpsData RPC action and return its result."""
    request = APIRequest('DescribeDomainBpsData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        LocationNameEn=location_name_en,
        StartTime=start_time,
        IspNameEn=isp_name_en,
        DomainType=domain_type,
        TimeMerge=time_merge,
        DomainName=domain_name,
        EndTime=end_time,
        OwnerId=owner_id,
        Interval=interval)
    return self._handle_request(request).result
def delete_http_header_config(
        self,
        security_token=None,
        config_id=None,
        domain_name=None,
        owner_id=None):
    """Invoke the DeleteHttpHeaderConfig RPC action and return its result."""
    request = APIRequest('DeleteHttpHeaderConfig', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        ConfigID=config_id,
        DomainName=domain_name,
        OwnerId=owner_id)
    return self._handle_request(request).result
def delete_cache_expired_config(
        self,
        cache_type=None,
        security_token=None,
        config_id=None,
        domain_name=None,
        owner_id=None):
    """Invoke the DeleteCacheExpiredConfig RPC action and return its result."""
    request = APIRequest('DeleteCacheExpiredConfig', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        CacheType=cache_type,
        SecurityToken=security_token,
        ConfigID=config_id,
        DomainName=domain_name,
        OwnerId=owner_id)
    return self._handle_request(request).result
def set_live_streams_notify_url_config(
        self,
        security_token=None,
        domain_name=None,
        notify_url=None,
        owner_id=None):
    """Invoke the SetLiveStreamsNotifyUrlConfig RPC action and return its result."""
    request = APIRequest('SetLiveStreamsNotifyUrlConfig', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        NotifyUrl=notify_url,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_live_stream_online_user_num(
        self,
        app_name=None,
        security_token=None,
        hls_switch=None,
        domain_name=None,
        end_time=None,
        start_time=None,
        owner_id=None,
        stream_name=None):
    """Invoke the DescribeLiveStreamOnlineUserNum RPC action and return its result."""
    request = APIRequest('DescribeLiveStreamOnlineUserNum', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        AppName=app_name,
        SecurityToken=security_token,
        HlsSwitch=hls_switch,
        DomainName=domain_name,
        EndTime=end_time,
        StartTime=start_time,
        OwnerId=owner_id,
        StreamName=stream_name)
    return self._handle_request(request).result
def resume_live_stream(
        self,
        app_name=None,
        security_token=None,
        live_stream_type=None,
        domain_name=None,
        owner_id=None,
        stream_name=None):
    """Invoke the ResumeLiveStream RPC action and return its result."""
    request = APIRequest('ResumeLiveStream', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        AppName=app_name,
        SecurityToken=security_token,
        LiveStreamType=live_stream_type,
        DomainName=domain_name,
        OwnerId=owner_id,
        StreamName=stream_name)
    return self._handle_request(request).result
def forbid_live_stream(
        self,
        resume_time=None,
        app_name=None,
        security_token=None,
        live_stream_type=None,
        domain_name=None,
        owner_id=None,
        stream_name=None):
    """Invoke the ForbidLiveStream RPC action and return its result."""
    request = APIRequest('ForbidLiveStream', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        ResumeTime=resume_time,
        AppName=app_name,
        SecurityToken=security_token,
        LiveStreamType=live_stream_type,
        DomainName=domain_name,
        OwnerId=owner_id,
        StreamName=stream_name)
    return self._handle_request(request).result
def describe_live_streams_publish_list(
        self,
        app_name=None,
        security_token=None,
        domain_name=None,
        page_size=None,
        end_time=None,
        start_time=None,
        owner_id=None,
        stream_name=None,
        page_number=None):
    """Invoke the DescribeLiveStreamsPublishList RPC action and return its result."""
    request = APIRequest('DescribeLiveStreamsPublishList', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        AppName=app_name,
        SecurityToken=security_token,
        DomainName=domain_name,
        PageSize=page_size,
        EndTime=end_time,
        StartTime=start_time,
        OwnerId=owner_id,
        StreamName=stream_name,
        PageNumber=page_number)
    return self._handle_request(request).result
def describe_live_streams_online_list(
        self,
        stream_type=None,
        app_name=None,
        security_token=None,
        domain_name=None,
        page_size=None,
        owner_id=None,
        page_num=None):
    """Invoke the DescribeLiveStreamsOnlineList RPC action and return its result."""
    request = APIRequest('DescribeLiveStreamsOnlineList', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        StreamType=stream_type,
        AppName=app_name,
        SecurityToken=security_token,
        DomainName=domain_name,
        PageSize=page_size,
        OwnerId=owner_id,
        PageNum=page_num)
    return self._handle_request(request).result
def describe_live_streams_control_history(
        self,
        app_name=None,
        security_token=None,
        domain_name=None,
        end_time=None,
        start_time=None,
        owner_id=None):
    """Invoke the DescribeLiveStreamsControlHistory RPC action and return its result."""
    request = APIRequest('DescribeLiveStreamsControlHistory', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        AppName=app_name,
        SecurityToken=security_token,
        DomainName=domain_name,
        EndTime=end_time,
        StartTime=start_time,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_live_streams_block_list(
        self,
        security_token=None,
        domain_name=None,
        owner_id=None):
    """Invoke the DescribeLiveStreamsBlockList RPC action and return its result."""
    request = APIRequest('DescribeLiveStreamsBlockList', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_cdn_domain_logs(
        self,
        security_token=None,
        domain_name=None,
        page_size=None,
        end_time=None,
        start_time=None,
        owner_id=None,
        page_number=None,
        log_day=None):
    """Invoke the DescribeCdnDomainLogs RPC action and return its result."""
    request = APIRequest('DescribeCdnDomainLogs', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        PageSize=page_size,
        EndTime=end_time,
        StartTime=start_time,
        OwnerId=owner_id,
        PageNumber=page_number,
        LogDay=log_day)
    return self._handle_request(request).result
def describe_cdn_domain_detail(self, security_token=None, domain_name=None, owner_id=None):
    """Invoke the DescribeCdnDomainDetail RPC action and return its result."""
    request = APIRequest('DescribeCdnDomainDetail', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        OwnerId=owner_id)
    return self._handle_request(request).result
def delete_cdn_domain(
        self,
        resource_group_id=None,
        security_token=None,
        domain_name=None,
        owner_id=None):
    """Invoke the DeleteCdnDomain RPC action and return its result."""
    request = APIRequest('DeleteCdnDomain', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        ResourceGroupId=resource_group_id,
        SecurityToken=security_token,
        DomainName=domain_name,
        OwnerId=owner_id)
    return self._handle_request(request).result
def add_cdn_domain(
        self,
        top_level_domain=None,
        sources=None,
        owner_account=None,
        domain_name=None,
        owner_id=None,
        resource_group_id=None,
        source_port=None,
        priorities=None,
        security_token=None,
        cdn_type=None,
        scope=None,
        source_type=None,
        check_url=None,
        region=None):
    """Invoke the AddCdnDomain RPC action and return its result."""
    request = APIRequest('AddCdnDomain', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        TopLevelDomain=top_level_domain,
        Sources=sources,
        OwnerAccount=owner_account,
        DomainName=domain_name,
        OwnerId=owner_id,
        ResourceGroupId=resource_group_id,
        SourcePort=source_port,
        Priorities=priorities,
        SecurityToken=security_token,
        CdnType=cdn_type,
        Scope=scope,
        SourceType=source_type,
        CheckUrl=check_url,
        Region=region)
    return self._handle_request(request).result
def push_object_cache(self, area=None, security_token=None, object_path=None, owner_id=None):
    """Invoke the PushObjectCache RPC action and return its result."""
    request = APIRequest('PushObjectCache', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        Area=area,
        SecurityToken=security_token,
        ObjectPath=object_path,
        OwnerId=owner_id)
    return self._handle_request(request).result
def open_cdn_service(self, security_token=None, internet_charge_type=None, owner_id=None):
    """Invoke the OpenCdnService RPC action and return its result."""
    request = APIRequest('OpenCdnService', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        InternetChargeType=internet_charge_type,
        OwnerId=owner_id)
    return self._handle_request(request).result
def modify_cdn_service(self, security_token=None, internet_charge_type=None, owner_id=None):
    """Invoke the ModifyCdnService RPC action and return its result."""
    request = APIRequest('ModifyCdnService', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        InternetChargeType=internet_charge_type,
        OwnerId=owner_id)
    return self._handle_request(request).result
def describe_user_domains(
        self,
        func_filter=None,
        sources=None,
        domain_name=None,
        owner_id=None,
        func_id=None,
        page_number=None,
        domain_status=None,
        domain_search_type=None,
        check_domain_show=None,
        resource_group_id=None,
        security_token=None,
        cdn_type=None,
        page_size=None):
    """Invoke the DescribeUserDomains RPC action and return its result."""
    request = APIRequest('DescribeUserDomains', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        FuncFilter=func_filter,
        Sources=sources,
        DomainName=domain_name,
        OwnerId=owner_id,
        FuncId=func_id,
        PageNumber=page_number,
        DomainStatus=domain_status,
        DomainSearchType=domain_search_type,
        CheckDomainShow=check_domain_show,
        ResourceGroupId=resource_group_id,
        SecurityToken=security_token,
        CdnType=cdn_type,
        PageSize=page_size)
    return self._handle_request(request).result
def describe_refresh_tasks(
        self,
        object_path=None,
        domain_name=None,
        end_time=None,
        start_time=None,
        owner_id=None,
        page_number=None,
        resource_group_id=None,
        security_token=None,
        page_size=None,
        object_type=None,
        task_id=None,
        status=None):
    """Invoke the DescribeRefreshTasks RPC action and return its result."""
    request = APIRequest('DescribeRefreshTasks', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        ObjectPath=object_path,
        DomainName=domain_name,
        EndTime=end_time,
        StartTime=start_time,
        OwnerId=owner_id,
        PageNumber=page_number,
        ResourceGroupId=resource_group_id,
        SecurityToken=security_token,
        PageSize=page_size,
        ObjectType=object_type,
        TaskId=task_id,
        Status=status)
    return self._handle_request(request).result
def describe_cdn_service(self, security_token=None, owner_id=None):
    """Invoke the DescribeCdnService RPC action and return its result."""
    request = APIRequest('DescribeCdnService', 'GET', 'http', 'RPC', 'query')
    request._params = dict(SecurityToken=security_token, OwnerId=owner_id)
    return self._handle_request(request).result
def describe_cdn_monitor_data(
        self,
        start_time=None,
        domain_name=None,
        end_time=None,
        owner_id=None,
        interval=None):
    """Invoke the DescribeCdnMonitorData RPC action and return its result."""
    request = APIRequest('DescribeCdnMonitorData', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        StartTime=start_time,
        DomainName=domain_name,
        EndTime=end_time,
        OwnerId=owner_id,
        Interval=interval)
    return self._handle_request(request).result
def stop_cdn_domain(self, security_token=None, domain_name=None, owner_id=None):
    """Invoke the StopCdnDomain RPC action and return its result."""
    request = APIRequest('StopCdnDomain', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        OwnerId=owner_id)
    return self._handle_request(request).result
def start_cdn_domain(self, security_token=None, domain_name=None, owner_id=None):
    """Invoke the StartCdnDomain RPC action and return its result."""
    request = APIRequest('StartCdnDomain', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        DomainName=domain_name,
        OwnerId=owner_id)
    return self._handle_request(request).result
def refresh_object_caches(
        self,
        security_token=None,
        object_path=None,
        owner_id=None,
        object_type=None):
    """Invoke the RefreshObjectCaches RPC action and return its result."""
    request = APIRequest('RefreshObjectCaches', 'GET', 'http', 'RPC', 'query')
    request._params = dict(
        SecurityToken=security_token,
        ObjectPath=object_path,
        OwnerId=owner_id,
        ObjectType=object_type)
    return self._handle_request(request).result
| 37.242817
| 100
| 0.576845
| 7,819
| 80,370
| 5.576544
| 0.071876
| 0.093572
| 0.03431
| 0.074857
| 0.803087
| 0.791505
| 0.777538
| 0.739789
| 0.722657
| 0.707704
| 0
| 0.000643
| 0.322546
| 80,370
| 2,157
| 101
| 37.260083
| 0.800191
| 0.007154
| 0
| 0.771457
| 0
| 0
| 0.150474
| 0.030659
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068363
| false
| 0
| 0.000998
| 0
| 0.137725
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d688ef9c26028239784eed0cb41ed8c75cffde0
| 20,802
|
py
|
Python
|
preference_discovery/pages.py
|
ekoyudhi/preference-discovery
|
9ad099f98de189379673160dfac9433a87215ff7
|
[
"MIT"
] | null | null | null |
preference_discovery/pages.py
|
ekoyudhi/preference-discovery
|
9ad099f98de189379673160dfac9433a87215ff7
|
[
"MIT"
] | null | null | null |
preference_discovery/pages.py
|
ekoyudhi/preference-discovery
|
9ad099f98de189379673160dfac9433a87215ff7
|
[
"MIT"
] | 1
|
2022-03-21T02:34:15.000Z
|
2022-03-21T02:34:15.000Z
|
from random import randint, random
from ._builtin import Page
class No1Introduction(Page):
    """Introduction page, shown only once in round 1."""

    def is_displayed(self):
        # Only the very first round shows the introduction.
        return self.round_number == 1

    def before_next_page(self):
        # Initialize the player's prospect sequence and reset the
        # cumulative-payoff accumulator read/updated by later result pages.
        self.player.sequence_setup()
        self.participant.vars['payoff_round_all'] = 0
class No2Instructions1(Page):
    """First instructions page, shown only in round 1."""

    def is_displayed(self):
        return self.round_number == 1

    def vars_for_template(self):
        cfg = self.session.config
        return {
            'endowment': cfg["endowment"],
            'show_up_fee': int(cfg["participation_fee"]),
        }
class No2Instructions2(Page):
    """Second instructions page (with illustration images), shown only in round 1."""

    def is_displayed(self):
        return self.round_number == 1

    def vars_for_template(self):
        cfg = self.session.config
        return {
            'endowment': cfg["endowment"],
            'instrument': "preference_discovery/SC-1.jpeg",
            'result': "preference_discovery/SC-2.jpeg",
            'show_up_fee': int(cfg["participation_fee"]),
        }
class No2Instructions3(Page):
    """Third instructions page, shown only in round 1."""

    def is_displayed(self):
        return self.round_number == 1

    def vars_for_template(self):
        cfg = self.session.config
        return {
            'endowment': cfg["endowment"],
            'show_up_fee': int(cfg["participation_fee"]),
        }
class No2Instructions4(Page):
    """Fourth instructions page, shown only in round 1."""

    def is_displayed(self):
        return self.round_number == 1

    def vars_for_template(self):
        cfg = self.session.config
        return {
            'endowment': cfg["endowment"],
            'show_up_fee': int(cfg["participation_fee"]),
        }
class No2Warning(Page):
    """Warning page shown once, in the first round after the training rounds."""

    def is_displayed(self):
        # Displayed exactly when the first non-training round begins.
        return self.round_number == self.session.config["training_rounds"] + 1
class No3Start0(Page):
    """Round-start page for the training rounds (1-3)."""

    def is_displayed(self):
        return self.round_number in (1, 2, 3)

    def before_next_page(self, **kwargs):
        # NOTE(review): the set wrapper around the call looks vestigial;
        # the return value appears unused — kept as-is for behavior parity.
        return {self.player.set_player_param()}

    def vars_for_template(self):
        training_rounds = self.session.config["training_rounds"]
        return {
            'training': self.round_number <= training_rounds,
            'training_round': self.round_number,
            'round': self.round_number - training_rounds,
        }
class No3Start1(Page):
    """Round-start page for rounds 4-13."""

    def is_displayed(self):
        return self.round_number in tuple(range(4, 14))

    def before_next_page(self, **kwargs):
        # NOTE(review): the set wrapper around the call looks vestigial;
        # the return value appears unused — kept as-is for behavior parity.
        return {self.player.set_player_param()}

    def vars_for_template(self):
        training_rounds = self.session.config["training_rounds"]
        return {
            'training': self.round_number <= training_rounds,
            'training_round': self.round_number,
            'round': self.round_number - training_rounds,
        }
class No3Start2(Page):
    """Round-start page for rounds 14-23."""

    def is_displayed(self):
        return self.round_number in tuple(range(14, 24))

    def before_next_page(self, **kwargs):
        # NOTE(review): the set wrapper around the call looks vestigial;
        # the return value appears unused — kept as-is for behavior parity.
        return {self.player.set_player_param()}

    def vars_for_template(self):
        training_rounds = self.session.config["training_rounds"]
        return {
            'training': self.round_number <= training_rounds,
            'training_round': self.round_number,
            'round': self.round_number - training_rounds,
        }
class No3Start3(Page):
    """Round-start page for rounds 24-33."""

    def is_displayed(self):
        return self.round_number in tuple(range(24, 34))

    def before_next_page(self, **kwargs):
        # NOTE(review): the set wrapper around the call looks vestigial;
        # the return value appears unused — kept as-is for behavior parity.
        return {self.player.set_player_param()}

    def vars_for_template(self):
        training_rounds = self.session.config["training_rounds"]
        return {
            'training': self.round_number <= training_rounds,
            'training_round': self.round_number,
            'round': self.round_number - training_rounds,
        }
class No4Purchase0(Page):
    """Allocation (purchase) page for the training rounds (1-3).

    Collects point allocations across five lotteries and realizes payoffs
    before the next page.
    """

    form_model = 'player'

    def is_displayed(self):
        return self.round_number in (1, 2, 3)

    def get_form_fields(self):
        return ['Lotere_A', 'Lotere_B', 'Lotere_C', 'Lotere_D', 'Lotere_E']

    def vars_for_template(self):
        p = self.participant.vars['displayed_prospects']
        cfg = self.session.config
        context = {
            'p': p,
            'rand_index': self.participant.vars["random_indexes"],
            'payoff_vector': self.participant.vars["payoff_vector"],
            'endowment': cfg["endowment"],
            'training': self.round_number <= cfg["training_rounds"],
            'training_round': self.round_number,
            'round': self.round_number - cfg["training_rounds"],
            'df': self.participant.vars["prospect_table"],
            'pagehold_timer': cfg['submit_delay'],
            'pagehold_timer_ths': cfg['submit_delay'] * 1000,
        }
        # Unpack columns 2..7 of each of the five prospect rows into the
        # flat lot_i / gain / prob / rel keys the template expects.
        for i in range(5):
            n = i + 1
            context['lot_{0}'.format(n)] = p.iloc[i, 2]
            context['gain_A_{0}'.format(n)] = p.iloc[i, 3]
            context['prob_A_{0}'.format(n)] = p.iloc[i, 4]
            context['gain_B_{0}'.format(n)] = p.iloc[i, 5]
            context['prob_B_{0}'.format(n)] = p.iloc[i, 6]
            context['rel_{0}'.format(n)] = p.iloc[i, 7]
        return context

    def error_message(self, values):
        # The total allocation may not exceed the endowment.
        total = sum(values[f] for f in ('Lotere_A', 'Lotere_B', 'Lotere_C',
                                        'Lotere_D', 'Lotere_E'))
        if total <= self.session.config["endowment"]:
            return
        return 'Total alokasi untuk seluruh alternatif tidak boleh lebih dari {0} poin!'.format(
            str(self.session.config["endowment"]))

    def before_next_page(self, **kwargs):
        # NOTE(review): the set wrapper looks vestigial; return value
        # appears unused — kept as-is for behavior parity.
        return {self.player.payoff_realizer()}
class No4Purchase1(Page):
    """Allocation (purchase) page for rounds 4-13."""

    form_model = 'player'

    def is_displayed(self):
        return self.round_number in tuple(range(4, 14))

    def get_form_fields(self):
        return ['Lotere_A', 'Lotere_B', 'Lotere_C', 'Lotere_D', 'Lotere_E']

    def vars_for_template(self):
        p = self.participant.vars['displayed_prospects']
        cfg = self.session.config
        context = {
            'p': p,
            'rand_index': self.participant.vars["random_indexes"],
            'payoff_vector': self.participant.vars["payoff_vector"],
            'endowment': cfg["endowment"],
            'training': self.round_number <= cfg["training_rounds"],
            'training_round': self.round_number,
            'round': self.round_number - cfg["training_rounds"],
            'df': self.participant.vars["prospect_table"],
            'pagehold_timer': cfg['submit_delay'],
            'pagehold_timer_ths': cfg['submit_delay'] * 1000,
        }
        # Unpack columns 2..7 of each of the five prospect rows into the
        # flat lot_i / gain / prob / rel keys the template expects.
        for i in range(5):
            n = i + 1
            context['lot_{0}'.format(n)] = p.iloc[i, 2]
            context['gain_A_{0}'.format(n)] = p.iloc[i, 3]
            context['prob_A_{0}'.format(n)] = p.iloc[i, 4]
            context['gain_B_{0}'.format(n)] = p.iloc[i, 5]
            context['prob_B_{0}'.format(n)] = p.iloc[i, 6]
            context['rel_{0}'.format(n)] = p.iloc[i, 7]
        return context

    def error_message(self, values):
        # The total allocation may not exceed the endowment.
        total = sum(values[f] for f in ('Lotere_A', 'Lotere_B', 'Lotere_C',
                                        'Lotere_D', 'Lotere_E'))
        if total <= self.session.config["endowment"]:
            return
        return 'Total alokasi untuk seluruh alternatif tidak boleh lebih dari {0} poin!'.format(
            str(self.session.config["endowment"]))

    def before_next_page(self, **kwargs):
        # NOTE(review): the set wrapper looks vestigial; return value
        # appears unused — kept as-is for behavior parity.
        return {self.player.payoff_realizer()}
class No4Purchase2(Page):
    """Allocation (purchase) page for rounds 14-23."""

    form_model = 'player'

    def is_displayed(self):
        return self.round_number in tuple(range(14, 24))

    def get_form_fields(self):
        return ['Lotere_A', 'Lotere_B', 'Lotere_C', 'Lotere_D', 'Lotere_E']

    def vars_for_template(self):
        p = self.participant.vars['displayed_prospects']
        cfg = self.session.config
        context = {
            'p': p,
            'rand_index': self.participant.vars["random_indexes"],
            'payoff_vector': self.participant.vars["payoff_vector"],
            'endowment': cfg["endowment"],
            'training': self.round_number <= cfg["training_rounds"],
            'training_round': self.round_number,
            'round': self.round_number - cfg["training_rounds"],
            'df': self.participant.vars["prospect_table"],
            'pagehold_timer': cfg['submit_delay'],
            'pagehold_timer_ths': cfg['submit_delay'] * 1000,
        }
        # Unpack columns 2..7 of each of the five prospect rows into the
        # flat lot_i / gain / prob / rel keys the template expects.
        for i in range(5):
            n = i + 1
            context['lot_{0}'.format(n)] = p.iloc[i, 2]
            context['gain_A_{0}'.format(n)] = p.iloc[i, 3]
            context['prob_A_{0}'.format(n)] = p.iloc[i, 4]
            context['gain_B_{0}'.format(n)] = p.iloc[i, 5]
            context['prob_B_{0}'.format(n)] = p.iloc[i, 6]
            context['rel_{0}'.format(n)] = p.iloc[i, 7]
        return context

    def error_message(self, values):
        # The total allocation may not exceed the endowment.
        total = sum(values[f] for f in ('Lotere_A', 'Lotere_B', 'Lotere_C',
                                        'Lotere_D', 'Lotere_E'))
        if total <= self.session.config["endowment"]:
            return
        return 'Total alokasi untuk seluruh alternatif tidak boleh lebih dari {0} poin!'.format(
            str(self.session.config["endowment"]))

    def before_next_page(self, **kwargs):
        # NOTE(review): the set wrapper looks vestigial; return value
        # appears unused — kept as-is for behavior parity.
        return {self.player.payoff_realizer()}
class No4Purchase3(Page):
    """Allocation (purchase) page for rounds 24-33."""

    form_model = 'player'

    def is_displayed(self):
        return self.round_number in tuple(range(24, 34))

    def get_form_fields(self):
        return ['Lotere_A', 'Lotere_B', 'Lotere_C', 'Lotere_D', 'Lotere_E']

    def vars_for_template(self):
        p = self.participant.vars['displayed_prospects']
        cfg = self.session.config
        context = {
            'p': p,
            'rand_index': self.participant.vars["random_indexes"],
            'payoff_vector': self.participant.vars["payoff_vector"],
            'endowment': cfg["endowment"],
            'training': self.round_number <= cfg["training_rounds"],
            'training_round': self.round_number,
            'round': self.round_number - cfg["training_rounds"],
            'df': self.participant.vars["prospect_table"],
            'pagehold_timer': cfg['submit_delay'],
            'pagehold_timer_ths': cfg['submit_delay'] * 1000,
        }
        # Unpack columns 2..7 of each of the five prospect rows into the
        # flat lot_i / gain / prob / rel keys the template expects.
        for i in range(5):
            n = i + 1
            context['lot_{0}'.format(n)] = p.iloc[i, 2]
            context['gain_A_{0}'.format(n)] = p.iloc[i, 3]
            context['prob_A_{0}'.format(n)] = p.iloc[i, 4]
            context['gain_B_{0}'.format(n)] = p.iloc[i, 5]
            context['prob_B_{0}'.format(n)] = p.iloc[i, 6]
            context['rel_{0}'.format(n)] = p.iloc[i, 7]
        return context

    def error_message(self, values):
        # The total allocation may not exceed the endowment.
        total = sum(values[f] for f in ('Lotere_A', 'Lotere_B', 'Lotere_C',
                                        'Lotere_D', 'Lotere_E'))
        if total <= self.session.config["endowment"]:
            return
        return 'Total alokasi untuk seluruh alternatif tidak boleh lebih dari {0} poin!'.format(
            str(self.session.config["endowment"]))

    def before_next_page(self, **kwargs):
        # NOTE(review): the set wrapper looks vestigial; return value
        # appears unused — kept as-is for behavior parity.
        return {self.player.payoff_realizer()}
class No5Result0(Page):
    """Result page for the training rounds (1-3)."""

    def is_displayed(self):
        return self.round_number in (1, 2, 3)

    def vars_for_template(self):
        prospects = self.participant.vars['displayed_prospects']
        df = prospects[["x1", "x2", "Allocation", "A_or_B", "payoff"]]
        cfg = self.session.config
        context = {
            'training': self.round_number <= cfg["training_rounds"],
            'training_round': self.round_number,
            'round': self.round_number - cfg["training_rounds"],
            'payoff_thisround': self.player.payoff_thisround,
        }
        # Grid of per-lottery result cells: key '<letter><row>' maps the
        # column letter (A..E -> columns 0..4) at the given row (1..5).
        for row in range(5):
            for col, letter in enumerate('ABCDE'):
                context['{0}{1}'.format(letter, row + 1)] = df.iloc[row, col]
        # Lottery labels come from column 2 of the full prospect frame.
        for row in range(5):
            context['lot_{0}'.format(row + 1)] = prospects.iloc[row, 2]
        return context
class No5Result1(Page):
    """Outcome page for paid rounds 4-13 (first paid block).

    Shows each lottery's realized outcome and records this round's payoff
    into participant.vars.
    """

    def is_displayed(self):
        return self.round_number in [4, 5, 6, 7, 8, 9, 10, 11, 12, 13]

    def vars_for_template(self):
        # Realized outcomes: drawn prizes, allocation, realized branch, payoff.
        df = self.participant.vars['displayed_prospects'][["x1", "x2", "Allocation", "A_or_B", "payoff"]]
        p = self.participant.vars['displayed_prospects']
        # Fix: store this round's payoff under its per-round key, then
        # recompute the running total from the per-round keys (paid rounds
        # are 4-33). The previous in-place `+=` double-counted the round
        # whenever vars_for_template ran more than once for the same page
        # view (e.g. a browser refresh); recomputing is idempotent.
        self.participant.vars['payoff_round_' + str(self.round_number)] = self.player.payoff_thisround
        self.participant.vars['payoff_round_all'] = sum(
            self.participant.vars.get('payoff_round_' + str(r), 0) for r in range(4, 34)
        )
        return {
            'training': self.round_number <= self.session.config["training_rounds"],
            'training_round': self.round_number,
            'round': self.round_number - self.session.config["training_rounds"],
            'A1': df.iloc[0, 0], 'B1': df.iloc[0, 1], 'C1': df.iloc[0, 2], 'D1': df.iloc[0, 3], 'E1': df.iloc[0, 4],
            'A2': df.iloc[1, 0], 'B2': df.iloc[1, 1], 'C2': df.iloc[1, 2], 'D2': df.iloc[1, 3], 'E2': df.iloc[1, 4],
            'A3': df.iloc[2, 0], 'B3': df.iloc[2, 1], 'C3': df.iloc[2, 2], 'D3': df.iloc[2, 3], 'E3': df.iloc[2, 4],
            'A4': df.iloc[3, 0], 'B4': df.iloc[3, 1], 'C4': df.iloc[3, 2], 'D4': df.iloc[3, 3], 'E4': df.iloc[3, 4],
            'A5': df.iloc[4, 0], 'B5': df.iloc[4, 1], 'C5': df.iloc[4, 2], 'D5': df.iloc[4, 3], 'E5': df.iloc[4, 4],
            'payoff_thisround': self.player.payoff_thisround,
            'lot_1': p.iloc[0, 2],
            'lot_2': p.iloc[1, 2],
            'lot_3': p.iloc[2, 2],
            'lot_4': p.iloc[3, 2],
            'lot_5': p.iloc[4, 2],
        }
class No5Result2(Page):
    """Outcome page for paid rounds 14-23 (second paid block).

    Shows each lottery's realized outcome and records this round's payoff
    into participant.vars.
    """

    def is_displayed(self):
        return self.round_number in [14, 15, 16, 17, 18, 19, 20, 21, 22, 23]

    def vars_for_template(self):
        # Realized outcomes: drawn prizes, allocation, realized branch, payoff.
        df = self.participant.vars['displayed_prospects'][["x1", "x2", "Allocation", "A_or_B", "payoff"]]
        p = self.participant.vars['displayed_prospects']
        # Fix: store this round's payoff under its per-round key, then
        # recompute the running total from the per-round keys (paid rounds
        # are 4-33). The previous in-place `+=` double-counted the round
        # whenever vars_for_template ran more than once for the same page
        # view (e.g. a browser refresh); recomputing is idempotent.
        self.participant.vars['payoff_round_' + str(self.round_number)] = self.player.payoff_thisround
        self.participant.vars['payoff_round_all'] = sum(
            self.participant.vars.get('payoff_round_' + str(r), 0) for r in range(4, 34)
        )
        return {
            'training': self.round_number <= self.session.config["training_rounds"],
            'training_round': self.round_number,
            'round': self.round_number - self.session.config["training_rounds"],
            'A1': df.iloc[0, 0], 'B1': df.iloc[0, 1], 'C1': df.iloc[0, 2], 'D1': df.iloc[0, 3], 'E1': df.iloc[0, 4],
            'A2': df.iloc[1, 0], 'B2': df.iloc[1, 1], 'C2': df.iloc[1, 2], 'D2': df.iloc[1, 3], 'E2': df.iloc[1, 4],
            'A3': df.iloc[2, 0], 'B3': df.iloc[2, 1], 'C3': df.iloc[2, 2], 'D3': df.iloc[2, 3], 'E3': df.iloc[2, 4],
            'A4': df.iloc[3, 0], 'B4': df.iloc[3, 1], 'C4': df.iloc[3, 2], 'D4': df.iloc[3, 3], 'E4': df.iloc[3, 4],
            'A5': df.iloc[4, 0], 'B5': df.iloc[4, 1], 'C5': df.iloc[4, 2], 'D5': df.iloc[4, 3], 'E5': df.iloc[4, 4],
            'payoff_thisround': self.player.payoff_thisround,
            'lot_1': p.iloc[0, 2],
            'lot_2': p.iloc[1, 2],
            'lot_3': p.iloc[2, 2],
            'lot_4': p.iloc[3, 2],
            'lot_5': p.iloc[4, 2],
        }
class No5Result3(Page):
    """Outcome page for paid rounds 24-33 (third paid block).

    Shows each lottery's realized outcome and records this round's payoff
    into participant.vars.
    """

    def is_displayed(self):
        return self.round_number in [24, 25, 26, 27, 28, 29, 30, 31, 32, 33]

    def vars_for_template(self):
        # Realized outcomes: drawn prizes, allocation, realized branch, payoff.
        df = self.participant.vars['displayed_prospects'][["x1", "x2", "Allocation", "A_or_B", "payoff"]]
        p = self.participant.vars['displayed_prospects']
        # Fix: store this round's payoff under its per-round key, then
        # recompute the running total from the per-round keys (paid rounds
        # are 4-33). The previous in-place `+=` double-counted the round
        # whenever vars_for_template ran more than once for the same page
        # view (e.g. a browser refresh); recomputing is idempotent.
        self.participant.vars['payoff_round_' + str(self.round_number)] = self.player.payoff_thisround
        self.participant.vars['payoff_round_all'] = sum(
            self.participant.vars.get('payoff_round_' + str(r), 0) for r in range(4, 34)
        )
        return {
            'training': self.round_number <= self.session.config["training_rounds"],
            'training_round': self.round_number,
            'round': self.round_number - self.session.config["training_rounds"],
            'A1': df.iloc[0, 0], 'B1': df.iloc[0, 1], 'C1': df.iloc[0, 2], 'D1': df.iloc[0, 3], 'E1': df.iloc[0, 4],
            'A2': df.iloc[1, 0], 'B2': df.iloc[1, 1], 'C2': df.iloc[1, 2], 'D2': df.iloc[1, 3], 'E2': df.iloc[1, 4],
            'A3': df.iloc[2, 0], 'B3': df.iloc[2, 1], 'C3': df.iloc[2, 2], 'D3': df.iloc[2, 3], 'E3': df.iloc[2, 4],
            'A4': df.iloc[3, 0], 'B4': df.iloc[3, 1], 'C4': df.iloc[3, 2], 'D4': df.iloc[3, 3], 'E4': df.iloc[3, 4],
            'A5': df.iloc[4, 0], 'B5': df.iloc[4, 1], 'C5': df.iloc[4, 2], 'D5': df.iloc[4, 3], 'E5': df.iloc[4, 4],
            'payoff_thisround': self.player.payoff_thisround,
            'lot_1': p.iloc[0, 2],
            'lot_2': p.iloc[1, 2],
            'lot_3': p.iloc[2, 2],
            'lot_4': p.iloc[3, 2],
            'lot_5': p.iloc[4, 2],
        }
class No6EndQuestionnaire(Page):
    """Final questionnaire, shown only on the experiment's last round."""

    def is_displayed(self):
        last_round = self.session.config['rounds']
        return self.round_number == last_round
class No6EndResult(Page):
    """Final results page: shows the randomly selected paid round and totals.

    Payoffs are converted to rupiah as ``points * 1000 + 10000`` (10,000 Rp
    show-up fee plus 1,000 Rp per point).
    """

    def is_displayed(self):
        return self.round_number == self.session.config['rounds']

    def vars_for_template(self):
        # Fix: draw the paid round once and persist it. vars_for_template can
        # run again for the same page view (e.g. a browser refresh), and the
        # previous code re-drew randint(4, 33) on every call, silently
        # changing which round the participant is paid for.
        if 'payment_round' not in self.participant.vars:
            self.participant.vars['payment_round'] = randint(4, 33)
        rnd = self.participant.vars['payment_round']
        payoff_selected = self.participant.vars['payoff_round_' + str(rnd)]
        payoff_all = self.participant.vars['payoff_round_all']
        return {
            'player_payoff': self.player.payoff,
            'round_selected': rnd,
            'payoff_selected': payoff_selected,
            'payoff_selected_rupiah': payoff_selected * 1000 + 10000,
            'payoff_all': payoff_all,
            'payoff_all_rupiah': payoff_all * 1000 + 10000,
        }
# Page order for a single round. The Start/Purchase/Result triple appears in
# four variants (0-3); each variant's is_displayed() restricts it to its own
# block of rounds (training 1-3, then paid blocks 4-13, 14-23, 24-33), so only
# one triple actually renders in any given round. The introduction/instruction
# pages and the end pages are likewise gated to the first and last rounds.
page_sequence = [No1Introduction,
No2Instructions1,
No2Instructions2,
No2Instructions3,
No2Instructions4,
No2Warning,
No3Start0,
No4Purchase0,
No5Result0,
No3Start1,
No4Purchase1,
No5Result1,
No3Start2,
No4Purchase2,
No5Result2,
No3Start3,
No4Purchase3,
No5Result3,
No6EndQuestionnaire,
No6EndResult]
| 45.5186
| 118
| 0.567686
| 3,047
| 20,802
| 3.68428
| 0.059075
| 0.062355
| 0.078835
| 0.050775
| 0.922145
| 0.918582
| 0.909763
| 0.909763
| 0.908516
| 0.908516
| 0
| 0.065499
| 0.250649
| 20,802
| 456
| 119
| 45.618421
| 0.65467
| 0
| 0
| 0.803714
| 0
| 0
| 0.195654
| 0.003942
| 0
| 0
| 0
| 0
| 0
| 1
| 0.143236
| false
| 0
| 0.005305
| 0.095491
| 0.352785
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d6b495351ca2730040fe83ff86f1e78ac5d1c3f
| 116,566
|
py
|
Python
|
tools/networks.py
|
Morwenn/SOSN
|
bea195b8c97255a0526117a3fcac5274389b94d2
|
[
"BSL-1.0"
] | 1
|
2021-07-16T00:33:00.000Z
|
2021-07-16T00:33:00.000Z
|
tools/networks.py
|
Morwenn/SOSN
|
bea195b8c97255a0526117a3fcac5274389b94d2
|
[
"BSL-1.0"
] | 3
|
2021-07-12T08:46:57.000Z
|
2021-07-12T08:50:07.000Z
|
tools/networks.py
|
Morwenn/SOSN
|
bea195b8c97255a0526117a3fcac5274389b94d2
|
[
"BSL-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
networks = {
13: [
[(0,12),(1,10),(2,9),(3,7),(5,11),(6,8)],
[(1,6),(2,3),(4,11),(7,9),(8,10)],
[(0,4),(1,2),(3,6),(7,8),(9,10),(11,12)],
[(4,6),(5,9),(8,11),(10,12)],
[(0,5),(3,8),(4,7),(6,11),(9,10)],
[(0,1),(2,5),(6,9),(7,8),(10,11)],
[(1,3),(2,4),(5,6),(9,10)],
[(1,2),(3,4),(5,7),(6,8)],
[(2,3),(4,5),(6,7),(8,9)],
[(3,4),(5,6)],
],
14: [
[(0,6),(1,11),(2,12),(3,10),(4,5),(7,13),(8,9)],
[(1,2),(3,7),(4,8),(5,9),(6,10),(11,12)],
[(0,4),(1,3),(5,6),(7,8),(9,13),(10,12)],
[(0,1),(2,9),(3,7),(4,11),(6,10),(12,13)],
[(2,5),(4,7),(6,9),(8,11)],
[(1,2),(3,4),(6,7),(9,10),(11,12)],
[(1,3),(2,4),(5,6),(7,8),(9,11),(10,12)],
[(2,3),(4,7),(6,9),(10,11)],
[(4,5),(6,7),(8,9)],
[(3,4),(5,6),(7,8),(9,10)],
],
15: [
[(1,2),(3,10),(4,14),(5,8),(6,13),(7,12),(9,11)],
[(0,14),(1,5),(2,8),(3,7),(6,9),(10,12),(11,13)],
[(0,7),(1,6),(2,9),(4,10),(5,11),(8,13),(12,14)],
[(0,6),(2,4),(3,5),(7,11),(8,10),(9,12),(13,14)],
[(0,3),(1,2),(4,7),(5,9),(6,8),(10,11),(12,13)],
[(0,1),(2,3),(4,6),(7,9),(10,12),(11,13)],
[(1,2),(3,5),(8,10),(11,12)],
[(3,4),(5,6),(7,8),(9,10)],
[(2,3),(4,5),(6,7),(8,9),(10,11)],
[(5,6),(7,8)],
],
16: [
[(0,13),(1,12),(2,15),(3,14),(4,8),(5,6),(7,11),(9,10)],
[(0,5),(1,7),(2,9),(3,4),(6,13),(8,14),(10,15),(11,12)],
[(0,1),(2,3),(4,5),(6,8),(7,9),(10,11),(12,13),(14,15)],
[(0,2),(1,3),(4,10),(5,11),(6,7),(8,9),(12,14),(13,15)],
[(1,2),(3,12),(4,6),(5,7),(8,10),(9,11),(13,14)],
[(1,4),(2,6),(5,8),(7,10),(9,13),(11,14)],
[(2,4),(3,6),(9,12),(11,13)],
[(3,5),(6,8),(7,9),(10,12)],
[(3,4),(5,6),(7,8),(9,10),(11,12)],
[(6,7),(8,9)],
],
17: [
[(0,11),(1,15),(2,10),(3,5),(4,6),(8,12),(9,16),(13,14)],
[(0,6),(1,13),(2,8),(4,14),(5,15),(7,11)],
[(0,8),(3,7),(4,9),(6,16),(10,11),(12,14)],
[(0,2),(1,4),(5,6),(7,13),(8,9),(10,12),(11,14),(15,16)],
[(0,3),(2,5),(6,11),(7,10),(9,13),(12,15),(14,16)],
[(0,1),(3,4),(5,10),(6,9),(7,8),(11,15),(13,14)],
[(1,2),(3,7),(4,8),(6,12),(11,13),(14,15)],
[(1,3),(2,7),(4,5),(9,11),(10,12),(13,14)],
[(2,3),(4,6),(5,7),(8,10)],
[(3,4),(6,8),(7,9),(10,12)],
[(5,6),(7,8),(9,10),(11,12)],
[(4,5),(6,7),(8,9),(10,11),(12,13)],
],
18: [
[(0,1),(2,3),(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17)],
[(1,5),(2,6),(3,7),(4,10),(8,16),(9,17),(12,14),(13,15)],
[(0,8),(1,10),(2,12),(3,14),(6,13),(7,15),(9,16),(11,17)],
[(0,4),(1,9),(5,17),(8,11),(10,16)],
[(0,2),(1,6),(4,10),(5,9),(14,16),(15,17)],
[(1,2),(3,10),(4,12),(5,7),(6,14),(9,13),(15,16)],
[(3,8),(5,12),(7,11),(9,10)],
[(3,4),(6,8),(7,14),(9,12),(11,13)],
[(1,3),(2,4),(7,9),(8,12),(11,15),(13,16)],
[(2,3),(4,5),(6,7),(10,11),(12,14),(13,15)],
[(4,6),(5,8),(9,10),(11,14)],
[(3,4),(5,7),(8,9),(10,12),(13,14)],
[(5,6),(7,8),(9,10),(11,12)],
],
19: [
[(0,12),(1,4),(2,8),(3,5),(6,17),(7,11),(9,14),(10,13),(15,16)],
[(0,2),(1,7),(3,6),(4,11),(5,17),(8,12),(10,15),(13,16),(14,18)],
[(3,10),(4,14),(5,15),(6,13),(7,9),(11,17),(16,18)],
[(0,7),(1,10),(4,6),(9,15),(11,16),(12,17),(13,14)],
[(0,3),(2,6),(5,7),(8,11),(12,16)],
[(1,8),(2,9),(3,4),(6,15),(7,13),(10,11),(12,18)],
[(1,3),(2,5),(6,9),(7,12),(8,10),(11,14),(17,18)],
[(0,1),(2,3),(4,8),(6,10),(9,12),(14,15),(16,17)],
[(1,2),(5,8),(6,7),(9,11),(10,13),(14,16),(15,17)],
[(3,6),(4,5),(7,9),(8,10),(11,12),(13,14),(15,16)],
[(3,4),(5,6),(7,8),(9,10),(11,13),(12,14)],
[(2,3),(4,5),(6,7),(8,9),(10,11),(12,13),(14,15)],
],
20: [
[(0,3),(1,7),(2,5),(4,8),(6,9),(10,13),(11,15),(12,18),(14,17),(16,19)],
[(0,14),(1,11),(2,16),(3,17),(4,12),(5,19),(6,10),(7,15),(8,18),(9,13)],
[(0,4),(1,2),(3,8),(5,7),(11,16),(12,14),(15,19),(17,18)],
[(1,6),(2,12),(3,5),(4,11),(7,17),(8,15),(13,18),(14,16)],
[(0,1),(2,6),(7,10),(9,12),(13,17),(18,19)],
[(1,6),(5,9),(7,11),(8,12),(10,14),(13,18)],
[(3,5),(4,7),(8,10),(9,11),(12,15),(14,16)],
[(1,3),(2,4),(5,7),(6,10),(9,13),(12,14),(15,17),(16,18)],
[(1,2),(3,4),(6,7),(8,9),(10,11),(12,13),(15,16),(17,18)],
[(2,3),(4,6),(5,8),(7,9),(10,12),(11,14),(13,15),(16,17)],
[(4,5),(6,8),(7,10),(9,12),(11,13),(14,15)],
[(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16)],
],
21: [
[(0,7),(1,10),(3,5),(4,8),(6,13),(9,19),(11,14),(12,17),(15,16),(18,20)],
[(0,11),(1,15),(2,12),(3,4),(5,8),(6,9),(7,14),(10,16),(13,19),(17,20)],
[(0,6),(1,3),(2,18),(4,15),(5,10),(8,16),(11,17),(12,13),(14,20)],
[(2,6),(5,12),(7,18),(8,14),(9,11),(10,17),(13,19),(16,20)],
[(1,2),(4,7),(5,9),(6,17),(10,13),(11,12),(14,19),(15,18)],
[(0,2),(3,6),(4,5),(7,10),(8,11),(9,15),(12,16),(13,18),(14,17),(19,20)],
[(0,1),(2,3),(5,9),(6,12),(7,8),(11,14),(13,15),(16,19),(17,18)],
[(1,2),(3,9),(6,13),(10,11),(12,15),(16,17),(18,19)],
[(1,4),(2,5),(3,7),(6,10),(8,9),(11,12),(13,14),(17,18)],
[(2,4),(5,6),(7,8),(9,11),(10,13),(12,15),(14,16)],
[(3,4),(5,7),(6,8),(9,10),(11,13),(12,14),(15,16)],
[(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17)],
],
22: [
[(0,1),(2,3),(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21)],
[(0,12),(1,13),(2,6),(3,7),(4,10),(8,20),(9,21),(11,17),(14,18),(15,19)],
[(0,2),(1,6),(3,12),(4,16),(5,17),(7,13),(8,14),(9,18),(15,20),(19,21)],
[(0,8),(1,15),(2,14),(3,9),(5,11),(6,20),(7,19),(10,16),(12,18),(13,21)],
[(0,4),(1,10),(3,8),(5,9),(7,14),(11,20),(12,16),(13,18),(17,21)],
[(1,3),(2,5),(4,8),(6,9),(7,10),(11,14),(12,15),(13,17),(16,19),(18,20)],
[(2,4),(3,12),(5,8),(6,11),(9,18),(10,15),(13,16),(17,19)],
[(1,2),(3,4),(5,7),(6,12),(8,11),(9,15),(10,13),(14,16),(17,18),(19,20)],
[(2,3),(4,5),(7,12),(8,10),(9,14),(11,13),(16,17),(18,19)],
[(4,6),(5,8),(9,11),(10,12),(13,16),(15,17)],
[(3,4),(6,7),(9,10),(11,12),(14,15),(17,18)],
[(5,6),(7,8),(10,11),(13,14),(15,16)],
[(6,7),(8,9),(12,13),(14,15)],
],
23: [
[(0,20),(1,12),(2,16),(4,6),(5,10),(7,21),(8,14),(9,15),(11,22),(13,18),(17,19)],
[(0,3),(1,11),(2,7),(4,17),(5,13),(6,19),(8,9),(10,18),(12,22),(14,15),(16,21)],
[(0,1),(2,4),(3,12),(5,8),(6,9),(7,10),(11,20),(13,16),(14,17),(15,18),(19,21)],
[(2,5),(4,8),(6,11),(7,14),(9,16),(12,17),(15,19),(18,21)],
[(1,8),(3,14),(4,7),(9,20),(10,12),(11,13),(15,22),(16,19)],
[(0,7),(1,5),(3,4),(6,11),(8,15),(9,14),(10,13),(12,17),(18,22),(19,20)],
[(0,2),(1,6),(4,7),(5,9),(8,10),(13,15),(14,18),(16,19),(17,22),(20,21)],
[(2,3),(4,5),(6,8),(7,9),(10,11),(12,13),(14,16),(15,17),(18,19),(21,22)],
[(1,2),(3,6),(4,10),(7,8),(9,11),(12,14),(13,19),(15,16),(17,20)],
[(2,3),(5,10),(6,7),(8,9),(13,18),(14,15),(16,17),(20,21)],
[(3,4),(5,7),(10,12),(11,13),(16,18),(19,20)],
[(4,6),(8,10),(9,12),(11,14),(13,15),(17,19)],
[(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18)],
],
24: [
[(0,20),(1,12),(2,16),(3,23),(4,6),(5,10),(7,21),(8,14),(9,15),(11,22),(13,18),(17,19)],
[(0,3),(1,11),(2,7),(4,17),(5,13),(6,19),(8,9),(10,18),(12,22),(14,15),(16,21),(20,23)],
[(0,1),(2,4),(3,12),(5,8),(6,9),(7,10),(11,20),(13,16),(14,17),(15,18),(19,21),(22,23)],
[(2,5),(4,8),(6,11),(7,14),(9,16),(12,17),(15,19),(18,21)],
[(1,8),(3,14),(4,7),(9,20),(10,12),(11,13),(15,22),(16,19)],
[(0,7),(1,5),(3,4),(6,11),(8,15),(9,14),(10,13),(12,17),(16,23),(18,22),(19,20)],
[(0,2),(1,6),(4,7),(5,9),(8,10),(13,15),(14,18),(16,19),(17,22),(21,23)],
[(2,3),(4,5),(6,8),(7,9),(10,11),(12,13),(14,16),(15,17),(18,19),(20,21)],
[(1,2),(3,6),(4,10),(7,8),(9,11),(12,14),(13,19),(15,16),(17,20),(21,22)],
[(2,3),(5,10),(6,7),(8,9),(13,18),(14,15),(16,17),(20,21)],
[(3,4),(5,7),(10,12),(11,13),(16,18),(19,20)],
[(4,6),(8,10),(9,12),(11,14),(13,15),(17,19)],
[(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18)],
],
25: [
[(0,2),(1,8),(3,18),(4,17),(5,20),(6,19),(7,9),(10,11),(12,13),(14,16),(15,22),(21,23)],
[(0,3),(1,15),(2,18),(4,12),(5,21),(6,10),(7,14),(8,22),(9,16),(11,19),(13,17),(20,23)],
[(0,4),(1,7),(2,13),(3,12),(5,6),(8,14),(9,15),(10,21),(11,20),(16,22),(17,18),(19,23)],
[(0,5),(2,11),(3,6),(4,10),(7,16),(8,9),(12,21),(13,19),(14,15),(17,20),(18,23)],
[(2,7),(6,9),(8,11),(14,24),(18,21)],
[(3,8),(7,10),(11,12),(13,14),(15,21),(18,20),(22,24)],
[(4,13),(10,16),(11,15),(18,24),(19,22)],
[(1,4),(8,11),(9,19),(13,17),(14,18),(16,20),(23,24)],
[(0,1),(4,5),(6,13),(9,14),(10,17),(12,16),(18,19),(20,21),(22,23)],
[(2,6),(3,4),(5,13),(7,9),(12,18),(15,17),(16,19),(20,22),(21,23)],
[(1,2),(5,8),(6,7),(9,10),(11,13),(14,15),(17,20),(21,22)],
[(1,3),(2,4),(5,6),(7,11),(8,9),(10,13),(12,14),(15,16),(17,18),(19,20)],
[(2,3),(4,8),(6,7),(9,12),(10,11),(13,14),(15,17),(16,18),(20,21)],
[(3,5),(4,6),(7,8),(9,10),(11,12),(13,15),(14,17),(16,19)],
[(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19)],
],
26: [
[(0,25),(1,3),(2,9),(4,19),(5,18),(6,21),(7,20),(8,10),(11,12),(13,14),(15,17),(16,23),(22,24)],
[(1,4),(2,16),(3,19),(5,13),(6,22),(7,11),(8,15),(9,23),(10,17),(12,20),(14,18),(21,24)],
[(1,5),(2,8),(3,14),(4,13),(6,7),(9,15),(10,16),(11,22),(12,21),(17,23),(18,19),(20,24)],
[(0,10),(1,6),(3,7),(4,11),(5,12),(13,20),(14,21),(15,25),(18,22),(19,24)],
[(0,4),(8,10),(12,13),(15,17),(21,25)],
[(0,2),(4,8),(10,12),(13,15),(17,21),(23,25)],
[(0,1),(2,3),(4,5),(8,14),(9,13),(11,17),(12,16),(20,21),(22,23),(24,25)],
[(1,4),(3,10),(6,9),(7,13),(8,11),(12,18),(14,17),(15,22),(16,19),(21,24)],
[(2,6),(3,8),(5,7),(9,12),(13,16),(17,22),(18,20),(19,23)],
[(1,2),(4,6),(5,9),(7,10),(11,12),(13,14),(15,18),(16,20),(19,21),(23,24)],
[(2,4),(3,5),(7,13),(8,9),(10,14),(11,15),(12,18),(16,17),(20,22),(21,23)],
[(3,4),(6,9),(7,11),(10,12),(13,15),(14,18),(16,19),(21,22)],
[(5,7),(6,8),(9,13),(10,11),(12,16),(14,15),(17,19),(18,20)],
[(5,6),(7,8),(9,10),(11,13),(12,14),(15,16),(17,18),(19,20)],
[(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21)],
],
27: [
[(0,9),(1,6),(2,4),(3,7),(5,8),(11,24),(12,23),(13,26),(14,25),(15,19),(16,17),(18,22),(20,21)],
[(0,1),(3,5),(4,10),(6,9),(7,8),(11,16),(12,18),(13,20),(14,15),(17,24),(19,25),(21,26),(22,23)],
[(1,3),(2,5),(4,7),(8,10),(11,12),(13,14),(15,16),(17,19),(18,20),(21,22),(23,24),(25,26)],
[(0,4),(1,2),(3,7),(5,9),(6,8),(11,13),(12,14),(15,21),(16,22),(17,18),(19,20),(23,25),(24,26)],
[(0,1),(2,6),(4,5),(7,8),(9,10),(12,13),(14,23),(15,17),(16,18),(19,21),(20,22),(24,25)],
[(0,11),(2,4),(3,6),(5,7),(8,9),(12,15),(13,17),(16,19),(18,21),(20,24),(22,25)],
[(1,2),(3,4),(5,6),(7,8),(13,15),(14,17),(20,23),(22,24)],
[(1,12),(2,3),(4,5),(6,7),(14,16),(17,19),(18,20),(21,23)],
[(2,13),(14,15),(16,17),(18,19),(20,21),(22,23)],
[(3,14),(4,15),(5,16),(10,21),(17,18),(19,20)],
[(6,17),(7,18),(8,19),(9,20),(10,13),(14,22),(15,23),(16,24)],
[(6,10),(7,14),(8,11),(9,12),(17,25),(18,26),(19,23),(20,24)],
[(4,8),(5,9),(11,15),(12,16),(13,17),(18,22),(21,25),(24,26)],
[(2,4),(3,5),(6,8),(7,9),(10,11),(12,14),(13,15),(16,18),(17,19),(20,22),(21,23),(25,26)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24)],
],
28: [
[(0,9),(1,20),(2,21),(3,22),(4,19),(5,24),(6,25),(7,26),(8,23),(10,15),(11,13),(12,17),(14,16),(18,27)],
[(0,18),(1,7),(2,6),(3,5),(4,8),(9,27),(10,12),(11,14),(13,16),(15,17),(19,23),(20,26),(21,25),(22,24)],
[(1,2),(3,4),(5,19),(6,20),(7,21),(8,22),(9,18),(10,11),(12,14),(13,15),(16,17),(23,24),(25,26)],
[(0,3),(1,10),(5,8),(6,7),(11,13),(14,16),(17,26),(19,22),(20,21),(24,27)],
[(0,1),(2,7),(3,10),(4,8),(12,13),(14,15),(17,24),(19,23),(20,25),(26,27)],
[(1,3),(2,6),(4,5),(7,19),(8,20),(11,12),(13,14),(15,16),(21,25),(22,23),(24,26)],
[(2,4),(5,12),(7,8),(9,11),(10,14),(13,17),(15,22),(16,18),(19,20),(23,25)],
[(2,9),(4,11),(5,6),(7,13),(8,10),(14,20),(16,23),(17,19),(18,25),(21,22)],
[(1,2),(3,16),(4,9),(6,12),(10,14),(11,24),(13,17),(15,21),(18,23),(25,26)],
[(2,8),(3,5),(4,7),(6,16),(9,15),(11,21),(12,18),(19,25),(20,23),(22,24)],
[(2,3),(5,8),(7,9),(11,15),(12,16),(18,20),(19,22),(24,25)],
[(6,8),(10,12),(11,13),(14,16),(15,17),(19,21)],
[(5,6),(8,10),(9,11),(12,13),(14,15),(16,18),(17,19),(21,22)],
[(4,5),(6,7),(8,9),(10,11),(12,14),(13,15),(16,17),(18,19),(20,21),(22,23)],
[(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24)],
],
29: [
[(0,12),(1,10),(2,9),(3,7),(5,11),(6,8),(13,26),(14,25),(15,28),(16,27),(17,21),(18,19),(20,24),(22,23)],
[(1,6),(2,3),(4,11),(7,9),(8,10),(13,18),(14,20),(15,22),(16,17),(19,26),(21,27),(23,28),(24,25)],
[(0,4),(1,2),(3,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,21),(20,22),(23,24),(25,26),(27,28)],
[(4,6),(5,9),(8,11),(10,12),(13,15),(14,16),(17,23),(18,24),(19,20),(21,22),(25,27),(26,28)],
[(0,5),(3,8),(4,7),(6,11),(9,10),(14,15),(16,25),(17,19),(18,20),(21,23),(22,24),(26,27)],
[(0,1),(2,5),(6,9),(7,8),(10,11),(14,17),(15,19),(18,21),(20,23),(22,26),(24,27)],
[(0,13),(1,3),(2,4),(5,6),(9,10),(15,17),(16,19),(22,25),(24,26)],
[(1,2),(3,4),(5,7),(6,8),(16,18),(19,21),(20,22),(23,25)],
[(1,14),(2,3),(4,5),(6,7),(8,9),(16,17),(18,19),(20,21),(22,23),(24,25)],
[(2,15),(3,4),(5,6),(10,23),(11,24),(12,25),(19,20),(21,22)],
[(3,16),(4,17),(5,18),(6,19),(7,20),(8,21),(9,22),(10,15)],
[(6,10),(8,13),(9,14),(11,16),(12,17),(18,26),(19,27),(20,28)],
[(4,8),(5,9),(7,11),(12,13),(14,18),(15,19),(16,20),(17,21),(22,26),(23,27),(24,28)],
[(2,4),(3,5),(6,8),(7,9),(10,12),(11,14),(13,15),(16,18),(17,19),(20,22),(21,23),(24,26),(25,27)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28)],
],
30: [
[(1,2),(3,10),(4,14),(5,8),(6,13),(7,12),(9,11),(16,17),(18,25),(19,29),(20,23),(21,28),(22,27),(24,26)],
[(0,14),(1,5),(2,8),(3,7),(6,9),(10,12),(11,13),(15,29),(16,20),(17,23),(18,22),(21,24),(25,27),(26,28)],
[(0,7),(1,6),(2,9),(4,10),(5,11),(8,13),(12,14),(15,22),(16,21),(17,24),(19,25),(20,26),(23,28),(27,29)],
[(0,6),(2,4),(3,5),(7,11),(8,10),(9,12),(13,14),(15,21),(17,19),(18,20),(22,26),(23,25),(24,27),(28,29)],
[(0,3),(1,2),(4,7),(5,9),(6,8),(10,11),(12,13),(14,29),(15,18),(16,17),(19,22),(20,24),(21,23),(25,26),(27,28)],
[(0,1),(2,3),(4,6),(7,9),(10,12),(11,13),(15,16),(17,18),(19,21),(22,24),(25,27),(26,28)],
[(0,15),(1,2),(3,5),(8,10),(11,12),(13,28),(16,17),(18,20),(23,25),(26,27)],
[(1,16),(3,4),(5,6),(7,8),(9,10),(12,27),(18,19),(20,21),(22,23),(24,25)],
[(2,3),(4,5),(6,7),(8,9),(10,11),(17,18),(19,20),(21,22),(23,24),(25,26)],
[(2,17),(3,18),(4,19),(5,6),(7,8),(9,24),(10,25),(11,26),(20,21),(22,23)],
[(5,20),(6,21),(7,22),(8,23),(9,16),(10,17),(11,18),(12,19)],
[(5,9),(6,10),(7,11),(8,15),(13,20),(14,21),(18,22),(19,23)],
[(3,5),(4,8),(7,9),(12,15),(13,16),(14,17),(20,24),(21,25)],
[(2,4),(6,8),(10,12),(11,13),(14,15),(16,18),(17,19),(20,22),(21,23),(24,26),(25,27)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28)],
],
31: [
[(0,1),(2,3),(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29)],
[(0,2),(1,3),(4,6),(5,7),(8,10),(9,11),(12,14),(13,15),(16,18),(17,19),(20,22),(21,23),(24,26),(25,27),(28,30)],
[(0,4),(1,5),(2,6),(3,7),(8,12),(9,13),(10,14),(11,15),(16,20),(17,21),(18,22),(19,23),(24,28),(25,29),(26,30)],
[(0,8),(1,9),(2,10),(3,11),(4,12),(5,13),(6,14),(7,15),(16,24),(17,25),(18,26),(19,27),(20,28),(21,29),(22,30)],
[(0,16),(1,8),(2,4),(3,12),(5,10),(6,9),(7,14),(11,13),(17,24),(18,20),(19,28),(21,26),(22,25),(23,30),(27,29)],
[(1,2),(3,5),(4,8),(6,22),(7,11),(9,25),(10,12),(13,14),(17,18),(19,21),(20,24),(23,27),(26,28),(29,30)],
[(1,17),(2,18),(3,19),(4,20),(5,10),(7,23),(8,24),(11,27),(12,28),(13,29),(14,30),(21,26)],
[(3,17),(4,16),(5,21),(6,18),(7,9),(8,20),(10,26),(11,23),(13,25),(14,28),(15,27),(22,24)],
[(1,4),(3,8),(5,16),(7,17),(9,21),(10,22),(11,19),(12,20),(14,24),(15,26),(23,28),(27,30)],
[(2,5),(7,8),(9,18),(11,17),(12,16),(13,22),(14,20),(15,19),(23,24),(26,29)],
[(2,4),(6,12),(9,16),(10,11),(13,17),(14,18),(15,22),(19,25),(20,21),(27,29)],
[(5,6),(8,12),(9,10),(11,13),(14,16),(15,17),(18,20),(19,23),(21,22),(25,26)],
[(3,5),(6,7),(8,9),(10,12),(11,14),(13,16),(15,18),(17,20),(19,21),(22,23),(24,25),(26,28)],
[(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28)],
],
32: [
[(0,1),(2,3),(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31)],
[(0,2),(1,3),(4,6),(5,7),(8,10),(9,11),(12,14),(13,15),(16,18),(17,19),(20,22),(21,23),(24,26),(25,27),(28,30),(29,31)],
[(0,4),(1,5),(2,6),(3,7),(8,12),(9,13),(10,14),(11,15),(16,20),(17,21),(18,22),(19,23),(24,28),(25,29),(26,30),(27,31)],
[(0,8),(1,9),(2,10),(3,11),(4,12),(5,13),(6,14),(7,15),(16,24),(17,25),(18,26),(19,27),(20,28),(21,29),(22,30),(23,31)],
[(0,16),(1,8),(2,4),(3,12),(5,10),(6,9),(7,14),(11,13),(15,31),(17,24),(18,20),(19,28),(21,26),(22,25),(23,30),(27,29)],
[(1,2),(3,5),(4,8),(6,22),(7,11),(9,25),(10,12),(13,14),(17,18),(19,21),(20,24),(23,27),(26,28),(29,30)],
[(1,17),(2,18),(3,19),(4,20),(5,10),(7,23),(8,24),(11,27),(12,28),(13,29),(14,30),(21,26)],
[(3,17),(4,16),(5,21),(6,18),(7,9),(8,20),(10,26),(11,23),(13,25),(14,28),(15,27),(22,24)],
[(1,4),(3,8),(5,16),(7,17),(9,21),(10,22),(11,19),(12,20),(14,24),(15,26),(23,28),(27,30)],
[(2,5),(7,8),(9,18),(11,17),(12,16),(13,22),(14,20),(15,19),(23,24),(26,29)],
[(2,4),(6,12),(9,16),(10,11),(13,17),(14,18),(15,22),(19,25),(20,21),(27,29)],
[(5,6),(8,12),(9,10),(11,13),(14,16),(15,17),(18,20),(19,23),(21,22),(25,26)],
[(3,5),(6,7),(8,9),(10,12),(11,14),(13,16),(15,18),(17,20),(19,21),(22,23),(24,25),(26,28)],
[(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28)],
],
33: [
[(0,22),(2,30),(4,20),(6,10),(8,12),(16,24),(18,32),(26,28),(1,27),(3,25),(5,31),(7,29),(9,17),(11,13),(15,23),(19,21)],
[(0,12),(2,26),(4,16),(8,28),(10,30),(14,22),(1,11),(3,15),(5,19),(7,9),(13,27),(17,29),(21,31),(23,25)],
[(0,16),(6,14),(8,18),(12,32),(20,22),(24,28),(1,3),(5,7),(9,11),(13,17),(15,19),(21,23),(25,27),(29,31)],
[(0,4),(2,8),(10,12),(14,26),(16,18),(20,24),(22,28),(30,32),(1,5),(3,7),(9,21),(11,23),(13,15),(17,19),(25,29),(27,31)],
[(0,6),(4,10),(12,22),(14,20),(18,26),(24,30),(28,32),(3,5),(7,25),(9,13),(11,15),(17,21),(19,23),(27,29)],
[(0,2),(6,8),(10,20),(12,18),(14,16),(22,30),(26,28),(3,9),(5,13),(11,17),(15,21),(19,27),(23,29)],
[(2,4),(6,14),(8,16),(12,24),(22,26),(28,30),(5,9),(7,13),(19,25),(23,27),(0,1)],
[(2,6),(4,14),(8,10),(18,22),(20,24),(26,28),(7,11),(13,17),(15,19),(21,25),(30,31),(1,32)],
[(4,6),(8,12),(10,14),(16,20),(7,9),(11,13),(15,17),(19,21),(23,25),(2,3),(28,29)],
[(6,8),(12,16),(14,18),(20,24),(13,15),(17,19),(4,5)],
[(10,12),(14,16),(18,20),(22,24),(6,7)],
[(8,10),(12,14),(16,18),(20,22),(24,26)],
[(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32)],
],
34: [
[(0,13),(1,12),(2,15),(3,14),(4,8),(5,6),(7,11),(9,10),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33)],
[(0,5),(1,7),(2,9),(3,4),(6,13),(8,14),(10,15),(11,12),(17,21),(18,22),(19,23),(20,26),(24,32),(25,33),(28,30),(29,31)],
[(0,1),(2,3),(4,5),(6,8),(7,9),(10,11),(12,13),(14,15),(16,24),(17,26),(18,28),(19,30),(22,29),(23,31),(25,32),(27,33)],
[(0,2),(1,3),(4,10),(5,11),(6,7),(8,9),(12,14),(13,15),(16,20),(17,25),(21,33),(24,27),(26,32)],
[(1,2),(3,12),(4,6),(5,7),(8,10),(9,11),(13,14),(16,18),(17,22),(20,26),(21,25),(30,32),(31,33)],
[(1,4),(2,6),(5,8),(7,10),(9,13),(11,14),(17,18),(19,26),(20,28),(21,23),(22,30),(25,29),(31,32)],
[(2,4),(3,6),(9,12),(11,13),(19,24),(21,28),(23,27),(25,26),(1,33)],
[(3,5),(6,8),(7,9),(10,12),(19,20),(22,24),(23,30),(25,28),(27,29)],
[(3,4),(5,6),(7,8),(9,10),(11,12),(17,19),(18,20),(23,25),(24,28),(27,31),(29,32)],
[(6,7),(8,9),(18,19),(20,21),(22,23),(26,27),(28,30),(29,31),(0,32),(1,17)],
[(20,22),(21,24),(25,26),(27,30),(0,16),(2,18),(15,31)],
[(19,20),(21,23),(24,25),(26,28),(29,30)],
[(21,22),(23,24),(25,26),(27,28),(4,20),(14,30),(13,29),(3,19)],
[(8,24),(12,28),(10,26),(6,22),(9,25),(5,21),(11,27),(7,23)],
[(8,16),(24,32),(12,20),(10,18),(14,22),(9,17),(25,33),(13,21),(11,19),(15,23)],
[(4,8),(12,16),(20,24),(28,32),(6,10),(14,18),(22,26),(5,9),(13,17),(21,25),(29,33),(7,11),(15,19),(23,27)],
[(2,4),(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(3,5),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32)],
],
35: [
[(0,13),(1,12),(2,15),(3,14),(4,8),(5,6),(7,11),(9,10),(16,28),(17,20),(18,24),(19,21),(22,33),(23,27),(25,30),(26,29),(31,32)],
[(0,5),(1,7),(2,9),(3,4),(6,13),(8,14),(10,15),(11,12),(16,18),(17,23),(19,22),(20,27),(21,33),(24,28),(26,31),(29,32),(30,34)],
[(0,1),(2,3),(4,5),(6,8),(7,9),(10,11),(12,13),(14,15),(19,26),(20,30),(21,31),(22,29),(23,25),(27,33),(32,34)],
[(0,2),(1,3),(4,10),(5,11),(6,7),(8,9),(12,14),(13,15),(16,23),(17,26),(20,22),(25,31),(27,32),(28,33),(29,30)],
[(1,2),(3,12),(4,6),(5,7),(8,10),(9,11),(13,14),(16,19),(18,22),(21,23),(24,27),(28,32)],
[(1,4),(2,6),(5,8),(7,10),(9,13),(11,14),(17,24),(18,25),(19,20),(22,31),(23,29),(26,27),(28,34)],
[(2,4),(3,6),(9,12),(11,13),(17,19),(18,21),(22,25),(23,28),(24,26),(27,30),(33,34)],
[(3,5),(6,8),(7,9),(10,12),(16,17),(18,19),(20,24),(22,26),(25,28),(30,31),(32,33),(2,34)],
[(3,4),(5,6),(7,8),(9,10),(11,12),(17,18),(21,24),(22,23),(25,27),(26,29),(30,32),(31,33)],
[(6,7),(8,9),(19,22),(20,21),(23,25),(24,26),(27,28),(29,30),(31,32),(1,33)],
[(19,20),(21,22),(23,24),(25,26),(27,29),(28,30),(0,32),(1,17)],
[(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(0,16)],
[(8,24),(4,20),(12,28),(2,18),(10,26),(6,22),(14,30),(9,25),(5,21),(13,29),(3,19),(11,27),(7,23),(15,31)],
[(8,16),(24,32),(12,20),(10,18),(26,34),(14,22),(9,17),(25,33),(13,21),(11,19),(15,23)],
[(4,8),(12,16),(20,24),(28,32),(6,10),(14,18),(22,26),(30,34),(5,9),(13,17),(21,25),(29,33),(7,11),(15,19),(23,27)],
[(2,4),(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(3,5),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34)],
],
36: [
[(0,2),(4,6),(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(1,3),(5,7),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35)],
[(2,10),(4,12),(6,14),(8,20),(16,32),(18,34),(24,28),(26,30),(3,11),(5,13),(7,15),(9,21),(17,33),(19,35),(25,29),(27,31)],
[(0,16),(2,20),(4,24),(6,28),(12,26),(14,30),(18,32),(22,34),(1,17),(3,21),(5,25),(7,29),(13,27),(15,31),(19,33),(23,35)],
[(0,8),(2,18),(10,34),(16,22),(20,32),(1,9),(3,19),(11,35),(17,23),(21,33)],
[(0,4),(2,12),(8,20),(10,18),(28,32),(30,34),(1,5),(3,13),(9,21),(11,19),(29,33),(31,35)],
[(2,4),(6,20),(8,24),(10,14),(12,28),(18,26),(30,32),(3,5),(7,21),(9,25),(11,15),(13,29),(19,27),(31,33),(0,1),(34,35)],
[(6,16),(10,24),(14,22),(18,20),(7,17),(11,25),(15,23),(19,21)],
[(6,8),(12,16),(14,28),(18,24),(22,26),(7,9),(13,17),(15,29),(19,25),(23,27)],
[(2,6),(4,8),(14,18),(16,24),(22,30),(26,32),(3,7),(5,9),(15,19),(17,25),(23,31),(27,33)],
[(4,6),(8,10),(12,14),(20,22),(24,28),(26,30),(5,7),(9,11),(13,15),(21,23),(25,29),(27,31),(2,3),(32,33)],
[(8,12),(10,16),(18,20),(22,28),(9,13),(11,17),(19,21),(23,29),(4,5),(30,31),(1,32),(3,34)],
[(6,8),(10,14),(16,18),(20,24),(26,28),(7,9),(11,15),(17,19),(21,25),(27,29)],
[(10,12),(14,16),(18,20),(22,24),(11,13),(15,17),(19,21),(23,25),(6,7),(8,9),(26,27),(28,29)],
[(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34)],
],
37: [
[(0,24),(2,8),(4,16),(6,10),(12,34),(14,22),(18,28),(20,26),(30,32),(1,3),(5,7),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35)],
[(0,4),(2,14),(6,12),(8,22),(10,34),(16,24),(20,30),(26,32),(28,36),(3,11),(5,13),(7,15),(9,21),(17,33),(19,35),(25,29),(27,31)],
[(6,20),(8,28),(10,30),(12,26),(14,18),(22,34),(32,36),(1,17),(3,21),(5,25),(7,29),(13,27),(15,31),(19,33),(23,35)],
[(0,14),(2,20),(8,12),(18,30),(22,32),(24,34),(26,28),(1,9),(3,19),(11,35),(17,23),(21,33)],
[(0,6),(4,12),(10,14),(16,22),(24,32),(1,5),(3,13),(9,21),(11,19),(29,33),(31,35)],
[(2,16),(4,18),(6,8),(12,30),(14,26),(20,22),(24,36),(3,5),(7,21),(9,25),(11,15),(13,29),(19,27),(31,33)],
[(2,6),(4,10),(12,18),(14,24),(16,20),(22,28),(34,36),(7,17),(11,25),(15,23),(19,21)],
[(0,2),(4,6),(8,16),(12,20),(18,24),(28,30),(32,34),(7,9),(13,17),(15,29),(19,25),(23,27)],
[(2,4),(10,16),(12,14),(18,22),(20,26),(28,32),(30,34),(3,7),(5,9),(15,19),(17,25),(23,31),(27,33),(0,1)],
[(6,12),(8,10),(14,18),(16,20),(22,24),(26,28),(30,32),(5,7),(9,11),(13,15),(21,23),(25,29),(27,31),(2,3),(34,35)],
[(6,8),(10,12),(14,16),(18,20),(22,26),(24,28),(9,13),(11,17),(19,21),(23,29),(32,33),(3,34)],
[(4,6),(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(7,9),(11,15),(17,19),(21,25),(27,29),(1,32)],
[(11,13),(15,17),(19,21),(23,25),(4,5),(6,7),(8,9),(26,27),(28,29),(30,31)],
[(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(5,36)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36)],
],
38: [
[(0,24),(2,8),(4,16),(6,10),(12,34),(14,22),(18,28),(20,26),(30,32),(1,25),(3,9),(5,17),(7,11),(13,35),(15,23),(19,29),(21,27),(31,33)],
[(0,4),(2,14),(6,12),(8,22),(10,34),(16,24),(20,30),(26,32),(28,36),(1,5),(3,15),(7,13),(9,23),(11,35),(17,25),(21,31),(27,33),(29,37)],
[(6,20),(8,28),(10,30),(12,26),(14,18),(22,34),(32,36),(7,21),(9,29),(11,31),(13,27),(15,19),(23,35),(33,37)],
[(0,14),(2,20),(8,12),(18,30),(22,32),(24,34),(26,28),(1,15),(3,21),(9,13),(19,31),(23,33),(25,35),(27,29)],
[(0,6),(4,12),(10,14),(16,22),(24,32),(1,7),(5,13),(11,15),(17,23),(25,33)],
[(2,16),(4,18),(6,8),(12,30),(14,26),(20,22),(24,36),(3,17),(5,19),(7,9),(13,31),(15,27),(21,23),(25,37)],
[(2,6),(4,10),(12,18),(14,24),(16,20),(22,28),(34,36),(3,7),(5,11),(13,19),(15,25),(17,21),(23,29),(35,37)],
[(0,2),(4,6),(8,16),(12,20),(18,24),(28,30),(32,34),(1,3),(5,7),(9,17),(13,21),(19,25),(29,31),(33,35),(36,37)],
[(2,4),(10,16),(12,14),(18,22),(20,26),(28,32),(30,34),(3,5),(11,17),(13,15),(19,23),(21,27),(29,33),(31,35),(0,1)],
[(6,12),(8,10),(14,18),(16,20),(22,24),(26,28),(30,32),(7,13),(9,11),(15,19),(17,21),(23,25),(27,29),(31,33),(2,3),(34,35)],
[(6,8),(10,12),(14,16),(18,20),(22,26),(24,28),(7,9),(11,13),(15,17),(19,21),(23,27),(25,29),(32,33),(3,34)],
[(4,6),(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(5,7),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(1,32)],
[(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31)],
[(5,36),(1,16),(3,18),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34)],
[(5,20),(21,36),(1,8),(3,10),(7,14),(9,16),(11,18),(15,22),(17,24),(19,26),(23,30),(25,32),(27,34)],
[(5,12),(13,20),(21,28),(29,36),(1,4),(3,6),(7,10),(11,14),(15,18),(19,22),(23,26),(27,30),(31,34)],
[(5,8),(9,12),(13,16),(17,20),(21,24),(25,28),(29,32),(33,36),(1,2),(3,4)],
[(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36)],
],
39: [
[(0,6),(2,14),(4,10),(8,16),(12,18),(20,26),(22,30),(24,36),(28,34),(32,38),(1,25),(3,9),(5,17),(7,11),(13,35),(15,23),(19,29),(21,27),(31,33)],
[(0,28),(2,22),(4,32),(6,34),(8,24),(10,38),(12,20),(14,30),(16,36),(18,26),(1,5),(3,15),(7,13),(9,23),(11,35),(17,25),(21,31),(27,33),(29,37)],
[(0,8),(2,4),(6,16),(10,14),(22,32),(24,28),(30,38),(34,36),(7,21),(9,29),(11,31),(13,27),(15,19),(23,35),(33,37)],
[(2,12),(4,24),(6,10),(8,22),(14,34),(16,30),(26,36),(28,32),(1,15),(3,21),(9,13),(19,31),(23,33),(25,35),(27,29)],
[(0,2),(4,12),(14,20),(18,24),(26,34),(36,38),(1,7),(5,13),(11,15),(17,23),(25,33)],
[(2,12),(10,18),(14,22),(16,24),(20,28),(26,36),(3,17),(5,19),(7,9),(13,31),(15,27),(21,23),(25,37)],
[(6,10),(8,14),(16,20),(18,22),(24,30),(28,32),(3,7),(5,11),(13,19),(15,25),(17,21),(23,29),(35,37)],
[(2,6),(4,8),(10,14),(12,20),(18,26),(24,28),(30,34),(32,36),(1,3),(5,7),(9,17),(13,21),(19,25),(29,31),(33,35)],
[(2,4),(6,8),(12,14),(16,18),(20,22),(24,26),(30,32),(34,36),(3,5),(11,17),(13,15),(19,23),(21,27),(29,33),(31,35),(0,1)],
[(4,6),(8,12),(10,16),(14,18),(20,24),(22,28),(26,30),(32,34),(7,13),(9,11),(15,19),(17,21),(23,25),(27,29),(31,33),(2,3),(36,37)],
[(8,10),(12,16),(14,20),(18,24),(22,26),(28,30),(7,9),(11,13),(15,17),(19,21),(23,27),(25,29),(34,35)],
[(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(5,7),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(3,34)],
[(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33)],
[(1,32),(5,36),(7,38),(3,18),(9,24),(11,26),(13,28),(15,30),(19,34)],
[(1,16),(5,20),(7,22),(17,32),(21,36),(23,38),(3,10),(11,18),(19,26),(27,34)],
[(1,8),(5,12),(7,14),(9,16),(13,20),(15,22),(17,24),(21,28),(23,30),(25,32),(29,36),(31,38),(3,6)],
[(1,4),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38)],
],
40: [
[(0,6),(2,14),(4,10),(8,16),(12,18),(20,26),(22,30),(24,36),(28,34),(32,38),(1,7),(3,15),(5,11),(9,17),(13,19),(21,27),(23,31),(25,37),(29,35),(33,39)],
[(0,28),(2,22),(4,32),(6,34),(8,24),(10,38),(12,20),(14,30),(16,36),(18,26),(1,29),(3,23),(5,33),(7,35),(9,25),(11,39),(13,21),(15,31),(17,37),(19,27)],
[(0,8),(2,4),(6,16),(10,14),(22,32),(24,28),(30,38),(34,36),(1,9),(3,5),(7,17),(11,15),(23,33),(25,29),(31,39),(35,37)],
[(2,12),(4,24),(6,10),(8,22),(14,34),(16,30),(26,36),(28,32),(3,13),(5,25),(7,11),(9,23),(15,35),(17,31),(27,37),(29,33)],
[(0,2),(4,12),(14,20),(18,24),(26,34),(36,38),(1,3),(5,13),(15,21),(19,25),(27,35),(37,39)],
[(2,12),(10,18),(14,22),(16,24),(20,28),(26,36),(3,13),(11,19),(15,23),(17,25),(21,29),(27,37),(0,1),(38,39)],
[(6,10),(8,14),(16,20),(18,22),(24,30),(28,32),(7,11),(9,15),(17,21),(19,23),(25,31),(29,33)],
[(2,6),(4,8),(10,14),(12,20),(18,26),(24,28),(30,34),(32,36),(3,7),(5,9),(11,15),(13,21),(19,27),(25,29),(31,35),(33,37)],
[(2,4),(6,8),(12,14),(16,18),(20,22),(24,26),(30,32),(34,36),(3,5),(7,9),(13,15),(17,19),(21,23),(25,27),(31,33),(35,37)],
[(4,6),(8,12),(10,16),(14,18),(20,24),(22,28),(26,30),(32,34),(5,7),(9,13),(11,17),(15,19),(21,25),(23,29),(27,31),(33,35),(2,3),(36,37)],
[(8,10),(12,16),(14,20),(18,24),(22,26),(28,30),(9,11),(13,17),(15,21),(19,25),(23,27),(29,31),(4,5),(34,35)],
[(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(3,34),(5,36)],
[(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33)],
[(1,32),(7,38),(3,18),(5,20),(9,24),(11,26),(13,28),(15,30),(19,34),(21,36)],
[(1,16),(7,22),(17,32),(23,38),(3,10),(5,12),(11,18),(13,20),(19,26),(21,28),(27,34),(29,36)],
[(1,8),(7,14),(9,16),(15,22),(17,24),(23,30),(25,32),(31,38),(3,6),(33,36)],
[(1,4),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(35,38)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38)],
],
41: [
[(0,14),(2,20),(6,10),(8,16),(12,26),(18,38),(22,28),(24,34),(30,32),(36,40),(1,7),(3,15),(5,11),(9,17),(13,19),(21,27),(23,31),(25,37),(29,35),(33,39)],
[(0,22),(2,30),(4,24),(6,8),(10,16),(12,18),(14,28),(20,32),(26,38),(34,40),(1,29),(3,23),(5,33),(7,35),(9,25),(11,39),(13,21),(15,31),(17,37),(19,27)],
[(0,12),(2,6),(4,36),(8,30),(10,20),(16,32),(22,34),(24,26),(28,40),(1,9),(3,5),(7,17),(11,15),(23,33),(25,29),(31,39),(35,37)],
[(4,12),(10,24),(14,36),(16,28),(18,22),(20,34),(26,38),(32,40),(3,13),(5,25),(7,11),(9,23),(15,35),(17,31),(27,37),(29,33)],
[(2,4),(8,14),(10,18),(12,34),(20,26),(22,24),(28,38),(30,36),(1,3),(5,13),(15,21),(19,25),(27,35),(37,39)],
[(0,4),(6,12),(8,10),(14,20),(16,22),(18,30),(24,32),(26,36),(28,34),(38,40),(3,13),(11,19),(15,23),(17,25),(21,29),(27,37)],
[(0,2),(4,6),(10,18),(12,24),(14,16),(22,28),(26,30),(32,38),(34,36),(7,11),(9,15),(17,21),(19,23),(25,31),(29,33)],
[(2,4),(6,18),(12,26),(20,22),(24,30),(32,34),(36,38),(3,7),(5,9),(11,15),(13,21),(19,27),(25,29),(31,35),(33,37),(0,1)],
[(2,8),(4,10),(6,14),(12,20),(16,18),(22,24),(26,28),(34,36),(3,5),(7,9),(13,15),(17,19),(21,23),(25,27),(31,33),(35,37),(38,39)],
[(4,8),(10,12),(14,16),(18,22),(20,26),(24,30),(28,32),(5,7),(9,13),(11,17),(15,19),(21,25),(23,29),(27,31),(33,35),(2,3),(36,37)],
[(6,8),(10,14),(12,16),(18,20),(22,26),(24,28),(30,32),(9,11),(13,17),(15,21),(19,25),(23,27),(29,31),(4,5)],
[(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(5,36)],
[(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35)],
[(1,32),(3,34),(7,38),(9,40),(5,20),(11,26),(13,28),(15,30),(21,36)],
[(1,16),(3,18),(7,22),(9,24),(17,32),(19,34),(23,38),(25,40),(5,12),(13,20),(21,28),(29,36)],
[(1,8),(3,10),(7,14),(9,16),(11,18),(15,22),(17,24),(19,26),(23,30),(25,32),(27,34),(31,38),(33,40)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40)],
],
42: [
[(0,14),(2,20),(6,10),(8,16),(12,26),(18,38),(22,28),(24,34),(30,32),(36,40),(1,15),(3,21),(7,11),(9,17),(13,27),(19,39),(23,29),(25,35),(31,33),(37,41)],
[(0,22),(2,30),(4,24),(6,8),(10,16),(12,18),(14,28),(20,32),(26,38),(34,40),(1,23),(3,31),(5,25),(7,9),(11,17),(13,19),(15,29),(21,33),(27,39),(35,41)],
[(0,12),(2,6),(4,36),(8,30),(10,20),(16,32),(22,34),(24,26),(28,40),(1,13),(3,7),(5,37),(9,31),(11,21),(17,33),(23,35),(25,27),(29,41)],
[(4,12),(10,24),(14,36),(16,28),(18,22),(20,34),(26,38),(32,40),(5,13),(11,25),(15,37),(17,29),(19,23),(21,35),(27,39),(33,41)],
[(2,4),(8,14),(10,18),(12,34),(20,26),(22,24),(28,38),(30,36),(3,5),(9,15),(11,19),(13,35),(21,27),(23,25),(29,39),(31,37)],
[(0,4),(6,12),(8,10),(14,20),(16,22),(18,30),(24,32),(26,36),(28,34),(38,40),(1,5),(7,13),(9,11),(15,21),(17,23),(19,31),(25,33),(27,37),(29,35),(39,41)],
[(0,2),(4,6),(10,18),(12,24),(14,16),(22,28),(26,30),(32,38),(34,36),(1,3),(5,7),(11,19),(13,25),(15,17),(23,29),(27,31),(33,39),(35,37),(40,41)],
[(2,4),(6,18),(12,26),(20,22),(24,30),(32,34),(36,38),(3,5),(7,19),(13,27),(21,23),(25,31),(33,35),(37,39),(0,1)],
[(2,8),(4,10),(6,14),(12,20),(16,18),(22,24),(26,28),(34,36),(3,9),(5,11),(7,15),(13,21),(17,19),(23,25),(27,29),(35,37),(38,39)],
[(4,8),(10,12),(14,16),(18,22),(20,26),(24,30),(28,32),(5,9),(11,13),(15,17),(19,23),(21,27),(25,31),(29,33),(2,3),(36,37)],
[(6,8),(10,14),(12,16),(18,20),(22,26),(24,28),(30,32),(7,9),(11,15),(13,17),(19,21),(23,27),(25,29),(31,33),(4,5)],
[(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(6,7),(5,36)],
[(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(7,38)],
[(1,32),(3,34),(9,40),(5,20),(7,22),(11,26),(13,28),(15,30),(21,36),(23,38)],
[(1,16),(3,18),(9,24),(17,32),(19,34),(25,40),(5,12),(7,14),(13,20),(15,22),(21,28),(23,30),(29,36),(31,38)],
[(1,8),(3,10),(9,16),(11,18),(17,24),(19,26),(25,32),(27,34),(33,40),(35,38)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(37,40)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40)],
],
43: [
[(0,2),(4,6),(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(40,42),(1,15),(3,21),(7,11),(9,17),(13,27),(19,39),(23,29),(25,35),(31,33),(37,41)],
[(0,24),(2,26),(4,12),(6,14),(8,20),(16,40),(18,42),(22,34),(28,36),(30,38),(1,23),(3,31),(5,25),(7,9),(11,17),(13,19),(15,29),(21,33),(27,39),(35,41)],
[(0,4),(2,12),(6,24),(8,32),(10,34),(14,26),(16,28),(18,36),(30,40),(38,42),(1,13),(3,7),(5,37),(9,31),(11,21),(17,33),(23,35),(25,27),(29,41)],
[(0,16),(2,30),(4,28),(6,18),(10,22),(12,40),(14,38),(20,32),(24,36),(26,42),(5,13),(11,25),(15,37),(17,29),(19,23),(21,35),(27,39),(33,41)],
[(0,8),(2,20),(6,16),(10,18),(14,28),(22,40),(24,32),(26,36),(34,42),(3,5),(9,15),(11,19),(13,35),(21,27),(23,25),(29,39),(31,37)],
[(2,6),(4,10),(8,16),(12,18),(14,20),(22,28),(24,30),(26,34),(32,38),(36,40),(1,5),(7,13),(9,11),(15,21),(17,23),(19,31),(25,33),(27,37),(29,35),(39,41)],
[(4,8),(6,24),(10,16),(12,22),(18,36),(20,30),(26,32),(34,38),(1,3),(5,7),(11,19),(13,25),(15,17),(23,29),(27,31),(33,39),(35,37)],
[(2,4),(6,8),(10,14),(12,24),(16,22),(18,30),(20,26),(28,32),(34,36),(38,40),(3,5),(7,19),(13,27),(21,23),(25,31),(33,35),(37,39),(0,1)],
[(4,6),(8,10),(14,24),(16,20),(18,28),(22,26),(32,34),(36,38),(3,9),(5,11),(7,15),(13,21),(17,19),(23,25),(27,29),(35,37),(40,41)],
[(8,12),(10,16),(18,22),(20,24),(26,32),(30,34),(5,9),(11,13),(15,17),(19,23),(21,27),(25,31),(29,33),(2,3),(38,39)],
[(6,8),(12,14),(18,20),(22,24),(28,30),(34,36),(7,9),(11,15),(13,17),(19,21),(23,27),(25,29),(31,33),(4,5)],
[(10,12),(14,16),(20,22),(26,28),(30,32),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(6,7),(36,37)],
[(12,14),(16,18),(24,26),(28,30),(8,9),(10,11),(20,21),(22,23),(32,33),(34,35),(5,36),(7,38)],
[(12,13),(14,15),(16,17),(18,19),(24,25),(26,27),(28,29),(30,31),(1,32),(3,34),(9,40),(11,42),(5,20),(7,22),(21,36),(23,38)],
[(1,16),(3,18),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(25,40),(27,42),(5,12),(7,14),(29,36),(31,38)],
[(1,8),(3,10),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(33,40),(35,42)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42)],
],
44: [
[(0,2),(4,6),(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(40,42),(1,3),(5,7),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(37,39),(41,43)],
[(0,24),(2,26),(4,12),(6,14),(8,20),(16,40),(18,42),(22,34),(28,36),(30,38),(1,25),(3,27),(5,13),(7,15),(9,21),(17,41),(19,43),(23,35),(29,37),(31,39)],
[(0,4),(2,12),(6,24),(8,32),(10,34),(14,26),(16,28),(18,36),(30,40),(38,42),(1,5),(3,13),(7,25),(9,33),(11,35),(15,27),(17,29),(19,37),(31,41),(39,43)],
[(0,16),(2,30),(4,28),(6,18),(10,22),(12,40),(14,38),(20,32),(24,36),(26,42),(1,17),(3,31),(5,29),(7,19),(11,23),(13,41),(15,39),(21,33),(25,37),(27,43)],
[(0,8),(2,20),(6,16),(10,18),(14,28),(22,40),(24,32),(26,36),(34,42),(1,9),(3,21),(7,17),(11,19),(15,29),(23,41),(25,33),(27,37),(35,43)],
[(2,6),(4,10),(8,16),(12,18),(14,20),(22,28),(24,30),(26,34),(32,38),(36,40),(3,7),(5,11),(9,17),(13,19),(15,21),(23,29),(25,31),(27,35),(33,39),(37,41),(0,1),(42,43)],
[(4,8),(6,24),(10,16),(12,22),(18,36),(20,30),(26,32),(34,38),(5,9),(7,25),(11,17),(13,23),(19,37),(21,31),(27,33),(35,39)],
[(2,4),(6,8),(10,14),(12,24),(16,22),(18,30),(20,26),(28,32),(34,36),(38,40),(3,5),(7,9),(11,15),(13,25),(17,23),(19,31),(21,27),(29,33),(35,37),(39,41)],
[(4,6),(8,10),(14,24),(16,20),(18,28),(22,26),(32,34),(36,38),(5,7),(9,11),(15,25),(17,21),(19,29),(23,27),(33,35),(37,39),(2,3),(40,41)],
[(8,12),(10,16),(18,22),(20,24),(26,32),(30,34),(9,13),(11,17),(19,23),(21,25),(27,33),(31,35),(4,5),(38,39)],
[(6,8),(12,14),(18,20),(22,24),(28,30),(34,36),(7,9),(13,15),(19,21),(23,25),(29,31),(35,37)],
[(10,12),(14,16),(20,22),(26,28),(30,32),(11,13),(15,17),(21,23),(27,29),(31,33),(6,7),(8,9),(34,35),(36,37)],
[(12,14),(16,18),(24,26),(28,30),(13,15),(17,19),(25,27),(29,31),(10,11),(20,21),(22,23),(32,33),(3,34),(5,36),(7,38),(9,40)],
[(12,13),(14,15),(16,17),(18,19),(24,25),(26,27),(28,29),(30,31),(1,32),(11,42),(5,20),(7,22),(21,36),(23,38)],
[(1,16),(3,18),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(25,40),(27,42),(5,12),(7,14),(29,36),(31,38)],
[(1,8),(3,10),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(33,40),(35,42)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42)],
],
45: [
[(0,40),(2,24),(4,32),(8,12),(10,20),(14,42),(16,28),(18,30),(22,44),(26,36),(34,38),(1,3),(5,7),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(37,39),(41,43)],
[(0,6),(2,22),(4,14),(8,34),(10,26),(12,38),(16,18),(20,36),(24,44),(28,30),(32,42),(1,25),(3,27),(5,13),(7,15),(9,21),(17,41),(19,43),(23,35),(29,37),(31,39)],
[(0,2),(4,8),(6,24),(10,16),(12,18),(14,20),(22,40),(26,32),(28,34),(30,36),(38,42),(1,5),(3,13),(7,25),(9,33),(11,35),(15,27),(17,29),(19,37),(31,41),(39,43)],
[(4,10),(8,16),(12,22),(14,28),(18,32),(24,34),(30,38),(36,42),(1,17),(3,31),(5,29),(7,19),(11,23),(13,41),(15,39),(21,33),(25,37),(27,43)],
[(2,16),(6,28),(8,14),(18,40),(20,24),(22,26),(30,44),(32,38),(1,9),(3,21),(7,17),(11,19),(15,29),(23,41),(25,33),(27,37),(35,43)],
[(0,14),(2,10),(6,8),(12,22),(16,30),(18,28),(20,26),(24,34),(36,44),(38,40),(3,7),(5,11),(9,17),(13,19),(15,21),(23,29),(25,31),(27,35),(33,39),(37,41)],
[(0,4),(2,12),(8,14),(10,18),(16,20),(26,30),(28,36),(32,38),(34,44),(40,42),(5,9),(7,25),(11,17),(13,23),(19,37),(21,31),(27,33),(35,39)],
[(4,6),(8,10),(12,16),(14,18),(20,22),(24,26),(28,32),(30,34),(36,38),(42,44),(3,5),(7,9),(11,15),(13,25),(17,23),(19,31),(21,27),(29,33),(35,37),(39,41),(0,1)],
[(2,4),(6,12),(8,20),(14,16),(18,22),(24,28),(26,38),(30,32),(34,40),(5,7),(9,11),(15,25),(17,21),(19,29),(23,27),(33,35),(37,39)],
[(4,6),(10,20),(12,14),(16,18),(26,36),(28,30),(32,34),(40,42),(9,13),(11,17),(19,23),(21,25),(27,33),(31,35),(2,3)],
[(6,8),(10,14),(20,24),(22,26),(32,36),(38,40),(7,9),(13,15),(19,21),(23,25),(29,31),(35,37),(4,5),(42,43)],
[(8,12),(16,20),(18,24),(22,28),(26,30),(34,38),(11,13),(15,17),(21,23),(27,29),(31,33),(6,7),(40,41)],
[(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(13,15),(17,19),(25,27),(29,31),(8,9),(38,39)],
[(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(7,38),(9,40)],
[(1,32),(3,34),(5,36),(11,42),(13,44),(7,22),(9,24),(15,30),(23,38),(25,40)],
[(1,16),(3,18),(5,20),(11,26),(13,28),(17,32),(19,34),(21,36),(27,42),(29,44),(7,14),(15,22),(23,30),(31,38),(33,40)],
[(1,8),(3,10),(5,12),(9,16),(11,18),(13,20),(17,24),(19,26),(21,28),(25,32),(27,34),(29,36),(35,42),(37,44)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44)],
],
46: [
[(0,40),(2,24),(4,32),(8,12),(10,20),(14,42),(16,28),(18,30),(22,44),(26,36),(34,38),(1,41),(3,25),(5,33),(9,13),(11,21),(15,43),(17,29),(19,31),(23,45),(27,37),(35,39)],
[(0,6),(2,22),(4,14),(8,34),(10,26),(12,38),(16,18),(20,36),(24,44),(28,30),(32,42),(1,7),(3,23),(5,15),(9,35),(11,27),(13,39),(17,19),(21,37),(25,45),(29,31),(33,43)],
[(0,2),(4,8),(6,24),(10,16),(12,18),(14,20),(22,40),(26,32),(28,34),(30,36),(38,42),(1,3),(5,9),(7,25),(11,17),(13,19),(15,21),(23,41),(27,33),(29,35),(31,37),(39,43)],
[(4,10),(8,16),(12,22),(14,28),(18,32),(24,34),(30,38),(36,42),(5,11),(9,17),(13,23),(15,29),(19,33),(25,35),(31,39),(37,43)],
[(2,16),(6,28),(8,14),(18,40),(20,24),(22,26),(30,44),(32,38),(3,17),(7,29),(9,15),(19,41),(21,25),(23,27),(31,45),(33,39)],
[(0,14),(2,10),(6,8),(12,22),(16,30),(18,28),(20,26),(24,34),(36,44),(38,40),(1,15),(3,11),(7,9),(13,23),(17,31),(19,29),(21,27),(25,35),(37,45),(39,41)],
[(0,4),(2,12),(8,14),(10,18),(16,20),(26,30),(28,36),(32,38),(34,44),(40,42),(1,5),(3,13),(9,15),(11,19),(17,21),(27,31),(29,37),(33,39),(35,45),(41,43)],
[(4,6),(8,10),(12,16),(14,18),(20,22),(24,26),(28,32),(30,34),(36,38),(42,44),(5,7),(9,11),(13,17),(15,19),(21,23),(25,27),(29,33),(31,35),(37,39),(43,45),(0,1)],
[(2,4),(6,12),(8,20),(14,16),(18,22),(24,28),(26,38),(30,32),(34,40),(3,5),(7,13),(9,21),(15,17),(19,23),(25,29),(27,39),(31,33),(35,41),(44,45)],
[(4,6),(10,20),(12,14),(16,18),(26,36),(28,30),(32,34),(40,42),(5,7),(11,21),(13,15),(17,19),(27,37),(29,31),(33,35),(41,43),(2,3)],
[(6,8),(10,14),(20,24),(22,26),(32,36),(38,40),(7,9),(11,15),(21,25),(23,27),(33,37),(39,41),(4,5),(42,43)],
[(8,12),(16,20),(18,24),(22,28),(26,30),(34,38),(9,13),(17,21),(19,25),(23,29),(27,31),(35,39),(6,7),(40,41)],
[(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(8,9),(38,39)],
[(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(7,38),(9,40)],
[(1,32),(3,34),(5,36),(11,42),(13,44),(7,22),(9,24),(15,30),(23,38),(25,40)],
[(1,16),(3,18),(5,20),(11,26),(13,28),(17,32),(19,34),(21,36),(27,42),(29,44),(7,14),(15,22),(23,30),(31,38),(33,40)],
[(1,8),(3,10),(5,12),(9,16),(11,18),(13,20),(17,24),(19,26),(21,28),(25,32),(27,34),(29,36),(35,42),(37,44)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44)],
],
47: [
[(0,40),(2,24),(4,32),(6,46),(8,12),(10,20),(14,42),(16,28),(18,30),(22,44),(26,36),(34,38),(1,41),(3,25),(5,33),(9,13),(11,21),(15,43),(17,29),(19,31),(23,45),(27,37),(35,39)],
[(0,6),(2,22),(4,14),(8,34),(10,26),(12,38),(16,18),(20,36),(24,44),(28,30),(32,42),(40,46),(1,7),(3,23),(5,15),(9,35),(11,27),(13,39),(17,19),(21,37),(25,45),(29,31),(33,43)],
[(0,2),(4,8),(6,24),(10,16),(12,18),(14,20),(22,40),(26,32),(28,34),(30,36),(38,42),(44,46),(1,3),(5,9),(7,25),(11,17),(13,19),(15,21),(23,41),(27,33),(29,35),(31,37),(39,43)],
[(4,10),(8,16),(12,22),(14,28),(18,32),(24,34),(30,38),(36,42),(5,11),(9,17),(13,23),(15,29),(19,33),(25,35),(31,39),(37,43)],
[(2,16),(6,28),(8,14),(18,40),(20,24),(22,26),(30,44),(32,38),(3,17),(7,29),(9,15),(19,41),(21,25),(23,27),(31,45),(33,39)],
[(0,14),(2,10),(6,8),(12,22),(16,30),(18,28),(20,26),(24,34),(32,46),(36,44),(38,40),(1,15),(3,11),(7,9),(13,23),(17,31),(19,29),(21,27),(25,35),(37,45),(39,41)],
[(0,4),(2,12),(8,14),(10,18),(16,20),(26,30),(28,36),(32,38),(34,44),(42,46),(1,5),(3,13),(9,15),(11,19),(17,21),(27,31),(29,37),(33,39),(35,45),(41,43)],
[(4,6),(8,10),(12,16),(14,18),(20,22),(24,26),(28,32),(30,34),(36,38),(40,42),(5,7),(9,11),(13,17),(15,19),(21,23),(25,27),(29,33),(31,35),(37,39),(43,45),(0,1)],
[(2,4),(6,12),(8,20),(14,16),(18,22),(24,28),(26,38),(30,32),(34,40),(42,44),(3,5),(7,13),(9,21),(15,17),(19,23),(25,29),(27,39),(31,33),(35,41)],
[(4,6),(10,20),(12,14),(16,18),(26,36),(28,30),(32,34),(40,42),(5,7),(11,21),(13,15),(17,19),(27,37),(29,31),(33,35),(41,43),(2,3),(44,45)],
[(6,8),(10,14),(20,24),(22,26),(32,36),(38,40),(7,9),(11,15),(21,25),(23,27),(33,37),(39,41),(4,5),(42,43)],
[(8,12),(16,20),(18,24),(22,28),(26,30),(34,38),(9,13),(17,21),(19,25),(23,29),(27,31),(35,39),(6,7),(40,41)],
[(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(8,9),(38,39)],
[(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(7,38),(9,40)],
[(1,32),(3,34),(5,36),(11,42),(13,44),(15,46),(7,22),(9,24),(23,38),(25,40)],
[(1,16),(3,18),(5,20),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(27,42),(29,44),(31,46),(7,14),(33,40)],
[(1,8),(3,10),(5,12),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(35,42),(37,44),(39,46)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46)],
],
48: [
[(0,40),(2,24),(4,32),(6,46),(8,12),(10,20),(14,42),(16,28),(18,30),(22,44),(26,36),(34,38),(1,41),(3,25),(5,33),(7,47),(9,13),(11,21),(15,43),(17,29),(19,31),(23,45),(27,37),(35,39)],
[(0,6),(2,22),(4,14),(8,34),(10,26),(12,38),(16,18),(20,36),(24,44),(28,30),(32,42),(40,46),(1,7),(3,23),(5,15),(9,35),(11,27),(13,39),(17,19),(21,37),(25,45),(29,31),(33,43),(41,47)],
[(0,2),(4,8),(6,24),(10,16),(12,18),(14,20),(22,40),(26,32),(28,34),(30,36),(38,42),(44,46),(1,3),(5,9),(7,25),(11,17),(13,19),(15,21),(23,41),(27,33),(29,35),(31,37),(39,43),(45,47)],
[(4,10),(8,16),(12,22),(14,28),(18,32),(24,34),(30,38),(36,42),(5,11),(9,17),(13,23),(15,29),(19,33),(25,35),(31,39),(37,43)],
[(2,16),(6,28),(8,14),(18,40),(20,24),(22,26),(30,44),(32,38),(3,17),(7,29),(9,15),(19,41),(21,25),(23,27),(31,45),(33,39)],
[(0,14),(2,10),(6,8),(12,22),(16,30),(18,28),(20,26),(24,34),(32,46),(36,44),(38,40),(1,15),(3,11),(7,9),(13,23),(17,31),(19,29),(21,27),(25,35),(33,47),(37,45),(39,41)],
[(0,4),(2,12),(8,14),(10,18),(16,20),(26,30),(28,36),(32,38),(34,44),(42,46),(1,5),(3,13),(9,15),(11,19),(17,21),(27,31),(29,37),(33,39),(35,45),(43,47)],
[(4,6),(8,10),(12,16),(14,18),(20,22),(24,26),(28,32),(30,34),(36,38),(40,42),(5,7),(9,11),(13,17),(15,19),(21,23),(25,27),(29,33),(31,35),(37,39),(41,43),(0,1),(46,47)],
[(2,4),(6,12),(8,20),(14,16),(18,22),(24,28),(26,38),(30,32),(34,40),(42,44),(3,5),(7,13),(9,21),(15,17),(19,23),(25,29),(27,39),(31,33),(35,41),(43,45)],
[(4,6),(10,20),(12,14),(16,18),(26,36),(28,30),(32,34),(40,42),(5,7),(11,21),(13,15),(17,19),(27,37),(29,31),(33,35),(41,43),(2,3),(44,45)],
[(6,8),(10,14),(20,24),(22,26),(32,36),(38,40),(7,9),(11,15),(21,25),(23,27),(33,37),(39,41),(4,5),(42,43)],
[(8,12),(16,20),(18,24),(22,28),(26,30),(34,38),(9,13),(17,21),(19,25),(23,29),(27,31),(35,39),(6,7),(40,41)],
[(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(8,9),(38,39)],
[(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(7,38),(9,40)],
[(1,32),(3,34),(5,36),(11,42),(13,44),(15,46),(7,22),(9,24),(23,38),(25,40)],
[(1,16),(3,18),(5,20),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(27,42),(29,44),(31,46),(7,14),(33,40)],
[(1,8),(3,10),(5,12),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(35,42),(37,44),(39,46)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46)],
],
49: [
[(0,4),(2,16),(6,36),(8,34),(10,40),(12,38),(14,18),(20,22),(24,26),(28,32),(30,44),(42,46),(1,41),(3,25),(5,33),(7,47),(9,13),(11,21),(15,43),(17,29),(19,31),(23,45),(27,37),(35,39)],
[(0,6),(2,30),(4,36),(8,24),(10,42),(12,20),(14,28),(16,44),(18,32),(22,38),(26,34),(40,46),(1,7),(3,23),(5,15),(9,35),(11,27),(13,39),(17,19),(21,37),(25,45),(29,31),(33,43),(41,47)],
[(0,8),(2,14),(4,26),(6,24),(10,12),(16,28),(18,30),(20,42),(22,40),(32,44),(34,36),(38,46),(1,3),(5,9),(7,25),(11,17),(13,19),(15,21),(23,41),(27,33),(29,35),(31,37),(39,43),(45,47)],
[(0,10),(4,22),(6,12),(8,20),(14,32),(16,18),(24,42),(26,38),(28,30),(34,40),(36,46),(5,11),(9,17),(13,23),(15,29),(19,33),(25,35),(31,39),(37,43)],
[(4,14),(12,18),(16,22),(28,48),(36,42),(3,17),(7,29),(9,15),(19,41),(21,25),(23,27),(31,45),(33,39)],
[(6,16),(14,20),(22,24),(26,28),(30,42),(36,40),(44,48),(1,15),(3,11),(7,9),(13,23),(17,31),(19,29),(21,27),(25,35),(33,47),(37,45),(39,41)],
[(8,26),(20,32),(22,30),(36,48),(38,44),(1,5),(3,13),(9,15),(11,19),(17,21),(27,31),(29,37),(33,39),(35,45),(43,47)],
[(2,8),(16,22),(18,38),(26,34),(28,36),(32,40),(46,48),(5,7),(9,11),(13,17),(15,19),(21,23),(25,27),(29,33),(31,35),(37,39),(41,43)],
[(0,2),(8,10),(12,26),(18,28),(20,34),(24,32),(36,38),(40,42),(44,46),(3,5),(7,13),(9,21),(15,17),(19,23),(25,29),(27,39),(31,33),(35,41),(43,45)],
[(4,12),(6,8),(10,26),(14,18),(24,36),(30,34),(32,38),(40,44),(42,46),(5,7),(11,21),(13,15),(17,19),(27,37),(29,31),(33,35),(41,43),(0,1)],
[(2,4),(10,16),(12,14),(18,20),(22,26),(28,30),(34,40),(42,44),(7,9),(11,15),(21,25),(23,27),(33,37),(39,41),(46,47)],
[(2,6),(4,8),(10,12),(14,22),(16,18),(20,26),(24,28),(30,32),(34,36),(38,40),(9,13),(17,21),(19,25),(23,29),(27,31),(35,39),(44,45)],
[(4,6),(8,16),(12,14),(18,24),(20,22),(26,28),(30,34),(32,36),(40,42),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(2,3)],
[(6,10),(8,12),(14,16),(18,20),(22,24),(26,30),(28,34),(32,38),(4,5),(40,41),(42,43)],
[(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(6,7)],
[(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39)],
[(1,32),(3,34),(5,36),(7,38),(9,40),(11,42),(13,44),(15,46),(17,48)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(23,38),(25,40),(27,42),(29,44),(31,46),(33,48)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(33,40),(35,42),(37,44),(39,46),(41,48)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46),(45,48)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48)],
],
50: [
[(0,4),(2,16),(6,36),(8,34),(10,40),(12,38),(14,18),(20,22),(24,26),(28,32),(30,44),(42,46),(1,5),(3,17),(7,37),(9,35),(11,41),(13,39),(15,19),(21,23),(25,27),(29,33),(31,45),(43,47)],
[(0,6),(2,30),(4,36),(8,24),(10,42),(12,20),(14,28),(16,44),(18,32),(22,38),(26,34),(40,46),(1,7),(3,31),(5,37),(9,25),(11,43),(13,21),(15,29),(17,45),(19,33),(23,39),(27,35),(41,47)],
[(0,8),(2,14),(4,26),(6,24),(10,12),(16,28),(18,30),(20,42),(22,40),(32,44),(34,36),(38,46),(1,9),(3,15),(5,27),(7,25),(11,13),(17,29),(19,31),(21,43),(23,41),(33,45),(35,37),(39,47)],
[(0,10),(4,22),(6,12),(8,20),(14,32),(16,18),(24,42),(26,38),(28,30),(34,40),(36,46),(1,11),(5,23),(7,13),(9,21),(15,33),(17,19),(25,43),(27,39),(29,31),(35,41),(37,47)],
[(4,14),(12,18),(16,22),(28,48),(36,42),(5,15),(13,19),(17,23),(29,49),(37,43)],
[(6,16),(14,20),(22,24),(26,28),(30,42),(36,40),(44,48),(7,17),(15,21),(23,25),(27,29),(31,43),(37,41),(45,49)],
[(8,26),(20,32),(22,30),(36,48),(38,44),(9,27),(21,33),(23,31),(37,49),(39,45)],
[(2,8),(16,22),(18,38),(26,34),(28,36),(32,40),(46,48),(3,9),(17,23),(19,39),(27,35),(29,37),(33,41),(47,49)],
[(0,2),(8,10),(12,26),(18,28),(20,34),(24,32),(36,38),(40,42),(44,46),(1,3),(9,11),(13,27),(19,29),(21,35),(25,33),(37,39),(41,43),(45,47),(48,49)],
[(4,12),(6,8),(10,26),(14,18),(24,36),(30,34),(32,38),(40,44),(42,46),(5,13),(7,9),(11,27),(15,19),(25,37),(31,35),(33,39),(41,45),(43,47),(0,1)],
[(2,4),(10,16),(12,14),(18,20),(22,26),(28,30),(34,40),(42,44),(3,5),(11,17),(13,15),(19,21),(23,27),(29,31),(35,41),(43,45),(46,47)],
[(2,6),(4,8),(10,12),(14,22),(16,18),(20,26),(24,28),(30,32),(34,36),(38,40),(3,7),(5,9),(11,13),(15,23),(17,19),(21,27),(25,29),(31,33),(35,37),(39,41),(44,45)],
[(4,6),(8,16),(12,14),(18,24),(20,22),(26,28),(30,34),(32,36),(40,42),(5,7),(9,17),(13,15),(19,25),(21,23),(27,29),(31,35),(33,37),(41,43),(2,3)],
[(6,10),(8,12),(14,16),(18,20),(22,24),(26,30),(28,34),(32,38),(7,11),(9,13),(15,17),(19,21),(23,25),(27,31),(29,35),(33,39),(4,5),(40,41),(42,43)],
[(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(37,39),(6,7)],
[(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39)],
[(1,32),(3,34),(5,36),(7,38),(9,40),(11,42),(13,44),(15,46),(17,48)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(23,38),(25,40),(27,42),(29,44),(31,46),(33,48)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(33,40),(35,42),(37,44),(39,46),(41,48)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46),(45,48)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48)],
],
51: [
[(0,50),(2,6),(4,18),(8,38),(10,36),(12,42),(14,40),(16,20),(22,24),(26,28),(30,34),(32,46),(44,48),(1,5),(3,17),(7,37),(9,35),(11,41),(13,39),(15,19),(21,23),(25,27),(29,33),(31,45),(43,47)],
[(2,8),(4,32),(6,38),(10,26),(12,44),(14,22),(16,30),(18,46),(20,34),(24,40),(28,36),(42,48),(1,7),(3,31),(5,37),(9,25),(11,43),(13,21),(15,29),(17,45),(19,33),(23,39),(27,35),(41,47)],
[(2,10),(4,16),(6,28),(8,26),(12,14),(18,30),(20,32),(22,44),(24,42),(34,46),(36,38),(40,48),(1,9),(3,15),(5,27),(7,25),(11,13),(17,29),(19,31),(21,43),(23,41),(33,45),(35,37),(39,47)],
[(0,20),(2,12),(6,14),(8,22),(10,24),(26,40),(28,42),(30,50),(36,44),(38,48),(1,11),(5,23),(7,13),(9,21),(15,33),(17,19),(25,43),(27,39),(29,31),(35,41),(37,47)],
[(0,8),(16,20),(24,26),(30,34),(42,50),(5,15),(13,19),(17,23),(29,49),(37,43)],
[(0,4),(8,16),(20,24),(26,30),(34,42),(46,50),(7,17),(15,21),(23,25),(27,29),(31,43),(37,41),(45,49)],
[(0,2),(4,6),(8,10),(16,28),(18,26),(22,34),(24,32),(40,42),(44,46),(48,50),(9,27),(21,33),(23,31),(37,49),(39,45)],
[(2,8),(6,20),(12,18),(14,26),(16,22),(24,36),(28,34),(30,44),(32,38),(42,48),(3,9),(17,23),(19,39),(27,35),(29,37),(33,41),(47,49)],
[(4,12),(6,16),(10,14),(18,24),(26,32),(34,44),(36,40),(38,46),(1,3),(9,11),(13,27),(19,29),(21,35),(25,33),(37,39),(41,43),(45,47)],
[(2,4),(8,12),(10,18),(14,20),(22,24),(26,28),(30,36),(32,40),(38,42),(46,48),(5,13),(7,9),(11,27),(15,19),(25,37),(31,35),(33,39),(41,45),(43,47),(0,1)],
[(4,8),(6,10),(14,26),(16,18),(20,28),(22,30),(24,36),(32,34),(40,44),(42,46),(3,5),(11,17),(13,15),(19,21),(23,27),(29,31),(35,41),(43,45),(48,49)],
[(6,8),(12,18),(14,22),(20,24),(26,30),(28,36),(32,38),(42,44),(3,7),(5,9),(11,13),(15,23),(17,19),(21,27),(25,29),(31,33),(35,37),(39,41),(46,47)],
[(10,14),(12,16),(18,26),(20,22),(24,32),(28,30),(34,38),(36,40),(5,7),(9,17),(13,15),(19,25),(21,23),(27,29),(31,35),(33,37),(41,43),(2,3),(44,45)],
[(10,12),(14,16),(18,20),(22,26),(24,28),(30,32),(34,36),(38,40),(7,11),(9,13),(15,17),(19,21),(23,25),(27,31),(29,35),(33,39),(4,5)],
[(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(40,42),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(37,39),(6,7)],
[(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43)],
[(1,32),(3,34),(5,36),(7,38),(9,40),(11,42),(13,44),(15,46),(17,48),(19,50)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(23,38),(25,40),(27,42),(29,44),(31,46),(33,48),(35,50)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(33,40),(35,42),(37,44),(39,46),(41,48),(43,50)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46),(45,48),(47,50)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50)],
],
52: [
[(0,50),(2,6),(4,18),(8,38),(10,36),(12,42),(14,40),(16,20),(22,24),(26,28),(30,34),(32,46),(44,48),(1,51),(3,7),(5,19),(9,39),(11,37),(13,43),(15,41),(17,21),(23,25),(27,29),(31,35),(33,47),(45,49)],
[(2,8),(4,32),(6,38),(10,26),(12,44),(14,22),(16,30),(18,46),(20,34),(24,40),(28,36),(42,48),(3,9),(5,33),(7,39),(11,27),(13,45),(15,23),(17,31),(19,47),(21,35),(25,41),(29,37),(43,49)],
[(2,10),(4,16),(6,28),(8,26),(12,14),(18,30),(20,32),(22,44),(24,42),(34,46),(36,38),(40,48),(3,11),(5,17),(7,29),(9,27),(13,15),(19,31),(21,33),(23,45),(25,43),(35,47),(37,39),(41,49)],
[(0,20),(2,12),(6,14),(8,22),(10,24),(26,40),(28,42),(30,50),(36,44),(38,48),(1,21),(3,13),(7,15),(9,23),(11,25),(27,41),(29,43),(31,51),(37,45),(39,49)],
[(0,8),(16,20),(24,26),(30,34),(42,50),(1,9),(17,21),(25,27),(31,35),(43,51)],
[(0,4),(8,16),(20,24),(26,30),(34,42),(46,50),(1,5),(9,17),(21,25),(27,31),(35,43),(47,51)],
[(0,2),(4,6),(8,10),(16,28),(18,26),(22,34),(24,32),(40,42),(44,46),(48,50),(1,3),(5,7),(9,11),(17,29),(19,27),(23,35),(25,33),(41,43),(45,47),(49,51)],
[(2,8),(6,20),(12,18),(14,26),(16,22),(24,36),(28,34),(30,44),(32,38),(42,48),(3,9),(7,21),(13,19),(15,27),(17,23),(25,37),(29,35),(31,45),(33,39),(43,49),(0,1),(50,51)],
[(4,12),(6,16),(10,14),(18,24),(26,32),(34,44),(36,40),(38,46),(5,13),(7,17),(11,15),(19,25),(27,33),(35,45),(37,41),(39,47)],
[(2,4),(8,12),(10,18),(14,20),(22,24),(26,28),(30,36),(32,40),(38,42),(46,48),(3,5),(9,13),(11,19),(15,21),(23,25),(27,29),(31,37),(33,41),(39,43),(47,49)],
[(4,8),(6,10),(14,26),(16,18),(20,28),(22,30),(24,36),(32,34),(40,44),(42,46),(5,9),(7,11),(15,27),(17,19),(21,29),(23,31),(25,37),(33,35),(41,45),(43,47),(2,3),(48,49)],
[(6,8),(12,18),(14,22),(20,24),(26,30),(28,36),(32,38),(42,44),(7,9),(13,19),(15,23),(21,25),(27,31),(29,37),(33,39),(43,45),(4,5),(46,47)],
[(10,14),(12,16),(18,26),(20,22),(24,32),(28,30),(34,38),(36,40),(11,15),(13,17),(19,27),(21,23),(25,33),(29,31),(35,39),(37,41),(6,7),(44,45)],
[(10,12),(14,16),(18,20),(22,26),(24,28),(30,32),(34,36),(38,40),(11,13),(15,17),(19,21),(23,27),(25,29),(31,33),(35,37),(39,41)],
[(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(40,42),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(37,39),(41,43)],
[(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43)],
[(1,32),(3,34),(5,36),(7,38),(9,40),(11,42),(13,44),(15,46),(17,48),(19,50)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(23,38),(25,40),(27,42),(29,44),(31,46),(33,48),(35,50)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(33,40),(35,42),(37,44),(39,46),(41,48),(43,50)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46),(45,48),(47,50)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50)],
],
53: [
[(0,7),(1,10),(3,5),(4,8),(6,13),(9,19),(11,14),(12,17),(15,16),(18,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52)],
[(0,11),(1,15),(2,12),(3,4),(5,8),(6,9),(7,14),(10,16),(13,19),(17,20),(21,23),(22,24),(25,27),(26,28),(29,31),(30,32),(33,35),(34,36),(37,39),(38,40),(41,43),(42,44),(45,47),(46,48),(49,51),(50,52)],
[(0,6),(1,3),(2,18),(4,15),(5,10),(8,16),(11,17),(12,13),(14,20),(21,25),(22,26),(23,27),(24,28),(29,33),(30,34),(31,35),(32,36),(37,41),(38,42),(39,43),(40,44),(45,49),(46,50),(47,51),(48,52)],
[(2,6),(5,12),(7,18),(8,14),(9,11),(10,17),(13,19),(16,20),(21,29),(22,30),(23,31),(24,32),(25,33),(26,34),(27,35),(28,36),(37,45),(38,46),(39,47),(40,48),(41,49),(42,50),(43,51),(44,52)],
[(1,2),(4,7),(5,9),(6,17),(10,13),(11,12),(14,19),(15,18),(21,37),(22,29),(23,25),(24,33),(26,31),(27,30),(28,35),(32,34),(36,52),(38,45),(39,41),(40,49),(42,47),(43,46),(44,51),(48,50)],
[(0,2),(3,6),(4,5),(7,10),(8,11),(9,15),(12,16),(13,18),(14,17),(19,20),(22,23),(24,26),(25,29),(27,43),(28,32),(30,46),(31,33),(34,35),(38,39),(40,42),(41,45),(44,48),(47,49),(50,51)],
[(0,1),(2,3),(5,9),(6,12),(7,8),(11,14),(13,15),(16,19),(17,18),(22,38),(23,39),(24,40),(25,41),(26,31),(28,44),(29,45),(32,48),(33,49),(34,50),(35,51),(42,47),(20,52)],
[(1,2),(3,9),(6,13),(10,11),(12,15),(16,17),(18,19),(24,38),(25,37),(26,42),(27,39),(28,30),(29,41),(31,47),(32,44),(34,46),(35,49),(36,48),(43,45)],
[(1,4),(2,5),(3,7),(6,10),(8,9),(11,12),(13,14),(17,18),(22,25),(24,29),(26,37),(28,38),(30,42),(31,43),(32,40),(33,41),(35,45),(36,47),(44,49),(48,51)],
[(2,4),(5,6),(7,8),(9,11),(10,13),(12,15),(14,16),(23,26),(28,29),(30,39),(32,38),(33,37),(34,43),(35,41),(36,40),(44,45),(47,50),(19,51)],
[(3,4),(5,7),(6,8),(9,10),(11,13),(12,14),(15,16),(23,25),(27,33),(30,37),(31,32),(34,38),(35,39),(36,43),(40,46),(41,42),(48,50)],
[(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(26,27),(29,33),(30,31),(32,34),(35,37),(36,38),(39,41),(40,44),(42,43),(46,47),(18,50)],
[(24,26),(27,28),(29,30),(31,33),(32,35),(34,37),(36,39),(38,41),(40,42),(43,44),(45,46),(47,49)],
[(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49)],
[(5,37),(13,45),(9,41),(1,33),(17,49),(7,39),(15,47),(11,43),(3,35),(6,38),(14,46),(10,42),(2,34),(8,40),(0,32),(16,48),(12,44),(4,36)],
[(5,21),(13,29),(9,25),(17,33),(7,23),(15,31),(11,27),(19,35),(6,22),(14,30),(10,26),(18,34),(8,24),(16,32),(12,28),(20,36)],
[(13,21),(29,37),(1,9),(17,25),(33,41),(15,23),(31,39),(3,11),(19,27),(35,43),(14,22),(30,38),(2,10),(18,26),(34,42),(0,8),(16,24),(32,40),(4,12),(20,28),(36,44)],
[(1,5),(9,13),(17,21),(25,29),(33,37),(41,45),(3,7),(11,15),(19,23),(27,31),(35,39),(43,47),(2,6),(10,14),(18,22),(26,30),(34,38),(42,46),(4,8),(12,16),(20,24),(28,32),(36,40),(44,48)],
[(3,5),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(39,41),(43,45),(47,49),(0,2),(4,6),(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(40,42),(44,46),(48,50)],
[(0,1),(2,3),(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51)],
],
54: [
[(0,1),(2,3),(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51),(52,53)],
[(0,12),(1,13),(2,6),(3,7),(4,10),(8,20),(9,21),(11,17),(14,18),(15,19),(22,24),(23,25),(26,28),(27,29),(30,32),(31,33),(34,36),(35,37),(38,40),(39,41),(42,44),(43,45),(46,48),(47,49),(50,52),(51,53)],
[(0,2),(1,6),(3,12),(4,16),(5,17),(7,13),(8,14),(9,18),(15,20),(19,21),(22,26),(23,27),(24,28),(25,29),(30,34),(31,35),(32,36),(33,37),(38,42),(39,43),(40,44),(41,45),(46,50),(47,51),(48,52),(49,53)],
[(0,8),(1,15),(2,14),(3,9),(5,11),(6,20),(7,19),(10,16),(12,18),(13,21),(22,30),(23,31),(24,32),(25,33),(26,34),(27,35),(28,36),(29,37),(38,46),(39,47),(40,48),(41,49),(42,50),(43,51),(44,52),(45,53)],
[(0,4),(1,10),(3,8),(5,9),(7,14),(11,20),(12,16),(13,18),(17,21),(22,38),(23,30),(24,26),(25,34),(27,32),(28,31),(29,36),(33,35),(37,53),(39,46),(40,42),(41,50),(43,48),(44,47),(45,52),(49,51)],
[(1,3),(2,5),(4,8),(6,9),(7,10),(11,14),(12,15),(13,17),(16,19),(18,20),(23,24),(25,27),(26,30),(28,44),(29,33),(31,47),(32,34),(35,36),(39,40),(41,43),(42,46),(45,49),(48,50),(51,52),(21,53)],
[(2,4),(3,12),(5,8),(6,11),(9,18),(10,15),(13,16),(17,19),(23,39),(24,40),(25,41),(26,42),(27,32),(29,45),(30,46),(33,49),(34,50),(35,51),(36,52),(43,48)],
[(1,2),(3,4),(5,7),(6,12),(8,11),(9,15),(10,13),(14,16),(17,18),(19,20),(25,39),(26,38),(27,43),(28,40),(29,31),(30,42),(32,48),(33,45),(35,47),(36,50),(37,49),(44,46)],
[(2,3),(4,5),(7,12),(8,10),(9,14),(11,13),(16,17),(18,19),(23,26),(25,30),(27,38),(29,39),(31,43),(32,44),(33,41),(34,42),(36,46),(37,48),(45,50),(49,52)],
[(4,6),(5,8),(9,11),(10,12),(13,16),(15,17),(24,27),(29,30),(31,40),(33,39),(34,38),(35,44),(36,42),(37,41),(45,46),(48,51),(20,52)],
[(3,4),(6,7),(9,10),(11,12),(14,15),(17,18),(24,26),(28,34),(31,38),(32,33),(35,39),(36,40),(37,44),(41,47),(42,43),(49,51)],
[(5,6),(7,8),(10,11),(13,14),(15,16),(27,28),(30,34),(31,32),(33,35),(36,38),(37,39),(40,42),(41,45),(43,44),(47,48),(19,51)],
[(6,7),(8,9),(12,13),(14,15),(25,27),(28,29),(30,31),(32,34),(33,36),(35,38),(37,40),(39,42),(41,43),(44,45),(46,47),(48,50)],
[(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50)],
[(6,38),(14,46),(10,42),(2,34),(18,50),(8,40),(0,32),(16,48),(12,44),(4,36),(7,39),(15,47),(11,43),(3,35),(9,41),(1,33),(17,49),(13,45),(5,37)],
[(6,22),(14,30),(10,26),(18,34),(8,24),(16,32),(12,28),(20,36),(7,23),(15,31),(11,27),(19,35),(9,25),(17,33),(13,29),(21,37)],
[(14,22),(30,38),(2,10),(18,26),(34,42),(0,8),(16,24),(32,40),(4,12),(20,28),(36,44),(15,23),(31,39),(3,11),(19,27),(35,43),(1,9),(17,25),(33,41),(5,13),(21,29),(37,45)],
[(2,6),(10,14),(18,22),(26,30),(34,38),(42,46),(4,8),(12,16),(20,24),(28,32),(36,40),(44,48),(3,7),(11,15),(19,23),(27,31),(35,39),(43,47),(5,9),(13,17),(21,25),(29,33),(37,41),(45,49)],
[(0,2),(4,6),(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(40,42),(44,46),(48,50),(1,3),(5,7),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(37,39),(41,43),(45,47),(49,51)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52)],
],
55: [
[(0,20),(1,12),(2,16),(4,6),(5,10),(7,21),(8,14),(9,15),(11,22),(13,18),(17,19),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52),(53,54)],
[(0,3),(1,11),(2,7),(4,17),(5,13),(6,19),(8,9),(10,18),(12,22),(14,15),(16,21),(23,25),(24,26),(27,29),(28,30),(31,33),(32,34),(35,37),(36,38),(39,41),(40,42),(43,45),(44,46),(47,49),(48,50),(51,53),(52,54)],
[(0,1),(2,4),(3,12),(5,8),(6,9),(7,10),(11,20),(13,16),(14,17),(15,18),(19,21),(23,27),(24,28),(25,29),(26,30),(31,35),(32,36),(33,37),(34,38),(39,43),(40,44),(41,45),(42,46),(47,51),(48,52),(49,53),(50,54)],
[(2,5),(4,8),(6,11),(7,14),(9,16),(12,17),(15,19),(18,21),(23,31),(24,32),(25,33),(26,34),(27,35),(28,36),(29,37),(30,38),(39,47),(40,48),(41,49),(42,50),(43,51),(44,52),(45,53),(46,54)],
[(1,8),(3,14),(4,7),(9,20),(10,12),(11,13),(15,22),(16,19),(23,39),(24,31),(25,27),(26,35),(28,33),(29,32),(30,37),(34,36),(38,54),(40,47),(41,43),(42,51),(44,49),(45,48),(46,53),(50,52)],
[(0,7),(1,5),(3,4),(6,11),(8,15),(9,14),(10,13),(12,17),(18,22),(19,20),(24,25),(26,28),(27,31),(29,45),(30,34),(32,48),(33,35),(36,37),(40,41),(42,44),(43,47),(46,50),(49,51),(52,53)],
[(0,2),(1,6),(4,7),(5,9),(8,10),(13,15),(14,18),(16,19),(17,22),(20,21),(24,40),(25,41),(26,42),(27,43),(28,33),(30,46),(31,47),(34,50),(35,51),(36,52),(37,53),(44,49)],
[(2,3),(4,5),(6,8),(7,9),(10,11),(12,13),(14,16),(15,17),(18,19),(21,22),(26,40),(27,39),(28,44),(29,41),(30,32),(31,43),(33,49),(34,46),(36,48),(37,51),(38,50),(45,47)],
[(1,2),(3,6),(4,10),(7,8),(9,11),(12,14),(13,19),(15,16),(17,20),(24,27),(26,31),(28,39),(30,40),(32,44),(33,45),(34,42),(35,43),(37,47),(38,49),(46,51),(50,53),(22,54)],
[(2,3),(5,10),(6,7),(8,9),(13,18),(14,15),(16,17),(20,21),(25,28),(30,31),(32,41),(34,40),(35,39),(36,45),(37,43),(38,42),(46,47),(49,52)],
[(3,4),(5,7),(10,12),(11,13),(16,18),(19,20),(25,27),(29,35),(32,39),(33,34),(36,40),(37,41),(38,45),(42,48),(43,44),(50,52),(21,53)],
[(4,6),(8,10),(9,12),(11,14),(13,15),(17,19),(28,29),(31,35),(32,33),(34,36),(37,39),(38,40),(41,43),(42,46),(44,45),(48,49),(20,52)],
[(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(26,28),(29,30),(31,32),(33,35),(34,37),(36,39),(38,41),(40,43),(42,44),(45,46),(47,48),(49,51)],
[(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51)],
[(7,39),(15,47),(11,43),(3,35),(19,51),(9,41),(1,33),(17,49),(13,45),(5,37),(8,40),(0,32),(16,48),(12,44),(4,36),(10,42),(2,34),(18,50),(14,46),(6,38)],
[(7,23),(15,31),(11,27),(19,35),(9,25),(17,33),(13,29),(21,37),(8,24),(16,32),(12,28),(20,36),(10,26),(18,34),(14,30),(22,38)],
[(15,23),(31,39),(3,11),(19,27),(35,43),(1,9),(17,25),(33,41),(5,13),(21,29),(37,45),(0,8),(16,24),(32,40),(4,12),(20,28),(36,44),(2,10),(18,26),(34,42),(6,14),(22,30),(38,46)],
[(3,7),(11,15),(19,23),(27,31),(35,39),(43,47),(5,9),(13,17),(21,25),(29,33),(37,41),(45,49),(4,8),(12,16),(20,24),(28,32),(36,40),(44,48),(6,10),(14,18),(22,26),(30,34),(38,42),(46,50)],
[(1,3),(5,7),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(37,39),(41,43),(45,47),(49,51),(2,4),(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(38,40),(42,44),(46,48),(50,52)],
[(0,1),(2,3),(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51),(52,53)],
],
56: [
[(0,20),(1,12),(2,16),(3,23),(4,6),(5,10),(7,21),(8,14),(9,15),(11,22),(13,18),(17,19),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51),(52,53),(54,55)],
[(0,3),(1,11),(2,7),(4,17),(5,13),(6,19),(8,9),(10,18),(12,22),(14,15),(16,21),(20,23),(24,26),(25,27),(28,30),(29,31),(32,34),(33,35),(36,38),(37,39),(40,42),(41,43),(44,46),(45,47),(48,50),(49,51),(52,54),(53,55)],
[(0,1),(2,4),(3,12),(5,8),(6,9),(7,10),(11,20),(13,16),(14,17),(15,18),(19,21),(22,23),(24,28),(25,29),(26,30),(27,31),(32,36),(33,37),(34,38),(35,39),(40,44),(41,45),(42,46),(43,47),(48,52),(49,53),(50,54),(51,55)],
[(2,5),(4,8),(6,11),(7,14),(9,16),(12,17),(15,19),(18,21),(24,32),(25,33),(26,34),(27,35),(28,36),(29,37),(30,38),(31,39),(40,48),(41,49),(42,50),(43,51),(44,52),(45,53),(46,54),(47,55)],
[(1,8),(3,14),(4,7),(9,20),(10,12),(11,13),(15,22),(16,19),(24,40),(25,32),(26,28),(27,36),(29,34),(30,33),(31,38),(35,37),(39,55),(41,48),(42,44),(43,52),(45,50),(46,49),(47,54),(51,53)],
[(0,7),(1,5),(3,4),(6,11),(8,15),(9,14),(10,13),(12,17),(16,23),(18,22),(19,20),(25,26),(27,29),(28,32),(30,46),(31,35),(33,49),(34,36),(37,38),(41,42),(43,45),(44,48),(47,51),(50,52),(53,54)],
[(0,2),(1,6),(4,7),(5,9),(8,10),(13,15),(14,18),(16,19),(17,22),(21,23),(25,41),(26,42),(27,43),(28,44),(29,34),(31,47),(32,48),(35,51),(36,52),(37,53),(38,54),(45,50)],
[(2,3),(4,5),(6,8),(7,9),(10,11),(12,13),(14,16),(15,17),(18,19),(20,21),(27,41),(28,40),(29,45),(30,42),(31,33),(32,44),(34,50),(35,47),(37,49),(38,52),(39,51),(46,48),(23,55)],
[(1,2),(3,6),(4,10),(7,8),(9,11),(12,14),(13,19),(15,16),(17,20),(21,22),(25,28),(27,32),(29,40),(31,41),(33,45),(34,46),(35,43),(36,44),(38,48),(39,50),(47,52),(51,54)],
[(2,3),(5,10),(6,7),(8,9),(13,18),(14,15),(16,17),(20,21),(26,29),(31,32),(33,42),(35,41),(36,40),(37,46),(38,44),(39,43),(47,48),(50,53),(22,54)],
[(3,4),(5,7),(10,12),(11,13),(16,18),(19,20),(26,28),(30,36),(33,40),(34,35),(37,41),(38,42),(39,46),(43,49),(44,45),(51,53)],
[(4,6),(8,10),(9,12),(11,14),(13,15),(17,19),(29,30),(32,36),(33,34),(35,37),(38,40),(39,41),(42,44),(43,47),(45,46),(49,50),(21,53)],
[(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(27,29),(30,31),(32,33),(34,36),(35,38),(37,40),(39,42),(41,44),(43,45),(46,47),(48,49),(50,52)],
[(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52)],
[(8,40),(0,32),(16,48),(12,44),(4,36),(20,52),(10,42),(2,34),(18,50),(14,46),(6,38),(9,41),(1,33),(17,49),(13,45),(5,37),(11,43),(3,35),(19,51),(15,47),(7,39)],
[(8,24),(16,32),(12,28),(20,36),(10,26),(18,34),(14,30),(22,38),(9,25),(17,33),(13,29),(21,37),(11,27),(19,35),(15,31),(23,39)],
[(0,8),(16,24),(32,40),(4,12),(20,28),(36,44),(2,10),(18,26),(34,42),(6,14),(22,30),(38,46),(1,9),(17,25),(33,41),(5,13),(21,29),(37,45),(3,11),(19,27),(35,43),(7,15),(23,31),(39,47)],
[(4,8),(12,16),(20,24),(28,32),(36,40),(44,48),(6,10),(14,18),(22,26),(30,34),(38,42),(46,50),(5,9),(13,17),(21,25),(29,33),(37,41),(45,49),(7,11),(15,19),(23,27),(31,35),(39,43),(47,51)],
[(2,4),(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(38,40),(42,44),(46,48),(50,52),(3,5),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(39,41),(43,45),(47,49),(51,53)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52),(53,54)],
],
57: [
[(0,24),(2,20),(4,18),(6,14),(10,22),(12,16),(26,52),(28,50),(30,56),(32,54),(34,42),(36,38),(40,48),(44,46),(1,19),(3,41),(5,43),(7,45),(9,39),(11,49),(13,51),(15,53),(17,47),(21,31),(23,27),(25,35),(29,33),(37,55)],
[(2,12),(4,6),(8,22),(14,18),(16,20),(26,36),(28,40),(30,44),(32,34),(38,52),(42,54),(46,56),(48,50),(1,37),(3,15),(5,13),(7,11),(9,17),(19,55),(21,25),(23,29),(27,33),(31,35),(39,47),(41,53),(43,51),(45,49)],
[(0,8),(2,4),(6,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(38,42),(40,44),(46,48),(50,52),(54,56),(3,5),(7,9),(11,39),(13,41),(15,43),(17,45),(19,37),(21,23),(25,29),(27,31),(33,35),(47,49),(51,53)],
[(8,12),(10,18),(16,22),(20,24),(26,30),(28,32),(34,46),(36,48),(38,40),(42,44),(50,54),(52,56),(1,7),(3,21),(11,17),(13,15),(23,27),(29,33),(35,53),(39,45),(41,43),(49,55)],
[(0,10),(6,16),(8,14),(12,22),(18,20),(28,30),(32,50),(34,38),(36,40),(42,46),(44,48),(52,54),(1,3),(5,15),(7,21),(9,17),(25,27),(29,31),(35,49),(39,47),(41,51),(53,55)],
[(0,2),(4,10),(12,18),(14,16),(20,22),(28,34),(30,38),(36,42),(40,46),(44,52),(48,54),(3,7),(5,13),(9,11),(15,39),(17,41),(23,25),(27,29),(31,33),(43,51),(45,47),(49,53)],
[(0,26),(2,6),(4,8),(10,12),(18,20),(30,34),(32,38),(44,50),(48,52),(5,9),(11,25),(15,17),(19,23),(21,29),(27,35),(31,45),(33,37),(39,41),(47,51)],
[(2,4),(6,8),(10,14),(12,16),(32,36),(38,42),(40,44),(46,50),(5,19),(9,23),(11,13),(15,27),(17,21),(29,41),(33,47),(35,39),(37,51),(43,45),(0,1)],
[(2,28),(4,6),(8,10),(12,14),(16,18),(32,34),(36,38),(40,42),(44,46),(48,50),(3,5),(7,33),(9,19),(13,25),(21,29),(23,49),(27,35),(31,43),(37,47),(51,53)],
[(4,30),(6,8),(10,12),(20,46),(22,48),(24,50),(38,40),(42,44),(5,17),(7,11),(9,15),(13,33),(19,31),(23,43),(25,37),(39,51),(41,47),(45,49)],
[(6,32),(8,34),(10,36),(12,38),(14,40),(16,42),(18,44),(20,30),(5,7),(11,17),(15,19),(23,31),(25,33),(37,41),(39,45),(49,51)],
[(12,20),(16,26),(18,28),(22,32),(24,34),(36,52),(38,54),(40,56),(13,17),(21,25),(23,27),(29,33),(31,35),(39,43)],
[(8,16),(10,18),(14,22),(24,26),(28,36),(30,38),(32,40),(34,42),(44,52),(46,54),(48,56),(11,13),(17,21),(19,23),(25,27),(29,31),(33,37),(35,39),(43,45)],
[(4,8),(6,10),(12,16),(14,18),(20,24),(22,28),(26,30),(32,36),(34,38),(40,44),(42,46),(48,52),(50,54),(9,11),(13,15),(17,19),(21,23),(25,29),(27,31),(33,35),(37,39),(41,43),(45,47)],
[(2,4),(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(38,40),(42,44),(46,48),(50,52),(54,56),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(39,41),(43,45),(47,49)],
[(2,3),(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51),(52,53),(54,55)],
[(1,32),(3,34),(5,36),(7,38),(9,40),(11,42),(13,44),(15,46),(17,48),(19,50),(21,52),(23,54),(25,56)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(23,38),(25,40),(27,42),(29,44),(31,46),(33,48),(35,50),(37,52),(39,54),(41,56)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(33,40),(35,42),(37,44),(39,46),(41,48),(43,50),(45,52),(47,54),(49,56)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46),(45,48),(47,50),(49,52),(51,54),(53,56)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52),(53,54),(55,56)],
],
58: [
[(0,25),(1,3),(2,9),(4,19),(5,18),(6,21),(7,20),(8,10),(11,12),(13,14),(15,17),(16,23),(22,24),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51),(52,53),(54,55),(56,57)],
[(1,4),(2,16),(3,19),(5,13),(6,22),(7,11),(8,15),(9,23),(10,17),(12,20),(14,18),(21,24),(26,28),(27,29),(30,32),(31,33),(34,36),(35,37),(38,40),(39,41),(42,44),(43,45),(46,48),(47,49),(50,52),(51,53),(54,56),(55,57)],
[(1,5),(2,8),(3,14),(4,13),(6,7),(9,15),(10,16),(11,22),(12,21),(17,23),(18,19),(20,24),(26,30),(27,31),(28,32),(29,33),(34,38),(35,39),(36,40),(37,41),(42,46),(43,47),(44,48),(45,49),(50,54),(51,55),(52,56),(53,57)],
[(0,10),(1,6),(3,7),(4,11),(5,12),(13,20),(14,21),(15,25),(18,22),(19,24),(26,34),(27,35),(28,36),(29,37),(30,38),(31,39),(32,40),(33,41),(42,50),(43,51),(44,52),(45,53),(46,54),(47,55),(48,56),(49,57)],
[(0,4),(8,10),(12,13),(15,17),(21,25),(26,42),(27,34),(28,30),(29,38),(31,36),(32,35),(33,40),(37,39),(41,57),(43,50),(44,46),(45,54),(47,52),(48,51),(49,56),(53,55)],
[(0,2),(4,8),(10,12),(13,15),(17,21),(23,25),(27,28),(29,31),(30,34),(32,48),(33,37),(35,51),(36,38),(39,40),(43,44),(45,47),(46,50),(49,53),(52,54),(55,56)],
[(0,1),(2,3),(4,5),(8,14),(9,13),(11,17),(12,16),(20,21),(22,23),(24,25),(27,43),(28,44),(29,45),(30,46),(31,36),(33,49),(34,50),(37,53),(38,54),(39,55),(40,56),(47,52)],
[(1,4),(3,10),(6,9),(7,13),(8,11),(12,18),(14,17),(15,22),(16,19),(21,24),(29,43),(30,42),(31,47),(32,44),(33,35),(34,46),(36,52),(37,49),(39,51),(40,54),(41,53),(48,50),(25,57)],
[(2,6),(3,8),(5,7),(9,12),(13,16),(17,22),(18,20),(19,23),(27,30),(29,34),(31,42),(33,43),(35,47),(36,48),(37,45),(38,46),(40,50),(41,52),(49,54),(53,56)],
[(1,2),(4,6),(5,9),(7,10),(11,12),(13,14),(15,18),(16,20),(19,21),(23,24),(28,31),(33,34),(35,44),(37,43),(38,42),(39,48),(40,46),(41,45),(49,50),(52,55)],
[(2,4),(3,5),(7,13),(8,9),(10,14),(11,15),(12,18),(16,17),(20,22),(21,23),(28,30),(32,38),(35,42),(36,37),(39,43),(40,44),(41,48),(45,51),(46,47),(53,55),(24,56)],
[(3,4),(6,9),(7,11),(10,12),(13,15),(14,18),(16,19),(21,22),(31,32),(34,38),(35,36),(37,39),(40,42),(41,43),(44,46),(45,49),(47,48),(51,52),(23,55)],
[(5,7),(6,8),(9,13),(10,11),(12,16),(14,15),(17,19),(18,20),(29,31),(32,33),(34,35),(36,38),(37,40),(39,42),(41,44),(43,46),(45,47),(48,49),(50,51),(52,54)],
[(5,6),(7,8),(9,10),(11,13),(12,14),(15,16),(17,18),(19,20),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52),(53,54)],
[(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(2,34),(22,54),(0,32),(3,35),(1,33)],
[(10,42),(18,50),(14,46),(6,38),(12,44),(4,36),(20,52),(16,48),(8,40),(11,43),(19,51),(15,47),(7,39),(13,45),(5,37),(21,53),(17,49),(9,41)],
[(10,26),(18,34),(14,30),(22,38),(12,28),(20,36),(16,32),(24,40),(11,27),(19,35),(15,31),(23,39),(13,29),(21,37),(17,33),(25,41)],
[(2,10),(18,26),(34,42),(6,14),(22,30),(38,46),(4,12),(20,28),(36,44),(8,16),(24,32),(40,48),(3,11),(19,27),(35,43),(7,15),(23,31),(39,47),(5,13),(21,29),(37,45),(9,17),(25,33),(41,49)],
[(6,10),(14,18),(22,26),(30,34),(38,42),(46,50),(0,4),(8,12),(16,20),(24,28),(32,36),(40,44),(48,52),(7,11),(15,19),(23,27),(31,35),(39,43),(47,51),(1,5),(9,13),(17,21),(25,29),(33,37),(41,45),(49,53)],
[(0,2),(4,6),(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(40,42),(44,46),(48,50),(52,54),(1,3),(5,7),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(37,39),(41,43),(45,47),(49,51),(53,55)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52),(53,54),(55,56)],
],
59: [
[(2,4),(6,20),(8,28),(10,16),(12,26),(14,24),(18,22),(32,34),(36,50),(38,58),(40,46),(42,56),(44,54),(48,52),(1,25),(3,21),(5,19),(7,15),(11,23),(13,17),(27,53),(29,51),(31,57),(33,55),(35,43),(37,39),(41,49),(45,47)],
[(0,28),(2,10),(4,16),(6,14),(12,18),(20,24),(22,26),(30,58),(32,40),(34,46),(36,44),(42,48),(50,54),(52,56),(3,13),(5,7),(9,23),(15,19),(17,21),(27,37),(29,41),(31,45),(33,35),(39,53),(43,55),(47,57),(49,51)],
[(0,14),(2,12),(4,18),(8,20),(10,22),(16,26),(24,28),(30,44),(32,42),(34,48),(38,50),(40,52),(46,56),(54,58),(1,9),(3,5),(7,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(39,43),(41,45),(47,49),(51,53),(55,57)],
[(0,12),(4,8),(6,10),(14,22),(16,20),(18,24),(26,28),(30,42),(34,38),(36,40),(44,52),(46,50),(48,54),(56,58),(9,13),(11,19),(17,23),(21,25),(27,31),(29,33),(35,47),(37,49),(39,41),(43,45),(51,55),(53,57)],
[(0,6),(2,4),(8,14),(10,18),(12,16),(20,22),(24,26),(28,58),(30,36),(32,34),(38,44),(40,48),(42,46),(50,52),(54,56),(1,11),(7,17),(9,15),(13,23),(19,21),(29,31),(33,51),(35,39),(37,41),(43,47),(45,49),(53,55)],
[(0,2),(4,6),(8,12),(14,18),(20,24),(22,26),(30,32),(34,36),(38,42),(44,48),(50,54),(52,56),(1,3),(5,11),(13,19),(15,17),(21,23),(29,35),(31,39),(37,43),(41,47),(45,53),(49,55)],
[(0,30),(2,4),(6,10),(16,20),(22,24),(26,56),(32,34),(36,40),(46,50),(52,54),(1,27),(3,7),(5,9),(11,13),(19,21),(31,35),(33,39),(45,51),(49,53)],
[(2,32),(6,8),(10,12),(14,16),(18,20),(24,54),(36,38),(40,42),(44,46),(48,50),(3,5),(7,9),(11,15),(13,17),(33,37),(39,43),(41,45),(47,51),(0,1)],
[(4,6),(8,10),(12,14),(16,18),(20,22),(34,36),(38,40),(42,44),(46,48),(50,52),(3,29),(5,7),(9,11),(13,15),(17,19),(33,35),(37,39),(41,43),(45,47),(49,51)],
[(4,34),(6,36),(8,38),(10,12),(14,16),(18,48),(20,50),(22,52),(40,42),(44,46),(5,31),(7,9),(11,13),(21,47),(23,49),(25,51),(39,41),(43,45)],
[(10,40),(12,42),(14,44),(16,46),(18,32),(20,34),(22,36),(24,38),(7,33),(9,35),(11,37),(13,39),(15,41),(17,43),(19,45),(21,31)],
[(10,18),(12,20),(14,22),(16,30),(26,40),(28,42),(36,44),(38,46),(13,21),(17,27),(19,29),(23,33),(25,35),(37,53),(39,55),(41,57)],
[(6,10),(8,16),(14,18),(24,30),(26,32),(28,34),(40,48),(42,50),(9,17),(11,19),(15,23),(25,27),(29,37),(31,39),(33,41),(35,43),(45,53),(47,55),(49,57)],
[(4,8),(12,16),(20,24),(22,26),(28,30),(32,36),(34,38),(40,44),(42,46),(48,52),(50,54),(5,9),(7,11),(13,17),(15,19),(21,25),(23,29),(27,31),(33,37),(35,39),(41,45),(43,47),(49,53),(51,55)],
[(2,4),(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(38,40),(42,44),(46,48),(50,52),(54,56),(3,5),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(39,41),(43,45),(47,49),(51,53),(55,57)],
[(2,3),(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51),(52,53),(54,55),(56,57)],
[(1,32),(3,34),(5,36),(7,38),(9,40),(11,42),(13,44),(15,46),(17,48),(19,50),(21,52),(23,54),(25,56),(27,58)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(23,38),(25,40),(27,42),(29,44),(31,46),(33,48),(35,50),(37,52),(39,54),(41,56),(43,58)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(33,40),(35,42),(37,44),(39,46),(41,48),(43,50),(45,52),(47,54),(49,56),(51,58)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46),(45,48),(47,50),(49,52),(51,54),(53,56),(55,58)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52),(53,54),(55,56),(57,58)],
],
60: [
[(2,4),(6,20),(8,28),(10,16),(12,26),(14,24),(18,22),(32,34),(36,50),(38,58),(40,46),(42,56),(44,54),(48,52),(3,5),(7,21),(9,29),(11,17),(13,27),(15,25),(19,23),(33,35),(37,51),(39,59),(41,47),(43,57),(45,55),(49,53)],
[(0,28),(2,10),(4,16),(6,14),(12,18),(20,24),(22,26),(30,58),(32,40),(34,46),(36,44),(42,48),(50,54),(52,56),(1,29),(3,11),(5,17),(7,15),(13,19),(21,25),(23,27),(31,59),(33,41),(35,47),(37,45),(43,49),(51,55),(53,57)],
[(0,14),(2,12),(4,18),(8,20),(10,22),(16,26),(24,28),(30,44),(32,42),(34,48),(38,50),(40,52),(46,56),(54,58),(1,15),(3,13),(5,19),(9,21),(11,23),(17,27),(25,29),(31,45),(33,43),(35,49),(39,51),(41,53),(47,57),(55,59)],
[(0,12),(4,8),(6,10),(14,22),(16,20),(18,24),(26,28),(30,42),(34,38),(36,40),(44,52),(46,50),(48,54),(56,58),(1,13),(5,9),(7,11),(15,23),(17,21),(19,25),(27,29),(31,43),(35,39),(37,41),(45,53),(47,51),(49,55),(57,59)],
[(0,6),(2,4),(8,14),(10,18),(12,16),(20,22),(24,26),(28,58),(30,36),(32,34),(38,44),(40,48),(42,46),(50,52),(54,56),(1,7),(3,5),(9,15),(11,19),(13,17),(21,23),(25,27),(29,59),(31,37),(33,35),(39,45),(41,49),(43,47),(51,53),(55,57)],
[(0,2),(4,6),(8,12),(14,18),(20,24),(22,26),(30,32),(34,36),(38,42),(44,48),(50,54),(52,56),(1,3),(5,7),(9,13),(15,19),(21,25),(23,27),(31,33),(35,37),(39,43),(45,49),(51,55),(53,57),(58,59)],
[(0,30),(2,4),(6,10),(16,20),(22,24),(26,56),(32,34),(36,40),(46,50),(52,54),(1,31),(3,5),(7,11),(17,21),(23,25),(27,57),(33,35),(37,41),(47,51),(53,55)],
[(2,32),(6,8),(10,12),(14,16),(18,20),(24,54),(36,38),(40,42),(44,46),(48,50),(3,33),(7,9),(11,13),(15,17),(19,21),(25,55),(37,39),(41,43),(45,47),(49,51),(0,1)],
[(4,6),(8,10),(12,14),(16,18),(20,22),(34,36),(38,40),(42,44),(46,48),(50,52),(5,7),(9,11),(13,15),(17,19),(21,23),(35,37),(39,41),(43,45),(47,49),(51,53)],
[(4,34),(6,36),(8,38),(10,12),(14,16),(18,48),(20,50),(22,52),(40,42),(44,46),(5,35),(7,37),(9,39),(11,13),(15,17),(19,49),(21,51),(23,53),(41,43),(45,47)],
[(10,40),(12,42),(14,44),(16,46),(18,32),(20,34),(22,36),(24,38),(11,41),(13,43),(15,45),(17,47),(19,33),(21,35),(23,37),(25,39)],
[(10,18),(12,20),(14,22),(16,30),(26,40),(28,42),(36,44),(38,46),(11,19),(13,21),(15,23),(17,31),(27,41),(29,43),(37,45),(39,47)],
[(6,10),(8,16),(14,18),(24,30),(26,32),(28,34),(40,48),(42,50),(7,11),(9,17),(15,19),(25,31),(27,33),(29,35),(41,49),(43,51)],
[(4,8),(12,16),(20,24),(22,26),(28,30),(32,36),(34,38),(40,44),(42,46),(48,52),(50,54),(5,9),(13,17),(21,25),(23,27),(29,31),(33,37),(35,39),(41,45),(43,47),(49,53),(51,55)],
[(2,4),(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(38,40),(42,44),(46,48),(50,52),(54,56),(3,5),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(39,41),(43,45),(47,49),(51,53),(55,57)],
[(2,3),(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51),(52,53),(54,55),(56,57)],
[(1,32),(3,34),(5,36),(7,38),(9,40),(11,42),(13,44),(15,46),(17,48),(19,50),(21,52),(23,54),(25,56),(27,58)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(23,38),(25,40),(27,42),(29,44),(31,46),(33,48),(35,50),(37,52),(39,54),(41,56),(43,58)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(33,40),(35,42),(37,44),(39,46),(41,48),(43,50),(45,52),(47,54),(49,56),(51,58)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46),(45,48),(47,50),(49,52),(51,54),(53,56),(55,58)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52),(53,54),(55,56),(57,58)],
],
61: [
[(0,2),(4,6),(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(40,42),(44,46),(48,50),(52,54),(56,58),(3,5),(7,21),(9,29),(11,17),(13,27),(15,25),(19,23),(33,35),(37,51),(39,59),(41,47),(43,57),(45,55),(49,53)],
[(0,4),(2,6),(8,12),(10,14),(16,20),(18,22),(24,28),(26,30),(32,36),(34,38),(40,44),(42,46),(48,52),(50,54),(56,60),(1,29),(3,11),(5,17),(7,15),(13,19),(21,25),(23,27),(31,59),(33,41),(35,47),(37,45),(43,49),(51,55),(53,57)],
[(0,8),(2,10),(4,12),(6,14),(16,24),(18,26),(20,28),(22,30),(32,40),(34,42),(36,44),(38,46),(48,56),(50,58),(52,60),(1,15),(3,13),(5,19),(9,21),(11,23),(17,27),(25,29),(31,45),(33,43),(35,49),(39,51),(41,53),(47,57),(55,59)],
[(0,16),(2,18),(4,20),(6,22),(8,24),(10,26),(12,28),(14,30),(32,48),(34,50),(36,52),(38,54),(40,56),(42,58),(44,60),(1,13),(5,9),(7,11),(15,23),(17,21),(19,25),(27,29),(31,43),(35,39),(37,41),(45,53),(47,51),(49,55),(57,59)],
[(0,32),(2,16),(4,8),(6,24),(10,20),(12,18),(14,28),(22,26),(34,48),(36,40),(38,56),(42,52),(44,50),(46,60),(54,58),(1,7),(3,5),(9,15),(11,19),(13,17),(21,23),(25,27),(29,59),(31,37),(33,35),(39,45),(41,49),(43,47),(51,53),(55,57)],
[(2,4),(6,10),(8,16),(12,44),(14,22),(18,50),(20,24),(26,28),(34,36),(38,42),(40,48),(46,54),(52,56),(58,60),(1,3),(5,7),(9,13),(15,19),(21,25),(23,27),(31,33),(35,37),(39,43),(45,49),(51,55),(53,57)],
[(2,34),(4,36),(6,38),(8,40),(10,20),(14,46),(16,48),(22,54),(24,56),(26,58),(28,60),(42,52),(1,31),(3,5),(7,11),(17,21),(23,25),(27,57),(33,35),(37,41),(47,51),(53,55)],
[(6,34),(8,32),(10,42),(12,36),(14,18),(16,40),(20,52),(22,46),(26,50),(28,56),(30,54),(44,48),(3,33),(7,9),(11,13),(15,17),(19,21),(25,55),(37,39),(41,43),(45,47),(49,51),(0,1)],
[(2,8),(6,16),(10,32),(14,34),(18,42),(20,44),(22,38),(24,40),(28,48),(30,52),(46,56),(54,60),(5,7),(9,11),(13,15),(17,19),(21,23),(35,37),(39,41),(43,45),(47,49),(51,53)],
[(4,10),(14,16),(18,36),(22,34),(24,32),(26,44),(28,40),(30,38),(46,48),(52,58),(5,35),(7,37),(9,39),(11,13),(15,17),(19,49),(21,51),(23,53),(41,43),(45,47)],
[(4,8),(12,24),(18,32),(20,22),(26,34),(28,36),(30,44),(38,50),(40,42),(54,58),(11,41),(13,43),(15,45),(17,47),(19,33),(21,35),(23,37),(25,39)],
[(10,12),(16,24),(18,20),(22,26),(28,32),(30,34),(36,40),(38,46),(42,44),(50,52),(11,19),(13,21),(15,23),(17,31),(27,41),(29,43),(37,45),(39,47),(58,59)],
[(6,10),(12,14),(16,18),(20,24),(22,28),(26,32),(30,36),(34,40),(38,42),(44,46),(48,50),(52,56),(7,11),(9,17),(15,19),(25,31),(27,33),(29,35),(41,49),(43,51)],
[(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(38,40),(42,44),(46,48),(50,52),(54,56),(5,9),(13,17),(21,25),(23,27),(29,31),(33,37),(35,39),(41,45),(43,47),(49,53),(51,55)],
[(3,5),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(39,41),(43,45),(47,49),(51,53),(55,57)],
[(2,3),(4,5),(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51),(52,53),(54,55),(56,57)],
[(1,32),(3,34),(5,36),(7,38),(9,40),(11,42),(13,44),(15,46),(17,48),(19,50),(21,52),(23,54),(25,56),(27,58),(29,60)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(23,38),(25,40),(27,42),(29,44),(31,46),(33,48),(35,50),(37,52),(39,54),(41,56),(43,58),(45,60)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(33,40),(35,42),(37,44),(39,46),(41,48),(43,50),(45,52),(47,54),(49,56),(51,58),(53,60)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46),(45,48),(47,50),(49,52),(51,54),(53,56),(55,58),(57,60)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52),(53,54),(55,56),(57,58),(59,60)],
],
62: [
[(0,2),(4,6),(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(40,42),(44,46),(48,50),(52,54),(56,58),(1,3),(5,7),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(37,39),(41,43),(45,47),(49,51),(53,55),(57,59)],
[(0,4),(2,6),(8,12),(10,14),(16,20),(18,22),(24,28),(26,30),(32,36),(34,38),(40,44),(42,46),(48,52),(50,54),(56,60),(1,5),(3,7),(9,13),(11,15),(17,21),(19,23),(25,29),(27,31),(33,37),(35,39),(41,45),(43,47),(49,53),(51,55),(57,61)],
[(0,8),(2,10),(4,12),(6,14),(16,24),(18,26),(20,28),(22,30),(32,40),(34,42),(36,44),(38,46),(48,56),(50,58),(52,60),(1,9),(3,11),(5,13),(7,15),(17,25),(19,27),(21,29),(23,31),(33,41),(35,43),(37,45),(39,47),(49,57),(51,59),(53,61)],
[(0,16),(2,18),(4,20),(6,22),(8,24),(10,26),(12,28),(14,30),(32,48),(34,50),(36,52),(38,54),(40,56),(42,58),(44,60),(1,17),(3,19),(5,21),(7,23),(9,25),(11,27),(13,29),(15,31),(33,49),(35,51),(37,53),(39,55),(41,57),(43,59),(45,61)],
[(0,32),(2,16),(4,8),(6,24),(10,20),(12,18),(14,28),(22,26),(34,48),(36,40),(38,56),(42,52),(44,50),(46,60),(54,58),(1,33),(3,17),(5,9),(7,25),(11,21),(13,19),(15,29),(23,27),(35,49),(37,41),(39,57),(43,53),(45,51),(47,61),(55,59)],
[(2,4),(6,10),(8,16),(12,44),(14,22),(18,50),(20,24),(26,28),(34,36),(38,42),(40,48),(46,54),(52,56),(58,60),(3,5),(7,11),(9,17),(13,45),(15,23),(19,51),(21,25),(27,29),(35,37),(39,43),(41,49),(47,55),(53,57),(59,61),(0,1)],
[(2,34),(4,36),(6,38),(8,40),(10,20),(14,46),(16,48),(22,54),(24,56),(26,58),(28,60),(42,52),(3,35),(5,37),(7,39),(9,41),(11,21),(15,47),(17,49),(23,55),(25,57),(27,59),(29,61),(43,53)],
[(6,34),(8,32),(10,42),(12,36),(14,18),(16,40),(20,52),(22,46),(26,50),(28,56),(30,54),(44,48),(7,35),(9,33),(11,43),(13,37),(15,19),(17,41),(21,53),(23,47),(27,51),(29,57),(31,55),(45,49)],
[(2,8),(6,16),(10,32),(14,34),(18,42),(20,44),(22,38),(24,40),(28,48),(30,52),(46,56),(54,60),(3,9),(7,17),(11,33),(15,35),(19,43),(21,45),(23,39),(25,41),(29,49),(31,53),(47,57),(55,61)],
[(4,10),(14,16),(18,36),(22,34),(24,32),(26,44),(28,40),(30,38),(46,48),(52,58),(5,11),(15,17),(19,37),(23,35),(25,33),(27,45),(29,41),(31,39),(47,49),(53,59),(2,3),(60,61)],
[(4,8),(12,24),(18,32),(20,22),(26,34),(28,36),(30,44),(38,50),(40,42),(54,58),(5,9),(13,25),(19,33),(21,23),(27,35),(29,37),(31,45),(39,51),(41,43),(55,59)],
[(10,12),(16,24),(18,20),(22,26),(28,32),(30,34),(36,40),(38,46),(42,44),(50,52),(11,13),(17,25),(19,21),(23,27),(29,33),(31,35),(37,41),(39,47),(43,45),(51,53),(4,5),(58,59)],
[(6,10),(12,14),(16,18),(20,24),(22,28),(26,32),(30,36),(34,40),(38,42),(44,46),(48,50),(52,56),(7,11),(13,15),(17,19),(21,25),(23,29),(27,33),(31,37),(35,41),(39,43),(45,47),(49,51),(53,57)],
[(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(38,40),(42,44),(46,48),(50,52),(54,56),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(39,41),(43,45),(47,49),(51,53),(55,57)],
[(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51),(52,53),(54,55),(56,57)],
[(1,32),(3,34),(5,36),(7,38),(9,40),(11,42),(13,44),(15,46),(17,48),(19,50),(21,52),(23,54),(25,56),(27,58),(29,60)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(23,38),(25,40),(27,42),(29,44),(31,46),(33,48),(35,50),(37,52),(39,54),(41,56),(43,58),(45,60)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(33,40),(35,42),(37,44),(39,46),(41,48),(43,50),(45,52),(47,54),(49,56),(51,58),(53,60)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46),(45,48),(47,50),(49,52),(51,54),(53,56),(55,58),(57,60)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52),(53,54),(55,56),(57,58),(59,60)],
],
63: [
[(0,2),(4,6),(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(40,42),(44,46),(48,50),(52,54),(56,58),(60,62),(1,3),(5,7),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(37,39),(41,43),(45,47),(49,51),(53,55),(57,59)],
[(0,4),(2,6),(8,12),(10,14),(16,20),(18,22),(24,28),(26,30),(32,36),(34,38),(40,44),(42,46),(48,52),(50,54),(56,60),(58,62),(1,5),(3,7),(9,13),(11,15),(17,21),(19,23),(25,29),(27,31),(33,37),(35,39),(41,45),(43,47),(49,53),(51,55),(57,61)],
[(0,8),(2,10),(4,12),(6,14),(16,24),(18,26),(20,28),(22,30),(32,40),(34,42),(36,44),(38,46),(48,56),(50,58),(52,60),(54,62),(1,9),(3,11),(5,13),(7,15),(17,25),(19,27),(21,29),(23,31),(33,41),(35,43),(37,45),(39,47),(49,57),(51,59),(53,61)],
[(0,16),(2,18),(4,20),(6,22),(8,24),(10,26),(12,28),(14,30),(32,48),(34,50),(36,52),(38,54),(40,56),(42,58),(44,60),(46,62),(1,17),(3,19),(5,21),(7,23),(9,25),(11,27),(13,29),(15,31),(33,49),(35,51),(37,53),(39,55),(41,57),(43,59),(45,61)],
[(0,32),(2,16),(4,8),(6,24),(10,20),(12,18),(14,28),(22,26),(30,62),(34,48),(36,40),(38,56),(42,52),(44,50),(46,60),(54,58),(1,33),(3,17),(5,9),(7,25),(11,21),(13,19),(15,29),(23,27),(35,49),(37,41),(39,57),(43,53),(45,51),(47,61),(55,59)],
[(2,4),(6,10),(8,16),(12,44),(14,22),(18,50),(20,24),(26,28),(34,36),(38,42),(40,48),(46,54),(52,56),(58,60),(3,5),(7,11),(9,17),(13,45),(15,23),(19,51),(21,25),(27,29),(35,37),(39,43),(41,49),(47,55),(53,57),(59,61),(0,1)],
[(2,34),(4,36),(6,38),(8,40),(10,20),(14,46),(16,48),(22,54),(24,56),(26,58),(28,60),(42,52),(3,35),(5,37),(7,39),(9,41),(11,21),(15,47),(17,49),(23,55),(25,57),(27,59),(29,61),(43,53)],
[(6,34),(8,32),(10,42),(12,36),(14,18),(16,40),(20,52),(22,46),(26,50),(28,56),(30,54),(44,48),(7,35),(9,33),(11,43),(13,37),(15,19),(17,41),(21,53),(23,47),(27,51),(29,57),(31,55),(45,49)],
[(2,8),(6,16),(10,32),(14,34),(18,42),(20,44),(22,38),(24,40),(28,48),(30,52),(46,56),(54,60),(3,9),(7,17),(11,33),(15,35),(19,43),(21,45),(23,39),(25,41),(29,49),(31,53),(47,57),(55,61)],
[(4,10),(14,16),(18,36),(22,34),(24,32),(26,44),(28,40),(30,38),(46,48),(52,58),(5,11),(15,17),(19,37),(23,35),(25,33),(27,45),(29,41),(31,39),(47,49),(53,59),(2,3),(60,61)],
[(4,8),(12,24),(18,32),(20,22),(26,34),(28,36),(30,44),(38,50),(40,42),(54,58),(5,9),(13,25),(19,33),(21,23),(27,35),(29,37),(31,45),(39,51),(41,43),(55,59)],
[(10,12),(16,24),(18,20),(22,26),(28,32),(30,34),(36,40),(38,46),(42,44),(50,52),(11,13),(17,25),(19,21),(23,27),(29,33),(31,35),(37,41),(39,47),(43,45),(51,53),(4,5),(58,59)],
[(6,10),(12,14),(16,18),(20,24),(22,28),(26,32),(30,36),(34,40),(38,42),(44,46),(48,50),(52,56),(7,11),(13,15),(17,19),(21,25),(23,29),(27,33),(31,37),(35,41),(39,43),(45,47),(49,51),(53,57)],
[(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(38,40),(42,44),(46,48),(50,52),(54,56),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(39,41),(43,45),(47,49),(51,53),(55,57)],
[(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51),(52,53),(54,55),(56,57)],
[(1,32),(3,34),(5,36),(7,38),(9,40),(11,42),(13,44),(15,46),(17,48),(19,50),(21,52),(23,54),(25,56),(27,58),(29,60),(31,62)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(23,38),(25,40),(27,42),(29,44),(31,46),(33,48),(35,50),(37,52),(39,54),(41,56),(43,58),(45,60),(47,62)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(33,40),(35,42),(37,44),(39,46),(41,48),(43,50),(45,52),(47,54),(49,56),(51,58),(53,60),(55,62)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46),(45,48),(47,50),(49,52),(51,54),(53,56),(55,58),(57,60),(59,62)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52),(53,54),(55,56),(57,58),(59,60),(61,62)],
],
64: [
[(0,2),(4,6),(8,10),(12,14),(16,18),(20,22),(24,26),(28,30),(32,34),(36,38),(40,42),(44,46),(48,50),(52,54),(56,58),(60,62),(1,3),(5,7),(9,11),(13,15),(17,19),(21,23),(25,27),(29,31),(33,35),(37,39),(41,43),(45,47),(49,51),(53,55),(57,59),(61,63)],
[(0,4),(2,6),(8,12),(10,14),(16,20),(18,22),(24,28),(26,30),(32,36),(34,38),(40,44),(42,46),(48,52),(50,54),(56,60),(58,62),(1,5),(3,7),(9,13),(11,15),(17,21),(19,23),(25,29),(27,31),(33,37),(35,39),(41,45),(43,47),(49,53),(51,55),(57,61),(59,63)],
[(0,8),(2,10),(4,12),(6,14),(16,24),(18,26),(20,28),(22,30),(32,40),(34,42),(36,44),(38,46),(48,56),(50,58),(52,60),(54,62),(1,9),(3,11),(5,13),(7,15),(17,25),(19,27),(21,29),(23,31),(33,41),(35,43),(37,45),(39,47),(49,57),(51,59),(53,61),(55,63)],
[(0,16),(2,18),(4,20),(6,22),(8,24),(10,26),(12,28),(14,30),(32,48),(34,50),(36,52),(38,54),(40,56),(42,58),(44,60),(46,62),(1,17),(3,19),(5,21),(7,23),(9,25),(11,27),(13,29),(15,31),(33,49),(35,51),(37,53),(39,55),(41,57),(43,59),(45,61),(47,63)],
[(0,32),(2,16),(4,8),(6,24),(10,20),(12,18),(14,28),(22,26),(30,62),(34,48),(36,40),(38,56),(42,52),(44,50),(46,60),(54,58),(1,33),(3,17),(5,9),(7,25),(11,21),(13,19),(15,29),(23,27),(31,63),(35,49),(37,41),(39,57),(43,53),(45,51),(47,61),(55,59)],
[(2,4),(6,10),(8,16),(12,44),(14,22),(18,50),(20,24),(26,28),(34,36),(38,42),(40,48),(46,54),(52,56),(58,60),(3,5),(7,11),(9,17),(13,45),(15,23),(19,51),(21,25),(27,29),(35,37),(39,43),(41,49),(47,55),(53,57),(59,61),(0,1),(62,63)],
[(2,34),(4,36),(6,38),(8,40),(10,20),(14,46),(16,48),(22,54),(24,56),(26,58),(28,60),(42,52),(3,35),(5,37),(7,39),(9,41),(11,21),(15,47),(17,49),(23,55),(25,57),(27,59),(29,61),(43,53)],
[(6,34),(8,32),(10,42),(12,36),(14,18),(16,40),(20,52),(22,46),(26,50),(28,56),(30,54),(44,48),(7,35),(9,33),(11,43),(13,37),(15,19),(17,41),(21,53),(23,47),(27,51),(29,57),(31,55),(45,49)],
[(2,8),(6,16),(10,32),(14,34),(18,42),(20,44),(22,38),(24,40),(28,48),(30,52),(46,56),(54,60),(3,9),(7,17),(11,33),(15,35),(19,43),(21,45),(23,39),(25,41),(29,49),(31,53),(47,57),(55,61)],
[(4,10),(14,16),(18,36),(22,34),(24,32),(26,44),(28,40),(30,38),(46,48),(52,58),(5,11),(15,17),(19,37),(23,35),(25,33),(27,45),(29,41),(31,39),(47,49),(53,59),(2,3),(60,61)],
[(4,8),(12,24),(18,32),(20,22),(26,34),(28,36),(30,44),(38,50),(40,42),(54,58),(5,9),(13,25),(19,33),(21,23),(27,35),(29,37),(31,45),(39,51),(41,43),(55,59)],
[(10,12),(16,24),(18,20),(22,26),(28,32),(30,34),(36,40),(38,46),(42,44),(50,52),(11,13),(17,25),(19,21),(23,27),(29,33),(31,35),(37,41),(39,47),(43,45),(51,53),(4,5),(58,59)],
[(6,10),(12,14),(16,18),(20,24),(22,28),(26,32),(30,36),(34,40),(38,42),(44,46),(48,50),(52,56),(7,11),(13,15),(17,19),(21,25),(23,29),(27,33),(31,37),(35,41),(39,43),(45,47),(49,51),(53,57)],
[(6,8),(10,12),(14,16),(18,20),(22,24),(26,28),(30,32),(34,36),(38,40),(42,44),(46,48),(50,52),(54,56),(7,9),(11,13),(15,17),(19,21),(23,25),(27,29),(31,33),(35,37),(39,41),(43,45),(47,49),(51,53),(55,57)],
[(6,7),(8,9),(10,11),(12,13),(14,15),(16,17),(18,19),(20,21),(22,23),(24,25),(26,27),(28,29),(30,31),(32,33),(34,35),(36,37),(38,39),(40,41),(42,43),(44,45),(46,47),(48,49),(50,51),(52,53),(54,55),(56,57)],
[(1,32),(3,34),(5,36),(7,38),(9,40),(11,42),(13,44),(15,46),(17,48),(19,50),(21,52),(23,54),(25,56),(27,58),(29,60),(31,62)],
[(1,16),(3,18),(5,20),(7,22),(9,24),(11,26),(13,28),(15,30),(17,32),(19,34),(21,36),(23,38),(25,40),(27,42),(29,44),(31,46),(33,48),(35,50),(37,52),(39,54),(41,56),(43,58),(45,60),(47,62)],
[(1,8),(3,10),(5,12),(7,14),(9,16),(11,18),(13,20),(15,22),(17,24),(19,26),(21,28),(23,30),(25,32),(27,34),(29,36),(31,38),(33,40),(35,42),(37,44),(39,46),(41,48),(43,50),(45,52),(47,54),(49,56),(51,58),(53,60),(55,62)],
[(1,4),(3,6),(5,8),(7,10),(9,12),(11,14),(13,16),(15,18),(17,20),(19,22),(21,24),(23,26),(25,28),(27,30),(29,32),(31,34),(33,36),(35,38),(37,40),(39,42),(41,44),(43,46),(45,48),(47,50),(49,52),(51,54),(53,56),(55,58),(57,60),(59,62)],
[(1,2),(3,4),(5,6),(7,8),(9,10),(11,12),(13,14),(15,16),(17,18),(19,20),(21,22),(23,24),(25,26),(27,28),(29,30),(31,32),(33,34),(35,36),(37,38),(39,40),(41,42),(43,44),(45,46),(47,48),(49,50),(51,52),(53,54),(55,56),(57,58),(59,60),(61,62)],
]
}
| 118.221095
| 256
| 0.430228
| 28,040
| 116,566
| 1.788516
| 0.002425
| 0.006022
| 0.007298
| 0.008933
| 0.874377
| 0.852483
| 0.830967
| 0.821515
| 0.803948
| 0.794158
| 0
| 0.463804
| 0.072706
| 116,566
| 985
| 257
| 118.341117
| 0.000157
| 0.00018
| 0
| 0.242116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3d89d1dbf50b85ef2166d1b4d0029ab7ea9747fe
| 18,234
|
py
|
Python
|
tools/build_defs/docker/rewrite_json_test.py
|
andrefmrocha/bazel
|
0d2e409ec8eeadfde90b1860935af65d76c90966
|
[
"Apache-2.0"
] | 1
|
2021-12-16T08:19:50.000Z
|
2021-12-16T08:19:50.000Z
|
tools/build_defs/docker/rewrite_json_test.py
|
andrefmrocha/bazel
|
0d2e409ec8eeadfde90b1860935af65d76c90966
|
[
"Apache-2.0"
] | 3
|
2020-12-07T07:03:15.000Z
|
2021-02-04T14:06:14.000Z
|
tools/build_defs/docker/rewrite_json_test.py
|
andrefmrocha/bazel
|
0d2e409ec8eeadfde90b1860935af65d76c90966
|
[
"Apache-2.0"
] | 2
|
2020-05-28T22:56:41.000Z
|
2020-12-07T06:43:28.000Z
|
# Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Testing for rewrite_json."""
import unittest
from tools.build_defs.docker.rewrite_json import _DOCKER_VERSION
from tools.build_defs.docker.rewrite_json import _OPERATING_SYSTEM
from tools.build_defs.docker.rewrite_json import _PROCESSOR_ARCHITECTURE
from tools.build_defs.docker.rewrite_json import MetadataOptions
from tools.build_defs.docker.rewrite_json import RewriteMetadata
class RewriteJsonTest(unittest.TestCase):
"""Testing for rewrite_json."""
def testNewEntrypoint(self):
  """An Entrypoint option is added to a config that has none."""
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor'
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  entrypoint = ['/bin/bash']
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Entrypoint': entrypoint
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, entrypoint=entrypoint, parent=parent))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testOverrideEntrypoint(self):
  """A supplied Entrypoint replaces the one already in the config."""
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Entrypoint': ['/bin/sh', 'does', 'not', 'matter'],
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  entrypoint = ['/bin/bash']
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Entrypoint': entrypoint
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, entrypoint=entrypoint, parent=parent))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testNewCmd(self):
  """A Cmd option is added alongside an existing Entrypoint."""
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Entrypoint': ['/bin/bash'],
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  cmd = ['/bin/bash']
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Entrypoint': ['/bin/bash'],
          'Cmd': cmd
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, cmd=cmd, parent=parent))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testOverrideCmd(self):
  """A supplied Cmd replaces the one already in the config."""
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Entrypoint': ['/bin/bash'],
          'Cmd': ['does', 'not', 'matter'],
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  cmd = ['does', 'matter']
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Entrypoint': ['/bin/bash'],
          'Cmd': cmd
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, cmd=cmd, parent=parent))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testOverrideBoth(self):
  """Supplied Entrypoint and Cmd both replace their existing values."""
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Entrypoint': ['/bin/sh'],
          'Cmd': ['does', 'not', 'matter'],
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  entrypoint = ['/bin/bash', '-c']
  cmd = ['my-command', 'my-arg1', 'my-arg2']
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Entrypoint': entrypoint,
          'Cmd': cmd
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, entrypoint=entrypoint, cmd=cmd, parent=parent))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testOverrideParent(self):
  """The incoming 'id' becomes the new 'parent', replacing the old one."""
  name = 'me!'
  parent = 'parent'
  # In the typical case, we expect the parent to
  # come in as the 'id', and our grandparent to
  # be its 'parent'.
  in_data = {
      'id': parent,
      'parent': 'grandparent',
  }
  expected = {
      'id': name,
      'parent': parent,
      'config': {},
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, parent=parent))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testNewSize(self):
  """A supplied Size replaces the one already in the metadata."""
  # Size is one of the few fields that, when omitted,
  # should be removed.  Here it is supplied, so it is rewritten.
  in_data = {
      'id': 'you',
      'Size': '124',
  }
  name = 'me'
  parent = 'blah'
  size = '4321'
  expected = {
      'id': name,
      'parent': parent,
      'Size': size,
      'config': {},
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, size=size, parent=parent))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testOmitSize(self):
  """An existing Size is dropped when no Size option is supplied."""
  # Size is one of the few fields that, when omitted,
  # should be removed.
  in_data = {
      'id': 'you',
      'Size': '124',
  }
  name = 'me'
  parent = 'blah'
  expected = {
      'id': name,
      'parent': parent,
      'config': {},
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, parent=parent))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testOmitName(self):
  """Rewriting without a name must raise."""
  # Name is a required option.
  options = MetadataOptions(name=None)
  with self.assertRaises(Exception):
    RewriteMetadata({}, options)
def testStripContainerConfig(self):
  """An incoming 'container_config' section is stripped from the output."""
  # container_config is one of the fields that, when present
  # on input, should not be carried over to the output.
  in_data = {
      'id': 'you',
      'container_config': {},
  }
  name = 'me'
  parent = 'blah'
  expected = {
      'id': name,
      'parent': parent,
      'config': {},
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, parent=parent))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testEmptyBase(self):
  """All supplied options populate a metadata built from an empty base."""
  in_data = {}
  name = 'deadbeef'
  entrypoint = ['/bin/bash', '-c']
  cmd = ['my-command', 'my-arg1', 'my-arg2']
  size = '999'
  expected = {
      'id': name,
      'config': {
          'Entrypoint': entrypoint,
          'Cmd': cmd,
          'ExposedPorts': {
              '80/tcp': {}
          }
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
      'Size': size,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, entrypoint=entrypoint, cmd=cmd, size=size,
      ports=['80']))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testOmitParentWithBase(self):
  """Non-empty base metadata without a parent must raise."""
  # Our input data should be empty when parent is omitted
  base = {'id': 'you'}
  options = MetadataOptions(name='me')
  with self.assertRaises(Exception):
    RewriteMetadata(base, options)
def testNewPort(self):
  """A bare port is exposed as '<port>/tcp' in a config with no ports."""
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor'
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  port = '80'
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'ExposedPorts': {
              port + '/tcp': {}
          }
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, parent=parent, ports=[port]))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testAugmentPort(self):
  """A new port is added without disturbing existing exposed ports."""
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'ExposedPorts': {
              '443/tcp': {}
          }
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  port = '80'
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'ExposedPorts': {
              '443/tcp': {},
              port + '/tcp': {}
          }
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, parent=parent, ports=[port]))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testMultiplePorts(self):
  """Every port in the options list is exposed."""
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor'
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  port1 = '80'
  port2 = '8080'
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'ExposedPorts': {
              port1 + '/tcp': {},
              port2 + '/tcp': {}
          }
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, parent=parent, ports=[port1, port2]))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testPortCollision(self):
  """Re-exposing an already-exposed port does not duplicate it."""
  port = '80'
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'ExposedPorts': {
              port + '/tcp': {}
          }
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'ExposedPorts': {
              port + '/tcp': {}
          }
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, parent=parent, ports=[port]))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testPortWithProtocol(self):
  """A port that already carries a protocol suffix is used verbatim."""
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor'
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  port = '80/tcp'
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'ExposedPorts': {
              port: {}
          }
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, parent=parent, ports=[port]))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testNewVolume(self):
  """A volume is added to a config that has no Volumes section."""
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor'
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  volume = '/logs'
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Volumes': {
              volume: {}
          }
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, parent=parent, volumes=[volume]))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testAugmentVolume(self):
  """A new volume is added without disturbing existing volumes."""
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Volumes': {
              '/original': {}
          }
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  volume = '/data'
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Volumes': {
              '/original': {},
              volume: {}
          }
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, parent=parent, volumes=[volume]))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testMultipleVolumes(self):
  """Every volume in the options list is added."""
  in_data = {
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor'
      }
  }
  name = 'deadbeef'
  parent = 'blah'
  volume1 = '/input'
  volume2 = '/output'
  expected = {
      'id': name,
      'parent': parent,
      'config': {
          'User': 'mattmoor',
          'WorkingDir': '/usr/home/mattmoor',
          'Volumes': {
              volume1: {},
              volume2: {}
          }
      },
      'docker_version': _DOCKER_VERSION,
      'architecture': _PROCESSOR_ARCHITECTURE,
      'os': _OPERATING_SYSTEM,
  }
  actual = RewriteMetadata(in_data, MetadataOptions(
      name=name, parent=parent, volumes=[volume1, volume2]))
  # assertEquals is a deprecated alias (removed in Python 3.12).
  self.assertEqual(expected, actual)
def testEnv(self):
    """Environment entries are copied verbatim into config['Env']."""
    in_data = {
        'config': {
            'User': 'mattmoor',
            'WorkingDir': '/usr/home/mattmoor'
        }
    }
    name = 'deadbeef'
    parent = 'blah'
    env = [
        'baz=blah',
        'foo=bar',
    ]
    expected = {
        'id': name,
        'parent': parent,
        'config': {
            'User': 'mattmoor',
            'WorkingDir': '/usr/home/mattmoor',
            'Env': env,
        },
        'docker_version': _DOCKER_VERSION,
        'architecture': _PROCESSOR_ARCHITECTURE,
        'os': _OPERATING_SYSTEM,
    }
    actual = RewriteMetadata(in_data, MetadataOptions(
        name=name, env=env, parent=parent))
    # assertEquals is a deprecated alias, removed in Python 3.12.
    self.assertEqual(expected, actual)
def testEnvResolveReplace(self):
    """New Env entries replace same-named ones and '$var' references
    resolve against the existing environment; untouched entries survive."""
    in_data = {
        'config': {
            'User': 'mattmoor',
            'WorkingDir': '/usr/home/mattmoor',
            'Env': [
                'foo=bar',
                'baz=blah',
                'blah=still around',
            ],
        }
    }
    name = 'deadbeef'
    parent = 'blah'
    env = [
        'baz=replacement',
        'foo=$foo:asdf',
    ]
    expected = {
        'id': name,
        'parent': parent,
        'config': {
            'User': 'mattmoor',
            'WorkingDir': '/usr/home/mattmoor',
            # Result is sorted by variable name; '$foo' expanded to 'bar'.
            'Env': [
                'baz=replacement',
                'blah=still around',
                'foo=bar:asdf',
            ],
        },
        'docker_version': _DOCKER_VERSION,
        'architecture': _PROCESSOR_ARCHITECTURE,
        'os': _OPERATING_SYSTEM,
    }
    actual = RewriteMetadata(in_data, MetadataOptions(
        name=name, env=env, parent=parent))
    # assertEquals is a deprecated alias, removed in Python 3.12.
    self.assertEqual(expected, actual)
def testAugmentVolumeWithNullInput(self):
    """A null (None) 'Volumes' value is treated like an empty mapping."""
    in_data = {
        'config': {
            'User': 'mattmoor',
            'WorkingDir': '/usr/home/mattmoor',
            'Volumes': None,
        }
    }
    name = 'deadbeef'
    parent = 'blah'
    volume = '/data'
    expected = {
        'id': name,
        'parent': parent,
        'config': {
            'User': 'mattmoor',
            'WorkingDir': '/usr/home/mattmoor',
            'Volumes': {
                volume: {}
            }
        },
        'docker_version': _DOCKER_VERSION,
        'architecture': _PROCESSOR_ARCHITECTURE,
        'os': _OPERATING_SYSTEM,
    }
    actual = RewriteMetadata(in_data, MetadataOptions(
        name=name, parent=parent, volumes=[volume]))
    # assertEquals is a deprecated alias, removed in Python 3.12.
    self.assertEqual(expected, actual)
def testSetWorkingDir(self):
    """The workdir option overwrites the existing 'WorkingDir'."""
    in_data = {
        'config': {
            'User': 'bleh',
            'WorkingDir': '/home/bleh',
            'Volumes': {
            }
        }
    }
    name = 'deadbeef'
    parent = 'blah'
    workdir = '/some/path'
    expected = {
        'id': name,
        'parent': parent,
        'config': {
            'User': 'bleh',
            'WorkingDir': '/some/path',
            'Volumes': {
            }
        },
        'docker_version': _DOCKER_VERSION,
        'architecture': _PROCESSOR_ARCHITECTURE,
        'os': _OPERATING_SYSTEM,
    }
    actual = RewriteMetadata(in_data, MetadataOptions(
        name=name, parent=parent, workdir=workdir))
    # assertEquals is a deprecated alias, removed in Python 3.12.
    self.assertEqual(expected, actual)
# Allow running this test module directly with `python <module>.py`.
if __name__ == '__main__':
    unittest.main()
| 26.77533
| 74
| 0.529121
| 1,561
| 18,234
| 6.035234
| 0.126201
| 0.029296
| 0.057743
| 0.095107
| 0.808301
| 0.776032
| 0.772423
| 0.763826
| 0.736652
| 0.730177
| 0
| 0.00501
| 0.332236
| 18,234
| 680
| 75
| 26.814706
| 0.768725
| 0.055556
| 0
| 0.711443
| 0
| 0
| 0.197998
| 0
| 0
| 0
| 0
| 0
| 0.039801
| 1
| 0.039801
| false
| 0
| 0.00995
| 0
| 0.05141
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3ddd87c1ae2aa7934a39754d0a2ef13f80d4f350
| 3,526
|
py
|
Python
|
dm_page/migrations/0052_auto_20210909_2032.py
|
AlexanderHalsey/Donation-Manager
|
5655d1eb6394f7db9a178c081039c4e149ef6998
|
[
"Unlicense"
] | null | null | null |
dm_page/migrations/0052_auto_20210909_2032.py
|
AlexanderHalsey/Donation-Manager
|
5655d1eb6394f7db9a178c081039c4e149ef6998
|
[
"Unlicense"
] | null | null | null |
dm_page/migrations/0052_auto_20210909_2032.py
|
AlexanderHalsey/Donation-Manager
|
5655d1eb6394f7db9a178c081039c4e149ef6998
|
[
"Unlicense"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-09-09 20:32
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Replace the single donation_type/organisation FKs on 'paramètre'
    with five numbered slots of each (donation_type_1..5, organisation_1..5).
    """

    dependencies = [
        ('dm_page', '0051_auto_20210909_1942'),
    ]

    operations = (
        [
            migrations.RemoveField(
                model_name='paramètre',
                name='donation_type',
            ),
            migrations.RemoveField(
                model_name='paramètre',
                name='organisation',
            ),
        ]
        # Five numbered donation-type slots, each a nullable FK.
        + [
            migrations.AddField(
                model_name='paramètre',
                name='donation_type_%d' % slot,
                field=models.ForeignKey(
                    blank=True, null=True,
                    on_delete=django.db.models.deletion.SET_NULL,
                    related_name='donation_type%d' % slot,
                    to='dm_page.donationtype',
                    verbose_name='Type de don'),
            )
            for slot in range(1, 6)
        ]
        # Five numbered organisation slots, each a nullable FK.
        + [
            migrations.AddField(
                model_name='paramètre',
                name='organisation_%d' % slot,
                field=models.ForeignKey(
                    blank=True, null=True,
                    on_delete=django.db.models.deletion.SET_NULL,
                    related_name='organisation%d' % slot,
                    to='dm_page.organisation',
                    verbose_name='Organisation'),
            )
            for slot in range(1, 6)
        ]
    )
| 48.30137
| 191
| 0.65485
| 400
| 3,526
| 5.555
| 0.1525
| 0.043204
| 0.09721
| 0.118812
| 0.879838
| 0.879838
| 0.837534
| 0.812781
| 0.782178
| 0.782178
| 0
| 0.018532
| 0.219512
| 3,526
| 72
| 192
| 48.972222
| 0.788881
| 0.012762
| 0
| 0.545455
| 1
| 0
| 0.217879
| 0.006611
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030303
| 0
| 0.075758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ac0a1ac4e46de145006a576dede6d5ffee7d750
| 16,580
|
py
|
Python
|
non-structured_BATFERM_Dunn_Monod_slider.py
|
CarlosCaicedoM/IRB
|
e1539455ceb191033d4b7670faf588d6442d0c61
|
[
"MIT"
] | null | null | null |
non-structured_BATFERM_Dunn_Monod_slider.py
|
CarlosCaicedoM/IRB
|
e1539455ceb191033d4b7670faf588d6442d0c61
|
[
"MIT"
] | null | null | null |
non-structured_BATFERM_Dunn_Monod_slider.py
|
CarlosCaicedoM/IRB
|
e1539455ceb191033d4b7670faf588d6442d0c61
|
[
"MIT"
] | null | null | null |
#%% Cambiando solo mu
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 23 17:35:23 2019
@author: Carlos Caicedo-Montoya
Cinetica de producción -modelo monod-reactor batch
I. J. Dunn, E. Heinzle, J. Ingham, J. E. Pfenosil
Biological Reaction Engineering
Dynamic Modelling Fundamentals
with Simulation Examples
Second, Completely Revised Edition
Batch Fermentation (BATFERM)
"""
import numpy as np
from scipy.integrate import solve_ivp
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider, Button
#Definir el modelo
def batch_monod(t, C, UM):
    """Right-hand side of the batch Monod fermentation ODE system.

    C = [X, S, P] (biomass, substrate, product concentrations); UM is the
    maximum specific growth rate (1/h).  Kd, ks, Yxs, ms, mp and Ypx are
    read from module-level globals.  Substrate is clamped at zero so the
    solver cannot drive it negative.
    """
    X, S, P = C
    substrate = S if S > 0 else 0
    U = UM * substrate / (ks + substrate)   # Monod equation, 1/h
    dX_dt = (U - Kd) * X                    # biomass rate, kg/m3 h
    dP_dt = (mp + Ypx * U) * X              # product rate, g/m3 h
    # Once the substrate is exhausted its derivative is pinned at zero.
    dS_dt = -(U / Yxs + ms) * X if S > 0 else 0
    return [dX_dt, dS_dt, dP_dt]
# Model parameters
UM=0.3 #1/h
Kd=0.0001 # 1/h
ks=0.000001 #g/L
Ypx=7.7 #gP/gX h
mp=1.1 #gP/gX h
Yxs=0.06 #gX/gS
ms= 2.2 #gX/gs
Xo=0.1 #g/L
So=12 #g/L
Po=0 #g/L
# Initial conditions [X, S, P]
C_init = np.array([Xo, So, Po])
t_lim = (0, 10) #min  (NOTE(review): rates are per hour, so this is likely h — confirm)
t_array = np.linspace(0, 10, num=100)
# Solve the model
sol = solve_ivp(batch_monod,
t_lim,
C_init,
t_eval=t_array, args=(0.3,))
t_array = sol.t
conc_array = sol.y
Biomasa = conc_array[0]
Sustrato = conc_array[1]
Producto = conc_array[2]
# Plot the concentration profiles
figure1, ((ax1, ax2, ax3)) = plt.subplots(3, 1)
line1, = ax1.plot(t_array, Biomasa, label = "Biomasa", color = "darkorange")
ax1.legend()
ax1.set_xlabel(r'$tiempo (h)$')
ax1.set_ylabel(r'$[X] \/ (g \/ L ^{-1})$')
ax1.grid()
line2, = ax2.plot(t_array, Sustrato, label = "Sustrato", color = "darkblue")
ax2.legend()
ax2.set_xlabel(r'$tiempo (h)$')
ax2.set_ylabel(r'$[S] \/ (g \/ L ^{-1})$')
ax2.grid()
line3, = ax3.plot(t_array, Producto, label = "Producto", color = "red")
ax3.legend()
ax3.set_xlabel(r'$tiempo (h)$')
ax3.set_ylabel(r'$[P] \/ (g \/ L ^{-1})$')
ax3.grid()
# adjust the main plot to make room for the sliders
plt.subplots_adjust(left=0.3)
axcolor = 'lightcoral'
# Slider controlling the maximum specific growth rate mu.
ax_miu = plt.axes([0.05, 0.5, 0.15, 0.05], facecolor=axcolor)
s_miu= Slider(ax = ax_miu,
label = r'$\mu$ ($h ^ {-1}$)',
valmin = 0.1,
valmax = 1,
valinit=UM,
valfmt='%1.1f')
def update(val):
    """Re-solve the ODE with the slider's current mu and redraw the curves."""
    mu = s_miu.val
    result = solve_ivp(batch_monod, t_lim, C_init,
                       t_eval=t_array, args=(mu,))
    biomass, substrate, product = result.y
    line1.set_ydata(biomass)
    line2.set_ydata(substrate)
    line3.set_ydata(product)
    figure1.canvas.draw_idle()
# register the update function with each slider
s_miu.on_changed(update)
# Create a `matplotlib.widgets.Button` to reset the sliders to initial values.
resetax = plt.axes([0.1, 0.84, 0.09, 0.04])
button = Button(resetax, 'Reset variables', color='cornflowerblue', hovercolor='0.975')
def reset(event):
    # Restore the mu slider to its initial value.
    s_miu.reset()
button.on_clicked(reset)
#%% cambiando la velocidad de crecimiento y la afinidas por el sustrato
import numpy as np
from scipy.integrate import solve_ivp
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider, Button
#Definir el modelo
def batch_monod(t, C, UM, ks):
    """Batch Monod ODE right-hand side with mu (UM) and Ks as parameters.

    C = [X, S, P]; Kd, Yxs, ms, mp and Ypx are read from module-level
    globals.  Substrate is clamped at zero so it cannot go negative.
    """
    X, S, P = C
    substrate = S if S > 0 else 0
    U = UM * substrate / (ks + substrate)   # Monod equation, 1/h
    dX_dt = (U - Kd) * X                    # biomass rate, kg/m3 h
    dP_dt = (mp + Ypx * U) * X              # product rate, g/m3 h
    # Once the substrate is exhausted its derivative is pinned at zero.
    dS_dt = -(U / Yxs + ms) * X if S > 0 else 0
    return [dX_dt, dS_dt, dP_dt]
# Model parameters
UM=0.3 #1/h
Kd=0.0001 # 1/h
ks=0.000001 #g/L
Ypx=7.7 #gP/gX h
mp=1.1 #gP/gX h
Yxs=0.06 #gX/gS
ms= 2.2 #gX/gs
Xo=0.1 #g/L
So=12 #g/L
Po=0 #g/L
# Initial conditions [X, S, P]
C_init = np.array([Xo, So, Po])
t_lim = (0, 10) #min  (NOTE(review): rates are per hour, so this is likely h — confirm)
t_array = np.linspace(0, 10, num=100)
# Solve the model
sol = solve_ivp(batch_monod,
t_lim,
C_init,
t_eval=t_array, args=(0.3, 0.000001))
t_array = sol.t
conc_array = sol.y
Biomasa = conc_array[0]
Sustrato = conc_array[1]
Producto = conc_array[2]
# Plot the concentration profiles
figure1, ((ax1, ax2, ax3)) = plt.subplots(3, 1)
line1, = ax1.plot(t_array, Biomasa, label = "Biomasa", color = "darkorange")
ax1.legend()
ax1.set_xlabel(r'$tiempo (h)$')
ax1.set_ylabel(r'$[X] \/ (g \/ L ^{-1})$')
ax1.grid()
line2, = ax2.plot(t_array, Sustrato, label = "Sustrato", color = "darkblue")
ax2.legend()
ax2.set_xlabel(r'$tiempo (h)$')
ax2.set_ylabel(r'$[S] \/ (g \/ L ^{-1})$')
ax2.grid()
line3, = ax3.plot(t_array, Producto, label = "Producto", color = "red")
ax3.legend()
ax3.set_xlabel(r'$tiempo (h)$')
ax3.set_ylabel(r'$[P] \/ (g \/ L ^{-1})$')
ax3.grid()
# adjust the main plot to make room for the sliders
plt.subplots_adjust(left=0.3)
axcolor = 'lightcoral'
# One slider axes each for mu and Ks.
ax_miu = plt.axes([0.05, 0.5, 0.15, 0.05], facecolor=axcolor)
ax_ks = plt.axes([0.05, 0.3, 0.15, 0.05], facecolor=axcolor)
s_miu= Slider(ax = ax_miu,
label = r'$\mu$ ($h ^ {-1}$)',
valmin = 0.1,
valmax = 1,
valinit=UM,
valfmt='%1.1f')
s_ks= Slider(ax = ax_ks,
label = r'$K_s$ ($g \/L ^ {-1}$)',
valmin = 0.000001 ,
valmax = 5,
valinit=ks,
valfmt='%1.1f')
def update(val):
    """Re-solve the model with the current mu and Ks and refresh the plot."""
    mu = s_miu.val
    saturation = s_ks.val
    result = solve_ivp(batch_monod, t_lim, C_init,
                       t_eval=t_array, args=(mu, saturation))
    biomass, substrate, product = result.y
    line1.set_ydata(biomass)
    line2.set_ydata(substrate)
    line3.set_ydata(product)
    figure1.canvas.draw_idle()
# register the update function with each slider
s_miu.on_changed(update)
s_ks.on_changed(update)
# Create a `matplotlib.widgets.Button` to reset the sliders to initial values.
resetax = plt.axes([0.1, 0.84, 0.09, 0.04])
button = Button(resetax, 'Reset variables', color='cornflowerblue', hovercolor='0.975')
def reset(event):
    # Restore both sliders to their initial values.
    s_miu.reset()
    s_ks.reset()
button.on_clicked(reset)
#%% Cambiando todos los parámetros
import numpy as np
from scipy.integrate import solve_ivp
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider, Button
#Definir el modelo
def batch_monod(t, C, UM, Kd, ks, Ypx, mp, Yxs, ms):
    """Batch Monod ODE right-hand side with every kinetic constant as a
    parameter.

    C = [X, S, P] (biomass, substrate, product); UM is the maximum
    specific growth rate, Kd the death rate, ks the saturation constant,
    Ypx/Yxs yields, mp/ms maintenance coefficients.  Substrate is clamped
    at zero so the solver cannot drive it negative.
    """
    X, S, P = C
    substrate = S if S > 0 else 0
    U = UM * substrate / (ks + substrate)   # Monod equation, 1/h
    dX_dt = (U - Kd) * X                    # biomass rate, kg/m3 h
    dP_dt = (mp + Ypx * U) * X              # product rate, g/m3 h
    # Once the substrate is exhausted its derivative is pinned at zero.
    dS_dt = -(U / Yxs + ms) * X if S > 0 else 0
    return [dX_dt, dS_dt, dP_dt]
# Model parameters
UM=0.3 #1/h
Kd=0.0001 # 1/h
ks=0.000001 #g/L
Ypx=7.7 #gP/gX h
mp=1.1 #gP/gX h
Yxs=0.06 #gX/gS
ms= 2.2 #gX/gs
Xo=0.1 #g/L
So=12 #g/L
Po=0 #g/L
# Initial conditions [X, S, P]
C_init = np.array([Xo, So, Po])
t_lim = (0, 10) #min  (NOTE(review): rates are per hour, so this is likely h — confirm)
t_array = np.linspace(0, 10, num=100)
# Solve the model with all kinetic constants passed explicitly
sol = solve_ivp(batch_monod,
t_lim,
C_init,
t_eval=t_array,
args=(UM, Kd, ks, Ypx, mp, Yxs, ms))
t_array = sol.t
conc_array = sol.y
Biomasa = conc_array[0]
Sustrato = conc_array[1]
Producto = conc_array[2]
# Plot the concentration profiles
figure1, ((ax1, ax2, ax3)) = plt.subplots(3, 1)
line1, = ax1.plot(t_array, Biomasa, label = "Biomasa", color = "darkorange")
ax1.legend()
ax1.set_xlabel(r'$tiempo (h)$')
ax1.set_ylabel(r'$[X] \/ (g \/ L ^{-1})$')
ax1.grid()
line2, = ax2.plot(t_array, Sustrato, label = "Sustrato", color = "darkblue")
ax2.legend()
ax2.set_xlabel(r'$tiempo (h)$')
ax2.set_ylabel(r'$[S] \/ (g \/ L ^{-1})$')
ax2.grid()
line3, = ax3.plot(t_array, Producto, label = "Producto", color = "red")
ax3.legend()
ax3.set_xlabel(r'$tiempo (h)$')
ax3.set_ylabel(r'$[P] \/ (g \/ L ^{-1})$')
ax3.grid()
# adjust the main plot to make room for the sliders
plt.subplots_adjust(left=0.3)
axcolor = 'lightcoral'
# One axes per kinetic-parameter slider, stacked down the left margin.
ax_miu = plt.axes([0.05, 0.8, 0.15, 0.05], facecolor=axcolor)
ax_Kd = plt.axes([0.05, 0.7, 0.15, 0.05], facecolor=axcolor)
ax_ks = plt.axes([0.05, 0.6, 0.15, 0.05], facecolor=axcolor)
ax_Ypx = plt.axes([0.05, 0.5, 0.15, 0.05], facecolor=axcolor)
ax_mp = plt.axes([0.05, 0.4, 0.15, 0.05], facecolor=axcolor)
ax_Yxs = plt.axes([0.05, 0.3, 0.15, 0.05], facecolor=axcolor)
ax_ms = plt.axes([0.05, 0.2, 0.15, 0.05], facecolor=axcolor)
s_miu = Slider(ax=ax_miu, label=r'$\mu$ ($h ^ {-1}$)',
               valmin=0.1, valmax=1, valinit=UM, valfmt='%1.1f')
s_Kd = Slider(ax=ax_Kd, label=r'$K_d$ ($ \/h ^ {-1}$)',
              valmin=0.000001, valmax=0.5, valinit=Kd, valfmt='%1.1f')
s_ks = Slider(ax=ax_ks, label=r'$K_s$ ($g \/L ^ {-1}$)',
              valmin=0.000001, valmax=5, valinit=ks, valfmt='%1.1f')
# BUG FIX: the Ypx slider was initialised with valinit=Kd (0.0001),
# i.e. the death-rate constant, instead of the Ypx default (7.7).
s_Ypx = Slider(ax=ax_Ypx, label=r'$Y_{PX}$ ($ gP\/gX ^ {-1}$)',
               valmin=1, valmax=10, valinit=Ypx, valfmt='%1.1f')
s_mp = Slider(ax=ax_mp, label=r'$m_{p}$ ($ gP\/gX ^ {-1}$)',
              valmin=0.5, valmax=2, valinit=mp, valfmt='%1.1f')
s_Yxs = Slider(ax=ax_Yxs, label=r'$Y_{XS}$ ($ gX\/gS ^ {-1}$)',
               valmin=0.001, valmax=1, valinit=Yxs, valfmt='%1.1f')
s_ms = Slider(ax=ax_ms, label=r'$m_{S}$ ($ gS\/gS ^ {-1}$)',
              valmin=0.5, valmax=5, valinit=ms, valfmt='%1.1f')
def update(val):
    """Re-solve with all seven kinetic parameters read from the sliders."""
    kinetics = (s_miu.val, s_Kd.val, s_ks.val, s_Ypx.val,
                s_mp.val, s_Yxs.val, s_ms.val)
    result = solve_ivp(batch_monod, t_lim, C_init,
                       t_eval=t_array, args=kinetics)
    biomass, substrate, product = result.y
    line1.set_ydata(biomass)
    line2.set_ydata(substrate)
    line3.set_ydata(product)
    figure1.canvas.draw_idle()
# register the update function with each slider
s_miu.on_changed(update)
s_Kd.on_changed(update)
s_ks.on_changed(update)
s_Ypx.on_changed(update)
s_mp.on_changed(update)
s_Yxs.on_changed(update)
s_ms.on_changed(update)
# Create a `matplotlib.widgets.Button` to reset the sliders to initial values.
resetax = plt.axes([0.1, 0.9, 0.09, 0.04])
button = Button(resetax, 'Reset variables', color='cornflowerblue', hovercolor='0.975')
def reset(event):
    """Restore every kinetic-parameter slider to its initial value."""
    s_miu.reset()
    s_Kd.reset()
    s_ks.reset()
    s_Ypx.reset()
    s_mp.reset()
    # BUG FIX: this line was `s_Yxs.rese()` — a typo that raised
    # AttributeError whenever the reset button was clicked.
    s_Yxs.reset()
    s_ms.reset()
button.on_clicked(reset)
#%% Cambiando las condiciones iniciales
import numpy as np
from scipy.integrate import solve_ivp
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider, Button
#Definir el modelo
def batch_monod(t, C):
    """Batch Monod ODE right-hand side; all kinetic constants (UM, Kd, ks,
    Ypx, mp, Yxs, ms) are read from module-level globals.

    C = [X, S, P].  Substrate is clamped at zero so it cannot go negative.
    """
    X, S, P = C
    substrate = S if S > 0 else 0
    U = UM * substrate / (ks + substrate)   # Monod equation, 1/h
    dX_dt = (U - Kd) * X                    # biomass rate, kg/m3 h
    dP_dt = (mp + Ypx * U) * X              # product rate, g/m3 h
    # Once the substrate is exhausted its derivative is pinned at zero.
    dS_dt = -(U / Yxs + ms) * X if S > 0 else 0
    return [dX_dt, dS_dt, dP_dt]
# Model parameters
UM=0.3 #1/h
Kd=0.0001 # 1/h
ks=0.000001 #g/L
Ypx=7.7 #gP/gX h
mp=1.1 #gP/gX h
Yxs=0.06 #gX/gS
ms= 2.2 #gX/gs
Xo=0.1 #g/L
So=12 #g/L
Po=0 #g/L
# Initial conditions [X, S, P]
C_init = np.array([Xo, So, Po])
t_lim = (0, 10) #min  (NOTE(review): rates are per hour, so this is likely h — confirm)
t_array = np.linspace(0, 10, num=100)
# Solve the model
sol = solve_ivp(batch_monod,
t_lim,
C_init,
t_eval=t_array)
t_array = sol.t
conc_array = sol.y
Biomasa = conc_array[0]
Sustrato = conc_array[1]
Producto = conc_array[2]
# Plot the concentration profiles (fixed y-limits so slider changes stay visible)
figure1, ((ax1, ax2, ax3)) = plt.subplots(3, 1)
line1, = ax1.plot(t_array, Biomasa, label = "Biomasa", color = "darkorange")
ax1.legend()
ax1.set_xlabel(r'$tiempo (h)$')
ax1.set_ylabel(r'$[X] \/ (g \/ L ^{-1})$')
ax1.grid()
ax1.set_ylim([0, 2])
line2, = ax2.plot(t_array, Sustrato, label = "Sustrato", color = "darkblue")
ax2.legend()
ax2.set_xlabel(r'$tiempo (h)$')
ax2.set_ylabel(r'$[S] \/ (g \/ L ^{-1})$')
ax2.grid()
ax2.set_ylim([0, 20])
line3, = ax3.plot(t_array, Producto, label = "Producto", color = "red")
ax3.legend()
ax3.set_xlabel(r'$tiempo (h)$')
ax3.set_ylabel(r'$[P] \/ (g \/ L ^{-1})$')
ax3.grid()
ax3.set_ylim([0, 20])
# adjust the main plot to make room for the sliders
plt.subplots_adjust(left=0.3)
axcolor = 'azure'
# One slider per initial condition: X0, S0, P0.
ax_X0 = plt.axes([0.05, 0.7, 0.15, 0.05], facecolor=axcolor)
ax_S0 = plt.axes([0.05, 0.6, 0.15, 0.05], facecolor=axcolor)
ax_P0 = plt.axes([0.05, 0.5, 0.15, 0.05], facecolor=axcolor)
s_X0= Slider(ax = ax_X0, label = r'$X_{0}$ ($ g\/L ^ {-1}$)', valmin = 0.1 ,
valmax = 1, valinit=Xo, valfmt='%1.1f')
s_S0= Slider(ax = ax_S0, label = r'$S_{0}$ ($ g\/L ^ {-1}$)', valmin = 1 ,
valmax = 50, valinit=So, valfmt='%1.1f')
s_P0= Slider(ax = ax_P0, label = r'$P_{o}$ ($ g\/L ^ {-1}$)', valmin = 0 ,
valmax = 1, valinit=Po, valfmt='%1.1f')
def update(val):
    """Re-solve from the slider-selected initial conditions and redraw."""
    c0 = np.array([s_X0.val, s_S0.val, s_P0.val])
    result = solve_ivp(batch_monod, t_lim, c0, t_eval=t_array)
    biomass, substrate, product = result.y
    line1.set_ydata(biomass)
    line2.set_ydata(substrate)
    line3.set_ydata(product)
    figure1.canvas.draw_idle()
# register the update function with each slider
s_X0.on_changed(update)
s_S0.on_changed(update)
s_P0.on_changed(update)
# Create a `matplotlib.widgets.Button` to reset the sliders to initial values.
resetax = plt.axes([0.1, 0.9, 0.09, 0.04])
button = Button(resetax, 'Reset variables', color='cornflowerblue', hovercolor='0.975')
def reset(event):
    # Restore all three initial-condition sliders.
    s_X0.reset()
    s_S0.reset()
    s_P0.reset()
button.on_clicked(reset)
| 27.359736
| 88
| 0.562364
| 2,652
| 16,580
| 3.40083
| 0.089744
| 0.007318
| 0.005655
| 0.028385
| 0.88746
| 0.872381
| 0.856192
| 0.84455
| 0.84067
| 0.84067
| 0
| 0.059522
| 0.283595
| 16,580
| 605
| 89
| 27.404959
| 0.699781
| 0.178046
| 0
| 0.852804
| 0
| 0
| 0.087843
| 0
| 0
| 0
| 0
| 0.001653
| 0
| 1
| 0.028037
| false
| 0
| 0.037383
| 0
| 0.074766
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ac9cdd784245d97af1094ca7263065025cc7134
| 16,367
|
py
|
Python
|
airboard/views.py
|
alyssflynn/airboard
|
9ae8ba0ae4b76cb52ede961fd15f8d86451cf626
|
[
"MIT"
] | 1
|
2018-08-15T21:41:23.000Z
|
2018-08-15T21:41:23.000Z
|
airboard/views.py
|
alyssflynn/airboard
|
9ae8ba0ae4b76cb52ede961fd15f8d86451cf626
|
[
"MIT"
] | null | null | null |
airboard/views.py
|
alyssflynn/airboard
|
9ae8ba0ae4b76cb52ede961fd15f8d86451cf626
|
[
"MIT"
] | 2
|
2018-08-26T19:38:23.000Z
|
2018-09-25T19:05:43.000Z
|
import os, inspect
from flask import (
render_template,
Response,
request,
json,
jsonify)
import pandas as pd
from airboard import app
from airboard.data import (
session,
query_stats_by_state,
query_stats_by_city,
query_stats_by_airport,
query_topn_outgoing_by_state,
query_topn_outgoing_by_city,
query_topn_outgoing_by_airport,
query_topn_outgoing_by_carrier,
query_summary,
query_filtered_options,
)
# get project root dir
# Absolute directory of this module, resolved via inspect so paths work
# regardless of the current working directory.
CURR_DIR = os.path.dirname(inspect.getabsfile(inspect.currentframe()))
ROOT_DIR = os.path.dirname(CURR_DIR)
# Number of rows returned by the top-N outgoing-routes endpoints.
TOPN = 5
@app.teardown_request
def remove_session(ex=None):
    # Release the DB session after every request.  NOTE(review): assumes
    # `session` is a scoped/removable session — confirm in airboard.data.
    session.remove()
# home route
@app.route('/home')
@app.route('/')
def home():
    """Serve the single-page dashboard.

    All map/chart data is fetched client-side from the /data routes, so
    the previously commented-out server-side JSON loading was dead code
    and has been removed.
    """
    return render_template("index.html")
@app.route("/data/state")
def get_state_data():
    """Return the US state coordinate JSON used by the map layer."""
    fname = os.path.join(CURR_DIR, "data", "ext", "616228237_STATE_CORD_V0.json")
    # Use a context manager: json.load(open(...)) leaks the file handle
    # until garbage collection.
    with open(fname) as f:
        states_data = json.load(f)
    return jsonify(states_data)
@app.route("/data/city")
def get_city_data():
    """Return the city coordinate JSON used by the map layer."""
    fname = os.path.join(CURR_DIR, "data", "ext", "616228237_CITY_CORD_V0.json")
    # Use a context manager: json.load(open(...)) leaks the file handle
    # until garbage collection.
    with open(fname) as f:
        states_data = json.load(f)
    return jsonify(states_data)
@app.route("/data/airport")
def get_airport_data():
    """Return the airport master-coordinate JSON used by the map layer."""
    fname = os.path.join(CURR_DIR, "data", "ext", "616228237_AIRPORT_MASTER_CORD_CLEAN_V0.json")
    # Use a context manager: json.load(open(...)) leaks the file handle
    # until garbage collection.
    with open(fname) as f:
        states_data = json.load(f)
    return jsonify(states_data)
@app.route("/data/carrier")
def get_carrier_data():
    """Return the carrier master JSON used by the dashboard."""
    fname = os.path.join(CURR_DIR, "data", "ext", "616228237_CARRIER_MASTER_CORD_CLEAN_V0.json")
    # Use a context manager: json.load(open(...)) leaks the file handle
    # until garbage collection.
    with open(fname) as f:
        states_data = json.load(f)
    return jsonify(states_data)
@app.route("/data/summary_stats.json/<year>")
def get_summary_stats(year):
    """Summary statistics for *year*, filtered by the optional query args."""
    month = [request.args.get("month", default=None, type=int)]
    # Each filter value is wrapped in a one-element list — the shape the
    # query layer expects.  Keys map dict field -> query-arg suffix.
    airport_fields = {"country": "country", "state_code": "state",
                      "city": "city", "airport_code": "airport_code"}
    origin = {key: [request.args.get("origin_" + arg, default=None, type=str)]
              for key, arg in airport_fields.items()}
    dest = {key: [request.args.get("dest_" + arg, default=None, type=str)]
            for key, arg in airport_fields.items()}
    carrier = {key: [request.args.get("carrier_" + key, default=None, type=str)]
               for key in ("code", "name")}
    d = query_summary(year=year, month=month, origin=origin, dest=dest,
                      carrier=carrier)
    return jsonify(d)
@app.route("/data/filtered_options.json/<year>")
def get_filtered_options(year):
    """Filter-option lists for *year*, narrowed by the optional query args."""
    month = [request.args.get("month", default=None, type=int)]
    # Each filter value is wrapped in a one-element list — the shape the
    # query layer expects.  Keys map dict field -> query-arg suffix.
    airport_fields = {"country": "country", "state_code": "state",
                      "city": "city", "airport_code": "airport_code"}
    origin = {key: [request.args.get("origin_" + arg, default=None, type=str)]
              for key, arg in airport_fields.items()}
    dest = {key: [request.args.get("dest_" + arg, default=None, type=str)]
            for key, arg in airport_fields.items()}
    carrier = {key: [request.args.get("carrier_" + key, default=None, type=str)]
               for key in ("code", "name")}
    d = query_filtered_options(year=year, month=month, origin=origin,
                               dest=dest, carrier=carrier)
    return jsonify(d)
@app.route("/data/state/market_domestic_stats.json/<year>")
def get_state_stats(year):
    """Per-state domestic market stats for *year*, with optional filters."""
    month = [request.args.get("month", default=None, type=int)]
    # Each filter value is wrapped in a one-element list — the shape the
    # query layer expects.  Keys map dict field -> query-arg suffix.
    airport_fields = {"country": "country", "state_code": "state",
                      "city": "city", "airport_code": "airport_code"}
    origin = {key: [request.args.get("origin_" + arg, default=None, type=str)]
              for key, arg in airport_fields.items()}
    dest = {key: [request.args.get("dest_" + arg, default=None, type=str)]
            for key, arg in airport_fields.items()}
    carrier = {key: [request.args.get("carrier_" + key, default=None, type=str)]
               for key in ("code", "name")}
    d = query_stats_by_state(year=year, month=month, origin=origin,
                             dest=dest, carrier=carrier, sort_by=None)
    return jsonify(d)
@app.route("/data/city/market_domestic_stats.json/<year>")
def get_city_stats(year):
    """Per-city domestic market stats for *year*, with optional filters."""
    month = [request.args.get("month", default=None, type=int)]
    # Each filter value is wrapped in a one-element list — the shape the
    # query layer expects.  Keys map dict field -> query-arg suffix.
    airport_fields = {"country": "country", "state_code": "state",
                      "city": "city", "airport_code": "airport_code"}
    origin = {key: [request.args.get("origin_" + arg, default=None, type=str)]
              for key, arg in airport_fields.items()}
    dest = {key: [request.args.get("dest_" + arg, default=None, type=str)]
            for key, arg in airport_fields.items()}
    carrier = {key: [request.args.get("carrier_" + key, default=None, type=str)]
               for key in ("code", "name")}
    d = query_stats_by_city(year=year, month=month, origin=origin,
                            dest=dest, carrier=carrier, sort_by=None)
    return jsonify(d)
@app.route("/data/airport/market_domestic_stats.json/<year>")
def get_airport_stats(year):
    """Per-airport domestic market stats for *year*, with optional filters."""
    month = [request.args.get("month", default=None, type=int)]
    # Each filter value is wrapped in a one-element list — the shape the
    # query layer expects.  Keys map dict field -> query-arg suffix.
    airport_fields = {"country": "country", "state_code": "state",
                      "city": "city", "airport_code": "airport_code"}
    origin = {key: [request.args.get("origin_" + arg, default=None, type=str)]
              for key, arg in airport_fields.items()}
    dest = {key: [request.args.get("dest_" + arg, default=None, type=str)]
            for key, arg in airport_fields.items()}
    carrier = {key: [request.args.get("carrier_" + key, default=None, type=str)]
               for key in ("code", "name")}
    d = query_stats_by_airport(year=year, month=month, origin=origin,
                               dest=dest, carrier=carrier, sort_by=None)
    return jsonify(d)
@app.route("/data/state/out/topn_stats.json/<year>/<state_code>/<sort_by>")
def get_state_topn_outgoing_stats(year, state_code, sort_by):
    """Top-N outgoing routes from *state_code* in *year*, ranked by *sort_by*."""
    month = [request.args.get("month", default=None, type=int)]
    # Each filter value is wrapped in a one-element list — the shape the
    # query layer expects.  Keys map dict field -> query-arg suffix.
    airport_fields = {"country": "country", "state_code": "state",
                      "city": "city", "airport_code": "airport_code"}
    origin = {key: [request.args.get("origin_" + arg, default=None, type=str)]
              for key, arg in airport_fields.items()}
    dest = {key: [request.args.get("dest_" + arg, default=None, type=str)]
            for key, arg in airport_fields.items()}
    carrier = {key: [request.args.get("carrier_" + key, default=None, type=str)]
               for key in ("code", "name")}
    d = query_topn_outgoing_by_state(year=year, state_code=state_code,
                                     sort_by=sort_by, month=month,
                                     origin=origin, dest=dest,
                                     carrier=carrier, topn=TOPN)
    return jsonify(d)
@app.route("/data/city/out/topn_stats.json/<year>/<city>/<sort_by>")
def get_city_topn_outgoing_stats(year, city, sort_by):
    """Top-N outgoing routes from *city* in *year*, ranked by *sort_by*."""
    month = [request.args.get("month", default=None, type=int)]
    # Each filter value is wrapped in a one-element list — the shape the
    # query layer expects.  Keys map dict field -> query-arg suffix.
    airport_fields = {"country": "country", "state_code": "state",
                      "city": "city", "airport_code": "airport_code"}
    origin = {key: [request.args.get("origin_" + arg, default=None, type=str)]
              for key, arg in airport_fields.items()}
    dest = {key: [request.args.get("dest_" + arg, default=None, type=str)]
            for key, arg in airport_fields.items()}
    carrier = {key: [request.args.get("carrier_" + key, default=None, type=str)]
               for key in ("code", "name")}
    d = query_topn_outgoing_by_city(year=year, city=city, sort_by=sort_by,
                                    month=month, origin=origin, dest=dest,
                                    carrier=carrier, topn=TOPN)
    return jsonify(d)
@app.route("/data/airport/out/topn_stats.json/<year>/<airport_code>/<sort_by>")
def get_airport_topn_outgoing_stats(year, airport_code, sort_by):
    """Serve the top-N outgoing flight statistics for one airport.

    Optional query-string filters: month, origin_*/dest_* airport fields
    and carrier_code/carrier_name.
    """
    def _arg(name, cast=str):
        # The query layer expects each filter value wrapped in a one-element list.
        return [request.args.get(name, default=None, type=cast)]

    month = _arg("month", int)
    # origin / destination airport filters
    origin = {
        "country": _arg('origin_country'),
        "state_code": _arg('origin_state'),
        "city": _arg('origin_city'),
        "airport_code": _arg('origin_airport_code'),
    }
    dest = {
        "country": _arg('dest_country'),
        "state_code": _arg('dest_state'),
        "city": _arg('dest_city'),
        "airport_code": _arg('dest_airport_code'),
    }
    # carrier filter
    carrier = {
        "code": _arg("carrier_code"),
        "name": _arg("carrier_name"),
    }
    stats = query_topn_outgoing_by_airport(year=year,
                                           airport_code=airport_code,
                                           sort_by=sort_by,
                                           month=month,
                                           origin=origin,
                                           dest=dest,
                                           carrier=carrier,
                                           topn=TOPN)
    return jsonify(stats)
@app.route("/data/carrier/out/topn_stats.json/<year>/<sort_by>")
def get_topn_carrier_outgoing_stats(year, sort_by):
    """Serve the top-N outgoing flight statistics grouped by carrier.

    Optional query-string filters: month, origin_*/dest_* airport fields
    and carrier_code/carrier_name.
    """
    def _arg(name, cast=str):
        # The query layer expects each filter value wrapped in a one-element list.
        return [request.args.get(name, default=None, type=cast)]

    month = _arg("month", int)
    # origin / destination airport filters
    origin = {
        "country": _arg('origin_country'),
        "state_code": _arg('origin_state'),
        "city": _arg('origin_city'),
        "airport_code": _arg('origin_airport_code'),
    }
    dest = {
        "country": _arg('dest_country'),
        "state_code": _arg('dest_state'),
        "city": _arg('dest_city'),
        "airport_code": _arg('dest_airport_code'),
    }
    # carrier filter
    carrier = {
        "code": _arg("carrier_code"),
        "name": _arg("carrier_name"),
    }
    stats = query_topn_outgoing_by_carrier(year=year,
                                           sort_by=sort_by,
                                           month=month,
                                           origin=origin,
                                           dest=dest,
                                           carrier=carrier,
                                           topn=TOPN)
    return jsonify(stats)
| 41.435443
| 102
| 0.624
| 2,022
| 16,367
| 4.892681
| 0.044016
| 0.110078
| 0.140099
| 0.163752
| 0.911554
| 0.885374
| 0.879713
| 0.866471
| 0.866471
| 0.866471
| 0
| 0.004753
| 0.215922
| 16,367
| 394
| 103
| 41.540609
| 0.76615
| 0.070263
| 0
| 0.722433
| 0
| 0
| 0.166535
| 0.037711
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057034
| false
| 0
| 0.019011
| 0.003802
| 0.129278
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9acb32df5708546f0c393762e0606df11c50e241
| 2,473
|
py
|
Python
|
tests/numpy/methods.py
|
RoboticExplorationLab/micropython-ulab
|
b0679e6d16d87f5acb09dee690a71a54d4c2892b
|
[
"MIT"
] | null | null | null |
tests/numpy/methods.py
|
RoboticExplorationLab/micropython-ulab
|
b0679e6d16d87f5acb09dee690a71a54d4c2892b
|
[
"MIT"
] | null | null | null |
tests/numpy/methods.py
|
RoboticExplorationLab/micropython-ulab
|
b0679e6d16d87f5acb09dee690a71a54d4c2892b
|
[
"MIT"
] | null | null | null |
try:
    from ulab import numpy as np
    use_ulab = True
except ImportError:
    import numpy as np
    use_ulab = False

# NumPy deprecated the `np.float` alias in 1.20 and removed it in 1.24,
# while ulab still provides it.  Resolve the float dtype once so this test
# script runs against ulab, old NumPy and current NumPy alike.
if hasattr(np, 'float'):
    float_dtype = np.float
else:
    float_dtype = np.float64

# copy() must produce an equal, independent array for every dtype.
a = np.array([1, 2, 3, 4], dtype=np.int8)
b = a.copy()
print(b)

a = np.array([[1,2,3],[4,5,6],[7,8,9]], dtype=np.int16)
b = a.copy()
print(b)

a = np.array([[1,2,3],[4,5,6],[7,8,9]], dtype=float_dtype)
b = a.copy()
print(b)

if use_ulab:
    # ulab exposes dtype/itemsize/shape/size as methods, not properties.
    print(a.dtype())
    print(a.flatten())
    print(np.array([1,2,3], dtype=np.uint8).itemsize())
    print(np.array([1,2,3], dtype=np.uint16).itemsize())
    print(np.array([1,2,3], dtype=np.int8).itemsize())
    print(np.array([1,2,3], dtype=np.int16).itemsize())
    print(np.array([1,2,3], dtype=float_dtype).itemsize())
    print(np.array([1,2,3], dtype=float_dtype).shape())
    print(np.array([[1],[2],[3]], dtype=float_dtype).shape())
    print(np.array([[1],[2],[3]], dtype=float_dtype).reshape((1,3)))
    print(np.array([[1],[2],[3]], dtype=float_dtype).size())
    print(np.array([1,2,3], dtype=float_dtype).size())
    print(np.array([1,2,3], dtype=np.uint8).tobytes())
    print(np.array([1,2,3], dtype=np.int8).tobytes())
    print(np.array([1,2,3], dtype=float_dtype).transpose().shape())
    print(np.array([[1],[2],[3]], dtype=float_dtype).transpose().shape())
else:
    # CPython NumPy: same checks via the property forms.
    print(a.dtype)
    print(a.flatten())
    print(np.array([1,2,3], dtype=np.uint8).itemsize)
    print(np.array([1,2,3], dtype=np.uint16).itemsize)
    print(np.array([1,2,3], dtype=np.int8).itemsize)
    print(np.array([1,2,3], dtype=np.int16).itemsize)
    print(np.array([1,2,3], dtype=float_dtype).itemsize)
    print(np.array([1,2,3], dtype=float_dtype).shape)
    print(np.array([[1],[2],[3]], dtype=float_dtype).shape)
    print(np.array([[1],[2],[3]], dtype=float_dtype).reshape((1,3)))
    # dtype added for symmetry with the ulab branch; `.size` is 3 either way.
    print(np.array([[1],[2],[3]], dtype=float_dtype).size)
    print(np.array([1,2,3], dtype=float_dtype).size)
    print(np.array([1,2,3], dtype=np.uint8).tobytes())
    print(np.array([1,2,3], dtype=np.int8).tobytes())
    print(np.array([1,2,3], dtype=float_dtype).transpose().shape)
    print(np.array([[1],[2],[3]], dtype=float_dtype).transpose().shape)

# byteswap: inplace=False must leave the source untouched, inplace=True must
# mutate it (uint8 swapping is a no-op but still exercises the code path).
a = np.array([1, 2, 3, 4, 5, 6], dtype=np.uint8)
b = a.byteswap(inplace=False)
print(a)
print(b)
c = a.byteswap(inplace=True)
print(a)
print(c)

a = np.array([1, 2, 3, 4, 5, 6], dtype=np.uint16)
b = a.byteswap(inplace=False)
print(a)
print(b)
c = a.byteswap(inplace=True)
print(a)
print(c)

a = np.array([1, 2, 3, 4, 5, 6], dtype=float_dtype)
b = a.byteswap(inplace=False)
print(a)
print(b)
c = a.byteswap(inplace=True)
print(a)
print(c)
| 34.347222
| 70
| 0.607359
| 465
| 2,473
| 3.223656
| 0.094624
| 0.158773
| 0.181454
| 0.204136
| 0.948632
| 0.933956
| 0.904603
| 0.896598
| 0.896598
| 0.896598
| 0
| 0.069477
| 0.126971
| 2,473
| 71
| 71
| 34.830986
| 0.624826
| 0
| 0
| 0.457143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.042857
| 0
| 0.042857
| 0.671429
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
9af24884b8fcd443f3b90a3be838baf9c9d819a7
| 104
|
py
|
Python
|
sudachipy/__init__.py
|
Kensuke-Mitsuzawa/SudachiPy
|
14f3842537c61b7617d8853cb7f79ef7a86114c5
|
[
"Apache-2.0"
] | null | null | null |
sudachipy/__init__.py
|
Kensuke-Mitsuzawa/SudachiPy
|
14f3842537c61b7617d8853cb7f79ef7a86114c5
|
[
"Apache-2.0"
] | null | null | null |
sudachipy/__init__.py
|
Kensuke-Mitsuzawa/SudachiPy
|
14f3842537c61b7617d8853cb7f79ef7a86114c5
|
[
"Apache-2.0"
] | null | null | null |
from sudachipy import utf8inputtextbuilder
from sudachipy import tokenizer
from sudachipy import config
| 26
| 42
| 0.884615
| 12
| 104
| 7.666667
| 0.5
| 0.423913
| 0.619565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01087
| 0.115385
| 104
| 3
| 43
| 34.666667
| 0.98913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b11cfcc068e8ee330463067a858462ede77e95ad
| 69,850
|
py
|
Python
|
huaweicloud-sdk-cce/huaweicloudsdkcce/v3/cce_client.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 64
|
2020-06-12T07:05:07.000Z
|
2022-03-30T03:32:50.000Z
|
huaweicloud-sdk-cce/huaweicloudsdkcce/v3/cce_client.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 11
|
2020-07-06T07:56:54.000Z
|
2022-01-11T11:14:40.000Z
|
huaweicloud-sdk-cce/huaweicloudsdkcce/v3/cce_client.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 24
|
2020-06-08T11:42:13.000Z
|
2022-03-04T06:44:08.000Z
|
# coding: utf-8
from __future__ import absolute_import
import datetime
import re
import importlib
import six
from huaweicloudsdkcore.client import Client, ClientBuilder
from huaweicloudsdkcore.exceptions import exceptions
from huaweicloudsdkcore.utils import http_utils
from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest
class CceClient(Client):
"""
:param configuration: .Configuration object for this client
:param pool_threads: The number of threads to use for async requests
to the API. More threads means more concurrent API requests.
"""
# Python types the (de)serializer treats as leaf values (no recursion).
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
# Map from OpenAPI scalar type names to native Python types.
# NOTE: `long` exists only on Python 2; on Python 3 the `six.PY3` branch
# selects `int` before the name would ever be evaluated.
NATIVE_TYPES_MAPPING = {
    'int': int,
    'long': int if six.PY3 else long,
    'float': float,
    'str': str,
    'bool': bool,
    'date': datetime.date,
    'datetime': datetime.datetime,
    'object': object,
}
def __init__(self):
    """Initialize the CCE v3 client on top of the core SDK client."""
    super(CceClient, self).__init__()
    # Default header identifying this SDK in every outgoing request.
    self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'}
    # Response models live in a sibling package; resolve it once up front.
    self.model_package = importlib.import_module("huaweicloudsdkcce.v3.model")
@classmethod
def new_builder(cls, clazz=None):
    """Return a ClientBuilder for this client.

    :param clazz: optional client class; when given it must be CceClient.
    :raises TypeError: if a different client class is supplied.
    """
    if clazz is None:
        return ClientBuilder(cls)
    if clazz.__name__ == "CceClient":
        return ClientBuilder(clazz)
    raise TypeError("client type error, support client type is CceClient")
def add_node(self, request):
    """Accept (adopt) existing servers as nodes of the specified cluster.

    :param AddNodeRequest request
    :return: AddNodeResponse
    """
    return self.add_node_with_http_info(request)
def add_node_with_http_info(self, request):
    """Build and dispatch the HTTP call behind :meth:`add_node`.

    :param AddNodeRequest request
    :return: AddNodeResponse
    """
    all_params = ['cluster_id', 'add_node_list']

    # Snapshot every attribute that was actually set on the request object.
    params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    if 'cluster_id' in params:
        path_params['cluster_id'] = params['cluster_id']

    query_params = []
    header_params = {}
    form_params = {}

    body_params = params.get('body')
    if isinstance(request, SdkStreamRequest):
        # Streaming requests carry their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodes/add',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='AddNodeResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def awake_cluster(self, request):
    """Wake up a hibernated cluster; billing of master resources resumes.

    :param AwakeClusterRequest request
    :return: AwakeClusterResponse
    """
    return self.awake_cluster_with_http_info(request)
def awake_cluster_with_http_info(self, request):
    """Build and dispatch the HTTP call behind :meth:`awake_cluster`.

    :param AwakeClusterRequest request
    :return: AwakeClusterResponse
    """
    all_params = ['cluster_id']

    # Snapshot every attribute that was actually set on the request object.
    params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    if 'cluster_id' in params:
        path_params['cluster_id'] = params['cluster_id']

    query_params = []
    header_params = {}
    form_params = {}

    # This operation sends no JSON body; only a stream request carries one.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/operation/awake',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='AwakeClusterResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_addon_instance(self, request):
    """Install an add-on instance from the given add-on template.

    :param CreateAddonInstanceRequest request
    :return: CreateAddonInstanceResponse
    """
    return self.create_addon_instance_with_http_info(request)
def create_addon_instance_with_http_info(self, request):
    """Build and dispatch the HTTP call behind :meth:`create_addon_instance`.

    :param CreateAddonInstanceRequest request
    :return: CreateAddonInstanceResponse
    """
    all_params = ['create_addon_instance_request_body']

    # Snapshot every attribute that was actually set on the request object.
    params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    body_params = params.get('body')
    if isinstance(request, SdkStreamRequest):
        # Streaming requests carry their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/api/v3/addons',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateAddonInstanceResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_cloud_persistent_volume_claims(self, request):
    """Create a PVC in a namespace backed by cloud storage (EVS/SFS/OBS).

    :param CreateCloudPersistentVolumeClaimsRequest request
    :return: CreateCloudPersistentVolumeClaimsResponse
    """
    return self.create_cloud_persistent_volume_claims_with_http_info(request)
def create_cloud_persistent_volume_claims_with_http_info(self, request):
    """Build and dispatch the HTTP call behind
    :meth:`create_cloud_persistent_volume_claims`.

    :param CreateCloudPersistentVolumeClaimsRequest request
    :return: CreateCloudPersistentVolumeClaimsResponse
    """
    all_params = ['namespace', 'create_cloud_persistent_volume_claims_request_body', 'x_cluster_id']

    # Snapshot every attribute that was actually set on the request object.
    params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    query_params = []

    header_params = {}
    # The target cluster is addressed through a header on this endpoint.
    if 'x_cluster_id' in params:
        header_params['X-Cluster-ID'] = params['x_cluster_id']

    form_params = {}

    body_params = params.get('body')
    if isinstance(request, SdkStreamRequest):
        # Streaming requests carry their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/api/v1/namespaces/{namespace}/cloudpersistentvolumeclaims',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateCloudPersistentVolumeClaimsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_cluster(self, request):
    """Create an empty cluster (master nodes only, no worker nodes).

    Worker nodes are added afterwards through the create-node API.

    :param CreateClusterRequest request
    :return: CreateClusterResponse
    """
    return self.create_cluster_with_http_info(request)
def create_cluster_with_http_info(self, request):
    """Build and dispatch the HTTP call behind :meth:`create_cluster`.

    :param CreateClusterRequest request
    :return: CreateClusterResponse
    """
    all_params = ['create_cluster_request_body']

    # Snapshot every attribute that was actually set on the request object.
    params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    body_params = params.get('body')
    if isinstance(request, SdkStreamRequest):
        # Streaming requests carry their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateClusterResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_kubernetes_cluster_cert(self, request):
    """Obtain the certificates of the specified cluster.

    :param CreateKubernetesClusterCertRequest request
    :return: CreateKubernetesClusterCertResponse
    """
    return self.create_kubernetes_cluster_cert_with_http_info(request)
def create_kubernetes_cluster_cert_with_http_info(self, request):
    """Build and dispatch the HTTP call behind
    :meth:`create_kubernetes_cluster_cert`.

    :param CreateKubernetesClusterCertRequest request
    :return: CreateKubernetesClusterCertResponse
    """
    all_params = ['cluster_id', 'create_kubernetes_cluster_cert_request_body']

    # Snapshot every attribute that was actually set on the request object.
    params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    if 'cluster_id' in params:
        path_params['cluster_id'] = params['cluster_id']

    query_params = []
    header_params = {}
    form_params = {}

    body_params = params.get('body')
    if isinstance(request, SdkStreamRequest):
        # Streaming requests carry their payload as a file stream.
        body_params = request.get_file_stream()

    # This endpoint returns a meaningful Port-ID response header.
    response_headers = ["Port-ID"]
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/clustercert',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateKubernetesClusterCertResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_node(self, request):
    """Create a node in the specified cluster.

    :param CreateNodeRequest request
    :return: CreateNodeResponse
    """
    return self.create_node_with_http_info(request)
def create_node_with_http_info(self, request):
    """Build and dispatch the HTTP call behind :meth:`create_node`.

    :param CreateNodeRequest request
    :return: CreateNodeResponse
    """
    all_params = ['cluster_id', 'create_node_request_body', 'nodepool_scale_up']

    # Snapshot every attribute that was actually set on the request object.
    params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    if 'cluster_id' in params:
        path_params['cluster_id'] = params['cluster_id']

    query_params = []
    # The wire name uses camelCase while the attribute is snake_case.
    if 'nodepool_scale_up' in params:
        query_params.append(('nodepoolScaleUp', params['nodepool_scale_up']))

    header_params = {}
    form_params = {}

    body_params = params.get('body')
    if isinstance(request, SdkStreamRequest):
        # Streaming requests carry their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodes',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateNodeResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_node_pool(self, request):
    """Create a node pool in the specified cluster.

    Only callable while the cluster is available or scaling in/out.

    :param CreateNodePoolRequest request
    :return: CreateNodePoolResponse
    """
    return self.create_node_pool_with_http_info(request)
def create_node_pool_with_http_info(self, request):
    """Build and dispatch the HTTP call behind :meth:`create_node_pool`.

    :param CreateNodePoolRequest request
    :return: CreateNodePoolResponse
    """
    all_params = ['cluster_id', 'create_node_pool_request_body']

    # Snapshot every attribute that was actually set on the request object.
    params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    if 'cluster_id' in params:
        path_params['cluster_id'] = params['cluster_id']

    query_params = []
    header_params = {}
    form_params = {}

    body_params = params.get('body')
    if isinstance(request, SdkStreamRequest):
        # Streaming requests carry their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodepools',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateNodePoolResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_addon_instance(self, request):
    """Delete an add-on instance.

    :param DeleteAddonInstanceRequest request
    :return: DeleteAddonInstanceResponse
    """
    return self.delete_addon_instance_with_http_info(request)
def delete_addon_instance_with_http_info(self, request):
    """Build and dispatch the HTTP call behind :meth:`delete_addon_instance`.

    :param DeleteAddonInstanceRequest request
    :return: DeleteAddonInstanceResponse
    """
    all_params = ['id', 'cluster_id']

    # Snapshot every attribute that was actually set on the request object.
    params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = []
    if 'cluster_id' in params:
        query_params.append(('cluster_id', params['cluster_id']))

    header_params = {}
    form_params = {}

    # This operation sends no JSON body; only a stream request carries one.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/api/v3/addons/{id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteAddonInstanceResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_cloud_persistent_volume_claims(self, request):
    """Delete a PVC in a namespace, optionally keeping the backing storage.

    :param DeleteCloudPersistentVolumeClaimsRequest request
    :return: DeleteCloudPersistentVolumeClaimsResponse
    """
    return self.delete_cloud_persistent_volume_claims_with_http_info(request)
def delete_cloud_persistent_volume_claims_with_http_info(self, request):
    """Build and dispatch the HTTP call behind
    :meth:`delete_cloud_persistent_volume_claims`.

    :param DeleteCloudPersistentVolumeClaimsRequest request
    :return: DeleteCloudPersistentVolumeClaimsResponse
    """
    all_params = ['name', 'namespace', 'delete_volume', 'storage_type', 'x_cluster_id']

    # Snapshot every attribute that was actually set on the request object.
    params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('name', 'namespace'):
        if key in params:
            path_params[key] = params[key]

    query_params = []
    # Wire names are camelCase while the request attributes are snake_case.
    for attr_name, wire_name in (('delete_volume', 'deleteVolume'),
                                 ('storage_type', 'storageType')):
        if attr_name in params:
            query_params.append((wire_name, params[attr_name]))

    header_params = {}
    # The target cluster is addressed through a header on this endpoint.
    if 'x_cluster_id' in params:
        header_params['X-Cluster-ID'] = params['x_cluster_id']

    form_params = {}

    # This operation sends no JSON body; only a stream request carries one.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/api/v1/namespaces/{namespace}/cloudpersistentvolumeclaims/{name}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteCloudPersistentVolumeClaimsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_cluster(self, request):
    """Delete the specified cluster.

    :param DeleteClusterRequest request
    :return: DeleteClusterResponse
    """
    return self.delete_cluster_with_http_info(request)
def delete_cluster_with_http_info(self, request):
    """Build and dispatch the HTTP call behind :meth:`delete_cluster`.

    :param DeleteClusterRequest request
    :return: DeleteClusterResponse
    """
    all_params = ['cluster_id', 'delete_efs', 'delete_eni', 'delete_evs', 'delete_net', 'delete_obs', 'delete_sfs', 'tobedeleted']

    # Snapshot every attribute that was actually set on the request object.
    params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    if 'cluster_id' in params:
        path_params['cluster_id'] = params['cluster_id']

    query_params = []
    # Every deletion-policy switch shares its wire name with the request
    # attribute, so they can all be forwarded in one pass (order preserved).
    for key in ('delete_efs', 'delete_eni', 'delete_evs', 'delete_net',
                'delete_obs', 'delete_sfs', 'tobedeleted'):
        if key in params:
            query_params.append((key, params[key]))

    header_params = {}
    form_params = {}

    # This operation sends no JSON body; only a stream request carries one.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteClusterResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_node(self, request):
    """Delete the specified node.

    :param DeleteNodeRequest request
    :return: DeleteNodeResponse
    """
    return self.delete_node_with_http_info(request)
def delete_node_with_http_info(self, request):
    """Delete a node, returning full HTTP info.

    This API deletes the specified node.

    :param DeleteNodeRequest request
    :return: DeleteNodeResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    for key in ('cluster_id', 'node_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    if 'nodepool_scale_down' in local_var_params:
        query_params.append(('nodepoolScaleDown', local_var_params['nodepool_scale_down']))

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodes/{node_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='DeleteNodeResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def delete_node_pool(self, request):
    """Delete a node pool.

    This API deletes the specified node pool.

    :param DeleteNodePoolRequest request
    :return: DeleteNodePoolResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.delete_node_pool_with_http_info(request)
    return response
def delete_node_pool_with_http_info(self, request):
    """Delete a node pool, returning full HTTP info.

    This API deletes the specified node pool.

    :param DeleteNodePoolRequest request
    :return: DeleteNodePoolResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    for key in ('cluster_id', 'nodepool_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodepools/{nodepool_id}',
        method='DELETE',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='DeleteNodePoolResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def hibernate_cluster(self, request):
    """Hibernate a cluster.

    Puts a running cluster into hibernation; while hibernated, control-plane
    resource fees are no longer charged.

    :param HibernateClusterRequest request
    :return: HibernateClusterResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.hibernate_cluster_with_http_info(request)
    return response
def hibernate_cluster_with_http_info(self, request):
    """Hibernate a cluster, returning full HTTP info.

    Puts a running cluster into hibernation; while hibernated, control-plane
    resource fees are no longer charged.

    :param HibernateClusterRequest request
    :return: HibernateClusterResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    if 'cluster_id' in local_var_params:
        path_params['cluster_id'] = local_var_params['cluster_id']

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/operation/hibernate',
        method='POST',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='HibernateClusterResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def list_addon_instances(self, request):
    """List AddonInstance objects.

    Returns all add-on instances installed in a cluster.

    :param ListAddonInstancesRequest request
    :return: ListAddonInstancesResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.list_addon_instances_with_http_info(request)
    return response
def list_addon_instances_with_http_info(self, request):
    """List AddonInstance objects, returning full HTTP info.

    Returns all add-on instances installed in a cluster.

    :param ListAddonInstancesRequest request
    :return: ListAddonInstancesResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    # cluster_id is a query parameter for this endpoint, not a path one.
    query_params = []
    if 'cluster_id' in local_var_params:
        query_params.append(('cluster_id', local_var_params['cluster_id']))

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/addons',
        method='GET',
        path_params={},
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListAddonInstancesResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def list_addon_templates(self, request):
    """Query the AddonTemplates list.

    Add-on template query interface; returns add-on information.

    :param ListAddonTemplatesRequest request
    :return: ListAddonTemplatesResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.list_addon_templates_with_http_info(request)
    return response
def list_addon_templates_with_http_info(self, request):
    """Query the AddonTemplates list, returning full HTTP info.

    Add-on template query interface; returns add-on information.

    :param ListAddonTemplatesRequest request
    :return: ListAddonTemplatesResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    query_params = []
    if 'addon_template_name' in local_var_params:
        query_params.append(('addon_template_name', local_var_params['addon_template_name']))

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/addontemplates',
        method='GET',
        path_params={},
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListAddonTemplatesResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def list_clusters(self, request):
    """List clusters in a project.

    This API returns detailed information about every cluster in the
    specified project.

    :param ListClustersRequest request
    :return: ListClustersResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.list_clusters_with_http_info(request)
    return response
def list_clusters_with_http_info(self, request):
    """List clusters in a project, returning full HTTP info.

    This API returns detailed information about every cluster in the
    specified project.

    :param ListClustersRequest request
    :return: ListClustersResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    # Query-parameter order matches the generated client's ordering.
    query_params = []
    for key in ('detail', 'status', 'type', 'version'):
        if key in local_var_params:
            query_params.append((key, local_var_params[key]))

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters',
        method='GET',
        path_params={},
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListClustersResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def list_node_pools(self, request):
    """List all node pools in a cluster.

    A node pool is a subset of node instances in a cluster that share the
    same configuration.

    :param ListNodePoolsRequest request
    :return: ListNodePoolsResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.list_node_pools_with_http_info(request)
    return response
def list_node_pools_with_http_info(self, request):
    """List all node pools in a cluster, returning full HTTP info.

    A node pool is a subset of node instances in a cluster that share the
    same configuration.

    :param ListNodePoolsRequest request
    :return: ListNodePoolsResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        if hasattr(request, attr) else None
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    if 'cluster_id' in local_var_params:
        path_params['cluster_id'] = local_var_params['cluster_id']

    query_params = []
    if 'show_default_node_pool' in local_var_params:
        query_params.append(('showDefaultNodePool', local_var_params['show_default_node_pool']))

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodepools',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListNodePoolsResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def list_nodes(self, request):
    """List all nodes in a cluster.

    This API returns detailed information about every node in the cluster
    identified by its cluster ID.

    :param ListNodesRequest request
    :return: ListNodesResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.list_nodes_with_http_info(request)
    return response
def list_nodes_with_http_info(self, request):
    """List all nodes in a cluster, returning full HTTP info.

    This API returns detailed information about every node in the cluster
    identified by its cluster ID.

    :param ListNodesRequest request
    :return: ListNodesResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    if 'cluster_id' in local_var_params:
        path_params['cluster_id'] = local_var_params['cluster_id']

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodes',
        method='GET',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListNodesResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def migrate_node(self, request):
    """Migrate nodes.

    This API migrates nodes from the specified cluster to another cluster.

    :param MigrateNodeRequest request
    :return: MigrateNodeResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.migrate_node_with_http_info(request)
    return response
def migrate_node_with_http_info(self, request):
    """Migrate nodes, returning full HTTP info.

    This API migrates nodes from the specified cluster to another cluster.

    :param MigrateNodeRequest request
    :return: MigrateNodeResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    for key in ('cluster_id', 'target_cluster_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # The migration task description travels in the request body;
    # streaming requests override it with their file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodes/operation/migrateto/{target_cluster_id}',
        method='PUT',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='MigrateNodeResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def remove_node(self, request):
    """Remove nodes.

    This API removes nodes from the specified cluster.

    :param RemoveNodeRequest request
    :return: RemoveNodeResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.remove_node_with_http_info(request)
    return response
def remove_node_with_http_info(self, request):
    """Remove nodes, returning full HTTP info.

    This API removes nodes from the specified cluster.

    :param RemoveNodeRequest request
    :return: RemoveNodeResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    if 'cluster_id' in local_var_params:
        path_params['cluster_id'] = local_var_params['cluster_id']

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # The removal task description travels in the request body;
    # streaming requests override it with their file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodes/operation/remove',
        method='PUT',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='RemoveNodeResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def reset_node(self, request):
    """Reset nodes.

    This API resets nodes in the specified cluster.

    :param ResetNodeRequest request
    :return: ResetNodeResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.reset_node_with_http_info(request)
    return response
def reset_node_with_http_info(self, request):
    """Reset nodes, returning full HTTP info.

    This API resets nodes in the specified cluster.

    :param ResetNodeRequest request
    :return: ResetNodeResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    if 'cluster_id' in local_var_params:
        path_params['cluster_id'] = local_var_params['cluster_id']

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # The reset task description travels in the request body;
    # streaming requests override it with their file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodes/reset',
        method='POST',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ResetNodeResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def show_addon_instance(self, request):
    """Get AddonInstance details.

    Returns the details of an add-on instance.

    :param ShowAddonInstanceRequest request
    :return: ShowAddonInstanceResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.show_addon_instance_with_http_info(request)
    return response
def show_addon_instance_with_http_info(self, request):
    """Get AddonInstance details, returning full HTTP info.

    Returns the details of an add-on instance.

    :param ShowAddonInstanceRequest request
    :return: ShowAddonInstanceResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']

    # cluster_id selects the cluster the add-on instance belongs to.
    query_params = []
    if 'cluster_id' in local_var_params:
        query_params.append(('cluster_id', local_var_params['cluster_id']))

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/addons/{id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ShowAddonInstanceResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def show_cluster(self, request):
    """Get a specific cluster.

    This API returns detailed information about the specified cluster.

    :param ShowClusterRequest request
    :return: ShowClusterResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.show_cluster_with_http_info(request)
    return response
def show_cluster_with_http_info(self, request):
    """Get a specific cluster, returning full HTTP info.

    This API returns detailed information about the specified cluster.

    :param ShowClusterRequest request
    :return: ShowClusterResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    if 'cluster_id' in local_var_params:
        path_params['cluster_id'] = local_var_params['cluster_id']

    query_params = []
    if 'detail' in local_var_params:
        query_params.append(('detail', local_var_params['detail']))

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ShowClusterResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def show_job(self, request):
    """Get job information.

    Queries the progress of a task by the job ID returned when the task
    request was issued (e.g. cluster or node creation/deletion).

    :param ShowJobRequest request
    :return: ShowJobResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.show_job_with_http_info(request)
    return response
def show_job_with_http_info(self, request):
    """Get job information, returning full HTTP info.

    Queries the progress of a task by the job ID returned when the task
    request was issued (e.g. cluster or node creation/deletion).

    :param ShowJobRequest request
    :return: ShowJobResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    if 'job_id' in local_var_params:
        path_params['job_id'] = local_var_params['job_id']

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/jobs/{job_id}',
        method='GET',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ShowJobResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def show_node(self, request):
    """Get a specific node.

    This API returns detailed information about a node by its node ID.

    :param ShowNodeRequest request
    :return: ShowNodeResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.show_node_with_http_info(request)
    return response
def show_node_with_http_info(self, request):
    """Get a specific node, returning full HTTP info.

    This API returns detailed information about a node by its node ID.

    :param ShowNodeRequest request
    :return: ShowNodeResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    for key in ('cluster_id', 'node_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodes/{node_id}',
        method='GET',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ShowNodeResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def show_node_pool(self, request):
    """Get a specific node pool.

    This API returns detailed information about the specified node pool.

    :param ShowNodePoolRequest request
    :return: ShowNodePoolResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.show_node_pool_with_http_info(request)
    return response
def show_node_pool_with_http_info(self, request):
    """Get a specific node pool, returning full HTTP info.

    This API returns detailed information about the specified node pool.

    :param ShowNodePoolRequest request
    :return: ShowNodePoolResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    for key in ('cluster_id', 'nodepool_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # Streaming requests carry their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodepools/{nodepool_id}',
        method='GET',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ShowNodePoolResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def update_addon_instance(self, request):
    """Update an AddonInstance.

    Updates the configuration of an add-on instance.

    :param UpdateAddonInstanceRequest request
    :return: UpdateAddonInstanceResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.update_addon_instance_with_http_info(request)
    return response
def update_addon_instance_with_http_info(self, request):
    """Update an AddonInstance, returning full HTTP info.

    Updates the configuration of an add-on instance.

    :param UpdateAddonInstanceRequest request
    :return: UpdateAddonInstanceResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # The update payload travels in the request body;
    # streaming requests override it with their file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/api/v3/addons/{id}',
        method='PUT',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='UpdateAddonInstanceResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def update_cluster(self, request):
    """Update a cluster.

    This API updates the specified cluster.

    :param UpdateClusterRequest request
    :return: UpdateClusterResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.update_cluster_with_http_info(request)
    return response
def update_cluster_with_http_info(self, request):
    """Update a cluster, returning full HTTP info.

    This API updates the specified cluster.

    :param UpdateClusterRequest request
    :return: UpdateClusterResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    if 'cluster_id' in local_var_params:
        path_params['cluster_id'] = local_var_params['cluster_id']

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # The update payload travels in the request body;
    # streaming requests override it with their file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}',
        method='PUT',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='UpdateClusterResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def update_node(self, request):
    """Update a node.

    This API updates the specified node. Per the original docs, only the
    metadata.name field (the node's name) can currently be updated.

    :param UpdateNodeRequest request
    :return: UpdateNodeResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.update_node_with_http_info(request)
    return response
def update_node_with_http_info(self, request):
    """Update a node, returning full HTTP info.

    This API updates the specified node. Per the original docs, only the
    metadata.name field (the node's name) can currently be updated.

    :param UpdateNodeRequest request
    :return: UpdateNodeResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    for key in ('cluster_id', 'node_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # The update payload travels in the request body;
    # streaming requests override it with their file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodes/{node_id}',
        method='PUT',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='UpdateNodeResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def update_node_pool(self, request):
    """Update a node pool.

    This API updates the specified node pool. It may only be called while
    the cluster is available, scaling out, or scaling in.

    :param UpdateNodePoolRequest request
    :return: UpdateNodePoolResponse
    """
    # Delegate to the *_with_http_info variant, which does the real work.
    response = self.update_node_pool_with_http_info(request)
    return response
def update_node_pool_with_http_info(self, request):
    """Update a node pool, returning full HTTP info.

    This API updates the specified node pool. It may only be called while
    the cluster is available, scaling out, or scaling in.

    :param UpdateNodePoolRequest request
    :return: UpdateNodePoolResponse
    """
    # Pick up only the attributes actually set on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    path_params = {}
    for key in ('cluster_id', 'nodepool_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    header_params = {
        'Content-Type': http_utils.select_header_content_type(['application/json']),
    }

    # The update payload travels in the request body;
    # streaming requests override it with their file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/api/v3/projects/{project_id}/clusters/{cluster_id}/nodepools/{nodepool_id}',
        method='PUT',
        path_params=path_params,
        query_params=[],
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='UpdateNodePoolResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats={},
        request_type=request.__class__.__name__)
def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,
             post_params=None, response_type=None, response_headers=None, auth_settings=None,
             collection_formats=None, request_type=None):
    """Make the HTTP request and return the deserialized response.

    :param resource_path: Path to method endpoint.
    :param method: HTTP method to call.
    :param path_params: Path parameters in the url.
    :param query_params: Query parameters in the url.
    :param header_params: Header parameters to be placed in the request header.
    :param body: Request body.
    :param post_params dict: Request post form parameters,
        for `application/x-www-form-urlencoded`, `multipart/form-data`.
    :param auth_settings list: Auth Settings names for the request.
    :param response_type: Response data type.
    :param response_headers: Headers that should be added to response data.
    :param collection_formats: dict of collection formats for path, query,
        header, and post parameters.
    :param request_type: Request data type.
    :return: The response, returned directly.
    """
    # NOTE(review): auth_settings is accepted but not forwarded to
    # do_http_request — presumably handled elsewhere; confirm upstream.
    request_kwargs = dict(
        method=method,
        resource_path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body,
        post_params=post_params,
        response_type=response_type,
        response_headers=response_headers,
        collection_formats=collection_formats,
        request_type=request_type,
    )
    return self.do_http_request(**request_kwargs)
| 32.979226
| 820
| 0.638053
| 7,058
| 69,850
| 5.943185
| 0.058515
| 0.038906
| 0.068086
| 0.027082
| 0.896679
| 0.887072
| 0.859227
| 0.833671
| 0.824922
| 0.789925
| 0
| 0.002623
| 0.26869
| 69,850
| 2,117
| 821
| 32.994804
| 0.818547
| 0.185884
| 0
| 0.802805
| 0
| 0.000825
| 0.114787
| 0.045249
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05363
| false
| 0
| 0.008251
| 0
| 0.117987
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b12bf172edbe9c86b6067d3b98e45c4fa9e91321
| 75
|
py
|
Python
|
samplyser/pitch/__init__.py
|
uummoo/samplyser
|
5fe245332b3488a9d3beac87914ef4d309ef4451
|
[
"MIT"
] | null | null | null |
samplyser/pitch/__init__.py
|
uummoo/samplyser
|
5fe245332b3488a9d3beac87914ef4d309ef4451
|
[
"MIT"
] | null | null | null |
samplyser/pitch/__init__.py
|
uummoo/samplyser
|
5fe245332b3488a9d3beac87914ef4d309ef4451
|
[
"MIT"
] | null | null | null |
from samplyser.pitch import parabolic
from samplyser.pitch import detector
| 25
| 37
| 0.866667
| 10
| 75
| 6.5
| 0.6
| 0.4
| 0.553846
| 0.738462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106667
| 75
| 2
| 38
| 37.5
| 0.970149
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b13539462a3c393895b9ed0dd10a960e5b8582dd
| 45
|
py
|
Python
|
modules/app/groups/__init__.py
|
uwrit/kpmp_user_portal
|
d20ce6290a0ad64f63a6f043bf6ff061b4968953
|
[
"BSD-3-Clause"
] | 1
|
2019-07-09T17:14:40.000Z
|
2019-07-09T17:14:40.000Z
|
modules/app/groups/__init__.py
|
uwrit/kpmp-user-portal
|
d20ce6290a0ad64f63a6f043bf6ff061b4968953
|
[
"BSD-3-Clause"
] | 7
|
2019-03-17T02:19:21.000Z
|
2019-03-29T15:11:53.000Z
|
modules/app/groups/__init__.py
|
uwrit/kpmp_user_portal
|
d20ce6290a0ad64f63a6f043bf6ff061b4968953
|
[
"BSD-3-Clause"
] | null | null | null |
from .groups import get_for_one, get_for_many
| 45
| 45
| 0.866667
| 9
| 45
| 3.888889
| 0.777778
| 0.342857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 45
| 1
| 45
| 45
| 0.853659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b14eac6c3123c7cf5877037ae65a34a6c65ce1c6
| 4,083
|
py
|
Python
|
tests/unit/test_models_other.py
|
timgates42/lore
|
cfca2b459e2b333b8400c0751be2856cedb5eedd
|
[
"MIT"
] | 1,549
|
2017-11-09T10:45:41.000Z
|
2022-02-25T02:11:43.000Z
|
tests/unit/test_models_other.py
|
timgates42/lore
|
cfca2b459e2b333b8400c0751be2856cedb5eedd
|
[
"MIT"
] | 65
|
2018-01-08T22:57:59.000Z
|
2022-03-07T18:32:08.000Z
|
tests/unit/test_models_other.py
|
timgates42/lore
|
cfca2b459e2b333b8400c0751be2856cedb5eedd
|
[
"MIT"
] | 129
|
2018-01-10T21:39:30.000Z
|
2022-03-18T05:40:00.000Z
|
import unittest
import tests.mocks.models_other
import numpy
class TestXGBoostRegression(unittest.TestCase):
    """Round-trip tests for the XGBoost regression mock model."""

    def test_lifecycle(self):
        """fit -> save -> load should preserve the fitting marker."""
        original = tests.mocks.models_other.XGBoostRegression()
        original.fit()
        original.save()
        restored = tests.mocks.models_other.XGBoostRegression.load()
        self.assertEqual(restored.fitting, original.fitting)
class TestXGBoostBinaryClassifier(unittest.TestCase):
    """Lifecycle and probability-output tests for the XGBoost binary classifier mock."""

    def test_lifecycle(self):
        """fit -> save -> load should preserve the fitting marker."""
        original = tests.mocks.models_other.XGBoostBinaryClassifier()
        original.fit()
        original.save()
        restored = tests.mocks.models_other.XGBoostBinaryClassifier.load()
        self.assertEqual(restored.fitting, original.fitting)

    def test_probs(self):
        """predict_proba on the test split should run without error."""
        classifier = tests.mocks.models_other.XGBoostBinaryClassifier()
        classifier.fit()
        classifier.predict_proba(classifier.pipeline.test_data)
        assert True
class TestSKLearn(unittest.TestCase):
    """Lifecycle and hook-invocation tests for the SVM mock model."""

    def test_lifecycle(self):
        """fit -> save -> load should preserve the fitting marker."""
        original = tests.mocks.models_other.SVM()
        original.fit()
        original.save()
        restored = tests.mocks.models_other.SVM.load()
        self.assertEqual(restored.fitting, original.fitting)

    def test_before_after_hooks(self):
        """Every before_/after_ hook should fire during fit + predict."""
        model = tests.mocks.models_other.SVM()
        model.fit(test=True, score=True)
        model.predict(model.pipeline.test_data)
        # Same flags the hooks set, checked in the same order as before.
        for stage in ('fit', 'predict', 'evaluate', 'score'):
            for moment in ('before', 'after'):
                self.assertTrue(getattr(model, 'called_%s_%s' % (moment, stage)))
class TestOneHotBinaryClassifier(unittest.TestCase):
    """Lifecycle and hook-invocation tests for the one-hot binary classifier mock."""

    def test_lifecycle(self):
        """fit -> save -> load should preserve the fitting marker."""
        original = tests.mocks.models_other.OneHotBinaryClassifier()
        original.fit()
        original.save()
        restored = tests.mocks.models_other.OneHotBinaryClassifier.load()
        self.assertEqual(restored.fitting, original.fitting)

    def test_before_after_hooks(self):
        """Every before_/after_ hook should fire during fit + predict."""
        model = tests.mocks.models_other.OneHotBinaryClassifier()
        model.fit(test=True, score=True)
        model.predict(model.pipeline.test_data)
        # Same flags the hooks set, checked in the same order as before.
        for stage in ('fit', 'predict', 'evaluate', 'score'):
            for moment in ('before', 'after'):
                self.assertTrue(getattr(model, 'called_%s_%s' % (moment, stage)))
class TestNaiveBinaryClassifier(unittest.TestCase):
    """Lifecycle, hook, and output-value tests for the naive binary classifier mock."""

    def test_lifecycle(self):
        """fit -> save -> load should preserve the fitting marker."""
        original = tests.mocks.models_other.NaiveBinaryClassifier()
        original.fit()
        original.save()
        restored = tests.mocks.models_other.NaiveBinaryClassifier.load()
        self.assertEqual(restored.fitting, original.fitting)

    def test_before_after_hooks(self):
        """Every before_/after_ hook should fire during fit + predict."""
        model = tests.mocks.models_other.NaiveBinaryClassifier()
        model.fit(test=True, score=True)
        model.predict(model.pipeline.test_data)
        # Same flags the hooks set, checked in the same order as before.
        for stage in ('fit', 'predict', 'evaluate', 'score'):
            for moment in ('before', 'after'):
                self.assertTrue(getattr(model, 'called_%s_%s' % (moment, stage)))

    def test_preds(self):
        """A naive classifier should predict the majority class (1) everywhere."""
        model = tests.mocks.models_other.NaiveBinaryClassifier()
        model.fit(test=True, score=True)
        predictions = model.predict(model.pipeline.test_data)
        self.assertTrue((predictions == 1).all())

    def test_probs(self):
        """Positive-class probabilities should all be ~0.667 (within 1e-3)."""
        model = tests.mocks.models_other.NaiveBinaryClassifier()
        model.fit(test=True, score=True)
        positive_probs = model.predict_proba(model.pipeline.test_data)[:, 1]
        self.assertTrue((numpy.abs(positive_probs - 0.667) < 0.001).all())
| 36.132743
| 72
| 0.707323
| 464
| 4,083
| 6.030172
| 0.109914
| 0.130093
| 0.162974
| 0.214439
| 0.908149
| 0.871337
| 0.871337
| 0.830236
| 0.813438
| 0.689778
| 0
| 0.003036
| 0.19324
| 4,083
| 112
| 73
| 36.455357
| 0.846387
| 0
| 0
| 0.764045
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.359551
| 1
| 0.123596
| false
| 0
| 0.033708
| 0
| 0.213483
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4922e0c3539c92f17aca10136f56daaa75b60892
| 295
|
py
|
Python
|
serialkeys/__init__.py
|
Ben-Bock/serialkeys
|
308212a3dd2a3fb5689d5cb65a89428a0cf95f71
|
[
"MIT"
] | null | null | null |
serialkeys/__init__.py
|
Ben-Bock/serialkeys
|
308212a3dd2a3fb5689d5cb65a89428a0cf95f71
|
[
"MIT"
] | null | null | null |
serialkeys/__init__.py
|
Ben-Bock/serialkeys
|
308212a3dd2a3fb5689d5cb65a89428a0cf95f71
|
[
"MIT"
] | null | null | null |
from .serialkeys import Serialkeys
from .exceptions import Exception_OnlyPositiveCount
from .exceptions import Exception_AlphabetTooSmall
from .exceptions import Exception_ChunkLenghtNotPositive
from .exceptions import Exception_KeyTooShort
from .exceptions import Exception_TooManyKeysRequested
| 49.166667
| 56
| 0.901695
| 29
| 295
| 9
| 0.344828
| 0.268199
| 0.383142
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077966
| 295
| 6
| 57
| 49.166667
| 0.959559
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
495b5ee261f2699e63c41d5822661ff3e09cf8e6
| 153
|
py
|
Python
|
quote.py
|
G4me4u/typeformer
|
247b50aa4cee2a0f2e0d224cb3eaebbb2f3b6bc0
|
[
"MIT"
] | null | null | null |
quote.py
|
G4me4u/typeformer
|
247b50aa4cee2a0f2e0d224cb3eaebbb2f3b6bc0
|
[
"MIT"
] | null | null | null |
quote.py
|
G4me4u/typeformer
|
247b50aa4cee2a0f2e0d224cb3eaebbb2f3b6bc0
|
[
"MIT"
] | null | null | null |
class Quote:
    """A quotation paired with its attributed origin.

    Fixes over the previous version:
    - ``__str__`` used ``+`` concatenation, which raised TypeError
      when ``text`` or ``origin`` was not a string; the f-string form
      stringifies any value while producing identical output for
      string inputs.
    - Added ``__repr__`` for debuggability.
    """

    def __init__(self, text, origin):
        """
        :param text: the quoted text
        :param origin: who or what the quote is attributed to
        """
        self.text = text
        self.origin = origin

    def __repr__(self):
        return f"{type(self).__name__}({self.text!r}, {self.origin!r})"

    def __str__(self):
        # Same "text : origin" format as before, tolerant of non-str fields.
        return f"{self.text} : {self.origin}"
| 17
| 40
| 0.660131
| 21
| 153
| 4.428571
| 0.428571
| 0.258065
| 0.301075
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.20915
| 153
| 9
| 40
| 17
| 0.768595
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
b8d01502a558bb55ff565fe2b918a7bcdab79bb2
| 51,680
|
py
|
Python
|
integration-testing/casperlabsnode_testing/pregenerated_keypairs.py
|
afck/CasperLabs
|
23296d8c630ad9cd25277c9e7c40bcd946dfdc06
|
[
"Apache-2.0"
] | null | null | null |
integration-testing/casperlabsnode_testing/pregenerated_keypairs.py
|
afck/CasperLabs
|
23296d8c630ad9cd25277c9e7c40bcd946dfdc06
|
[
"Apache-2.0"
] | null | null | null |
integration-testing/casperlabsnode_testing/pregenerated_keypairs.py
|
afck/CasperLabs
|
23296d8c630ad9cd25277c9e7c40bcd946dfdc06
|
[
"Apache-2.0"
] | null | null | null |
from casperlabsnode_testing.common import KeyPair
PREGENERATED_KEYPAIRS = [
KeyPair(private_key='901b1f0837b7e891d7c2ea0047f502fd95637e450b0226c39a97d68dd951c8a7', public_key='00322ba649cebf90d8bd0eeb0658ea7957bcc59ecee0676c86f4fec517c06251'),
KeyPair(private_key='f7bfb2b3f2be909dd50beac05bece5940b1e7266816d7294291a2ff66a5d660b', public_key='00be417b7d7032bf742dac491ea3318a757e7420ca313afa2862147ac41f8df9'),
KeyPair(private_key='2b173084083291ac6850cb734dffb69dfcb280aeb152f0d5be979bea7827c03a', public_key='017f286d499ab1d4a43a0b2efed6f12935e273fb6027daefa1959a8953354d77'),
KeyPair(private_key='97b4fb2f783af685ef25cf150e63f41be7f46d32ddb7258a2edd092dcc4dfd75', public_key='02111c955feedc8762c47b1bbb47c29f191ba43ad7e1d47e5427a6e583a88e0e'),
KeyPair(private_key='120d42175739387af0264921bb117e4c4c05fbe2ce5410031e8b158c6e414bb5', public_key='02ab69930f74b931209df3ce54e3993674ab3e7c98f715608a5e74048b332821'),
KeyPair(private_key='1f52d0bce0a92f5c79f2a88aae6d391ddf853e2eb8e688c5aa68002205f92dad', public_key='043c56051a613623cd024976427c073fe9c198ac2b98315a4baff9d333fbb42e'),
KeyPair(private_key='2bdedd2e4dd2e7b5f176b7a5bc155f10fafd3fbd9c03fb7556f2ffd22c786f8b', public_key='068e8311fe094e1a33646a1f8dfb50a5c12b49a1e5d0cf4cccf28d31b4a10255'),
KeyPair(private_key='a3e8ed804d57a3d39899a70b474a56627a5c0a82d50e2acf43208e501a75d3f6', public_key='06977deb368646726c328397922530b06529620e92f7f24825f9dc3cd114fc9b'),
KeyPair(private_key='7e968e631ead312aaebf3dcbd6915e57fa85cf8cf30e811c5cc06aac2c865ffd', public_key='071a8b0acd10a3bb365768d91b6015ad70c6c960a8ea4c61344a2e92c09e8ca1'),
KeyPair(private_key='ce68e4f89922609d0d8e6351abb928b58716fc6d29bc6ec2abafb08e892577f9', public_key='0968e49f98f1f72e0076a9fa120484ad5a8515fa377ead9fb3e15abe70fa6736'),
KeyPair(private_key='655e5f313442e90a9fc06ea586ad2e81e4c08673196ec2aceb83db318cb1945c', public_key='0b84a57c06e30da322c11099b2443efaa2ab4539ed2513f4325868772c5f1004'),
KeyPair(private_key='25aa7c595d49f9124f90cfbf648d1a98a968f3228eecef52b5ac7688159eb3cc', public_key='0bf3cddd997043b79660aa93f5032c797374bdb1f6350bc5fe85c59f3c54d3e0'),
KeyPair(private_key='605b545714c25e921b8089a1ca016ff8143a94e0a473d73a5828dd319c49262c', public_key='0d7563bf6b8a692476e4f6b40b47673f6300bc97d272a4e349738732b6de1b84'),
KeyPair(private_key='f19ed6a87c73385c0b4c1bf2c997891218ff00675ed5468a2158f20396c1aeed', public_key='0fa1ed9b451e62864bf6f332fb0e2b6d5a4006687e955a760a6653df72ca0a10'),
KeyPair(private_key='8aa20a1e85d3d2a59dffe6d30376efb8ce05da18899a22e09557e0a2c489365e', public_key='10298c6227121f31b06d3cd8e682c15978f46f2b787d27e72e8f7c1874e60083'),
KeyPair(private_key='7fb2bfd2e0a9c250a72d9cbbc84f89a8710ef81952d7b5a4b0c84b4135a27ad6', public_key='120e132360aff67b2fb3961fdad7065efb8ca41e483f68af7fbe7fe3eb3aef43'),
KeyPair(private_key='b65093414fcf8d779a5f6d8f07fc7bc43d83ae6070ae023002582a331c4d88e2', public_key='128123c4782874fafb396c934bcdf8b04077974c9c66e99525aa6d072320019b'),
KeyPair(private_key='b2527b00340a83e302beae2a8daf6d654e8e57541acfa261cc1b5635eb16aa15', public_key='12a764e25517b99c35db937115f480a5067f6a809dd0c1f382ad2a9212a012c3'),
KeyPair(private_key='9d31a6e0081a6ecb9d01e1898f49b8670e7526556f1cafcd852d518a72fe9544', public_key='137dbe22cfa5b9dc02a3d792efb5ada53f3b312396050e6bc3a32961ffee979e'),
KeyPair(private_key='4979850e4da91e5fdef9beb24a6f83c7fdef644441e2bbbf827b0c0c8bb62531', public_key='14d3b8998bd1b9dec729161dd602a11375741c69f1542dff86568fa08ddc4fc4'),
KeyPair(private_key='19d1d36888aad7d2372b32ba45ec7ce11eb0fcf65c3b055682772177639eec89', public_key='153837befaf7913d8029355d90a10b451a666b1ac72eb867881a0485b4f7a3fb'),
KeyPair(private_key='9a801debae8bb97fe54c99389cafa576c60612503348578125b65ab182ff5850', public_key='155244935e80a72b4f3e83fb2fcfbb8d07c5bfdbc91e3b6c96371583139acad8'),
KeyPair(private_key='632a21e0176c4daed1ca78f08f98885f61d2050e0391e31eae59ff1a35ccca7f', public_key='15ab05a878ddce564865e591ff5927613b341e39572065dfeaa0b8b442b6084b'),
KeyPair(private_key='3596e2e5fd14b24a6d84af04b7f0a8f13e3e68ee2ca91dc4b19550f12e61502c', public_key='166cabda8cf18d35df268126f9c575c43726cf210c522384346c9e60ae019081'),
KeyPair(private_key='6d10c565a00009504e1bd4da3d1eafd7c38f1988950991c73d8012ace86e5001', public_key='16c8c8ece560ede0fc48fd7b2fa5ffe86b3a5ad5ae970eaea7bcfe00fd89f3f1'),
KeyPair(private_key='774149895133833120a34bab83927e34f03f32ed22949f48209de9335953d6c4', public_key='16dc6cf7becdf919dfa2ff3fb6a1a435d8102a7db3127a0b9d2376dd2bbf8ffb'),
KeyPair(private_key='567ea426deaeb8233f134c3a266149fb196d6eea7d28b447dfefff92002cb400', public_key='17815c1cc861ce91f8959e7948730f6bd3d2054716a600123f5cf6da515e14ca'),
KeyPair(private_key='7372199a5f45200f6385eb9ce7c2a36c51e68705e9b5dd689be3e406bba6d1ba', public_key='17893b71d5fd70e716aaef9f9a71fd3b2f309ff00748f059a22e4a4243b0653e'),
KeyPair(private_key='30aac22f692502e5a19a6fd24144aa7ee279a2be560aed6e99cbdce445c13879', public_key='18ef85fbfc84debb0fb03961d18c497cbd0df8a527b19c0f81bc9cb19da0d650'),
KeyPair(private_key='a72b0aa2f214b2692d043e8c8c421cb8bb3e9006bc3ff9c5cb0c5c32c7ccc41a', public_key='1912107edc4c82c20c37a9e30889c34f0d5d88598601067b6150ac9792c90d15'),
KeyPair(private_key='31729a39827aab4e56fde610216628b09bc442b9d441c29f4c5e07a3e4014bdc', public_key='1a1f68983b428521f210796937c7e3add47e7b6e6aa63bddee5d92371276f2fd'),
KeyPair(private_key='6626b453b6ed1b9d26520f47ee08a806cdd29e25965849783f319d5094103f74', public_key='1bd305ac459a207bde8d3957faeb7f6c36cde2a8d52de18efec89c72b0f88a39'),
KeyPair(private_key='952762429321ad755d62d440098e5b8e40553a7397636f73b89280bb633e50bb', public_key='1bdb9a820358587e9dc208436d4c81f132138b90db4629375b9827c72f686bc3'),
KeyPair(private_key='22519d2f0235b3cfac8eae9419b66888a68cea55484da75e575447bfa0bcc290', public_key='1c293fcb97245ccb3b52345455954c5d1a832dbdb1971f884218e7da764ac4b6'),
KeyPair(private_key='94af6ea41faec434af81a0ef30519f99b9f4580e07a03ab5dd4e2ee4c4711381', public_key='1c43a124a3a7c3eadc96ce915a1a871c85aafb75a66b24d6c99eb03862efe392'),
KeyPair(private_key='12710b527b8f7587aac346d2beef86b7b89f69f8ff24d63e6c0b04d17bfaa4ec', public_key='1c6458e8e9645fbb9e328a746508ed0093214ee8ea34361f305047433f19118f'),
KeyPair(private_key='80366db5fbb8dad7946f27037422715e4176dda41d582224db87b6c3b783d709', public_key='1cd8bf79a2c1bd0afa160f6cdfeb8597257e48135c9bf5e4823f2875a1492c97'),
KeyPair(private_key='6fbeaee73b94ac63b7bd10b7c1e16fd9b93e2beed73d486f57b4b0e2ee521927', public_key='1d034ef5e7888ca8dd615b67943dce8eb3d68951d25d468e306263c3ded0fccc'),
KeyPair(private_key='18d9712c19998edebdff471f5ebcb855649be8b564de4b9cc962bf13b7c71eeb', public_key='1d2957ce3ed1ceb7319d501075baaf8338623cb384f8f823cfcb67d888d0b86e'),
KeyPair(private_key='53dcde7493d8f9ba04b9bdefa680975ebaa15de07e5836487609e9f8e562d57e', public_key='2036e3217db76d3aa719b0b1e106bbe43b524962123ac2dbfde016d63c744388'),
KeyPair(private_key='b2d3cfc1475948f7f7a1874eeceec80f8acf0dfdb13d2a3e2539be2571705d24', public_key='20c3253e1f6d8efcc3c1798629bf41536a0d55d51e85a45edee8dbcda661b24f'),
KeyPair(private_key='4af392a20118ec214aea2b5213328b9ac4cce6f9bcc93e86be4e0faec7893767', public_key='236671f0ec1fab9455353adfd506f160456cdeea884c32d0426ecbb8b11686d2'),
KeyPair(private_key='5322dbb1828bdb44cb3275d35672bb3453347ee249a1f32d3c035e5ec3bfad1a', public_key='23bb89653c1d43578ed421e655e7a0ed9f3ed2e7eab820ad7739277e380cafa3'),
KeyPair(private_key='b02deae0593056c3e564c3428d81964245add36bf2acaa46c85a48ac55f2a823', public_key='24364b62151b13dc30e082fbfa3d480311012ee3bb302f8b5b4194763d6f1c90'),
KeyPair(private_key='52fd1c5ad02e3c0544a51ae7f31243186270231c506519a2472af676955c0442', public_key='24abe7b593adfb7333605fbaa91ef875b2b9e9cb60e306ed45e6f282e2bba652'),
KeyPair(private_key='59a9726221de613043549c19c88727ce648fea4f1271f76561460140f8f8cb55', public_key='258251dee41634bca0d58cdd4865dd832ad395312598d50cebc832b59790701d'),
KeyPair(private_key='09c8fa9559bd3d99a60927086a0a225ed6cc0114bc2150f1f8cd60f8900d8726', public_key='2590b574c6280d31e803057ea7f1bc66e27f069cfe6dbbb4fce031484b7abdb9'),
KeyPair(private_key='fbf47c835b16a6fcb287f3c620fdf144852407d3ffc8d1c5b337dab3a412501e', public_key='2627bc4243d23a39da94c75bf63a70d19613dae579d5ea9e67b7b2538fb18ffd'),
KeyPair(private_key='579d84e659e87bec87d0223bb84488e34cb7d6fd4a8030a4e259ffb7288b7060', public_key='26cf1d6ca6e4a9b7b340555b9d5b84144b4d993c5468e97a6b001ce0a1793a8b'),
KeyPair(private_key='1d52839651cc92a34e420f8a290686863e164078ede7d344bdf923ec58990178', public_key='26e0b5748451075778e40953a127150d058de9fbb6b4572c3f4b3bac765ae702'),
KeyPair(private_key='612feec901631aeae4c9378d2b453dfa269774c43f990f371056479b110a0e19', public_key='27709b71c90ee4b94e3c5fc4cab693380f16ee8304bc24a4ea811342e3dda90d'),
KeyPair(private_key='c8db6d3e3f0d56894d71cd2194c171109160cb986248f7637a437a725001c0c2', public_key='2b838fa04bae170641b8cbdc7b4137476265c995c62a39bb0124455a145ad54d'),
KeyPair(private_key='38e81dffaa441f31b54f81e1124f588922ed6c02792e5ccaa129c6ea60bea5f6', public_key='2bcf5266e210211dbc23d8f19a4cccc2242a97236898f17030d6e4c54db41825'),
KeyPair(private_key='5fc3033b960aa648ab0327eda99e7fcbba2478ba747d1dd39d5182661af640b3', public_key='2e76c9a73203608afada139c26571f250348e57c7e723c3bc36af22cc3c5e8e4'),
KeyPair(private_key='b5d2f608b2d35cb7f55b53643a5d60ce65de21a79e04e4d34114d408019efd69', public_key='2ee927865e9b75f70becfd53d5af25c91c12c8e804da7a0dd8c97334cb8441a6'),
KeyPair(private_key='df1f66738532281769e6b2eed06d31401aaf17155bc9adf5b280df310089fce9', public_key='2fd56c041fae06cb84f6a0239e5b20774e85932e8576c98c038a8d279575bda6'),
KeyPair(private_key='8e98f09119a3a9d6c637d66692d014d68c9521c25cce526a4f90495cadc7939c', public_key='302ed8cfcedf11f1b3b03f0692c018e8b1f009b87a61e828f7b90d1b51308e13'),
KeyPair(private_key='c83b8105f66a256d18b7a103a0a3f754c3f6e39e633102f13b443b9bb4f48cae', public_key='30f6d79dc1c08abd59c1142d87c5cc84dc5ec8237ae2520f08090a6f74176c26'),
KeyPair(private_key='50cca1a6afac01c41e8debb4cdf41874abbc30da33febc44974c58dcec89d49d', public_key='3115b087ef11fc609f53ec852e479c81e8d59e2a0287c736991cc5f613d05b61'),
KeyPair(private_key='3d1d4d5dbc9183bff892e1090512d030e46a6809c12d9eb1f63bd831d18a94ed', public_key='31380fc8215b3dfef1a8b3cab1cc062ace2de198b368d99c7888591b873413c1'),
KeyPair(private_key='b9c17218648a4b1cccfc0a43ce8eb49724731c4c5b55d922c1724d39102f8c9e', public_key='3183b251a4402011da67d1759c2e3b4b1114724d804c50fe9d6abe417d951856'),
KeyPair(private_key='649c6c35c790c91c06841d75ac23be3416d7ec8e1a205ea5ce4448de595c1593', public_key='31eb08dc5f75715d1782bff74f8f4d8a8b4f3392ff7eda30fae61606e4299060'),
KeyPair(private_key='0706159da2e1f290f743d1085f045efd47a3865fe8dbbe1e586cfe08321fa0e1', public_key='3245617c2fe41614009193645181d952db64769af09fe697d80bbde48afc4793'),
KeyPair(private_key='f30c4d66494a5daaee35a675049a7f7b615e2f2401c24ab52816800aaa4b84fa', public_key='33292234acaeefeab606c28b0a81a717b49c527d7b96595960504c0516cad04b'),
KeyPair(private_key='2f912686e09854565bed31ba392949476867e4921923208582b44f8e948efd77', public_key='365f48100a1ee7ca72d93ef561f4cb04b95a1bd59846395630d7828686502f4a'),
KeyPair(private_key='f92912400435ae6e400459b01ac440a3332239af0c593892a7907532b345215e', public_key='36706ca8f8a14e13c298f0f87bb38e77c55a2446b85df55e181453cf655ef86f'),
KeyPair(private_key='d2ecac63afde7c912067af088a49b4690856df8d5c1413f946c20ea568f2d268', public_key='370b0e43e408379ddc99e5504153be4a739c9493bd3b6905eb63172457538f9c'),
KeyPair(private_key='7610fb25e3ef0670fa0a617f34f863b0b9e8845ea465f2317c987008cc27f7b9', public_key='371638e5b601e22be839f36b64e1f4b1a5a67b8bdef5b61e2d2beeeeda02b208'),
KeyPair(private_key='bc3775a236c92e9d64f3eb4ce13380b6acbb40b6d1a971628b8b5bd037b57b02', public_key='37ad298548f87e946c0daadf23c7451d66c0b7271220544875ae8ab7d87b9807'),
KeyPair(private_key='9f47334241f7076aea72365b4095801116dd94a1c039b7d06dd5dbb526af9832', public_key='392211b6ba85b0bb16998e9f06d6da18a9e28aabee57e537d53c6b5bd93caef1'),
KeyPair(private_key='782b6df79d8a4a08509f372da80130746c22de456e32927a8ed8fbb06a929485', public_key='39ddc3a85dd9fad5e5c794095dc98c0e1b94f76bb0fa41e214780eaef89dbc36'),
KeyPair(private_key='2fb094b44702537f7cd5c363d6d5f728d7744b9521c8acc03e09adb7cb8bfeb3', public_key='3b2f518a79291f91c1b351a403b6fbcbbe3a4c64631acf2cab08ebda0d723197'),
KeyPair(private_key='177b97ef823dbb288c091045a2386e04ac7321ec20c3b0e0747002eccdf459cc', public_key='3c150b91f4244815735e5ac98eba09a6aa936ae4e35504411deaf62a16496b20'),
KeyPair(private_key='d19bab22bed4d72422e5ae2dc37982635e1d894f5a63dc4f790836a86a05b7ba', public_key='3d86379153caab3389d3fb85ea519263b6b79f5112de2b81aba7c3b1866058b3'),
KeyPair(private_key='7633ce7ccf2f84c6c4f96b584e640b00b53aa44998d366e8b518d31780e8aa60', public_key='3d9d8955fe56a4959cd7ded1f2b779ada46477fd5c8b7968151dd8738301a321'),
KeyPair(private_key='8925cff571318778f1edb00851f72c6a0c08a8b893e93f2110ca1b0489328b53', public_key='3e27bf0574c8674b76ceb03b42892f4fbf173ca649fb8aedd4c5196cac78c1d3'),
KeyPair(private_key='7694197c4010b524cb7e14665d869cc1ae7f303e684407103774596ccfa3339f', public_key='407da39b94fc3f69ecdb2a6d714e122eb97a9e94775b5fc2eaa8a37848b343bd'),
KeyPair(private_key='c7dac2cd5272090a667e0360db7d8c2d2d14fad60874b21b3c3bf5508b6ddc29', public_key='40ea7231420d3e23e21b7edc77029785f219daa492abcd7e17a8d450e8b40a9c'),
KeyPair(private_key='441eb02790c8f1b20d86e72a55354242673f1e8d8e92f638f2adbbbd8f4c4b2d', public_key='42e2018ffb7d9d88fe54391f30c9d88b0604f6f8d50c8c4e977f5bc2adf14b8d'),
KeyPair(private_key='9fa1946a4246b671e432654ad9f40b76ed9d960985f4744db43540325aa412a2', public_key='448729fa4403c6c1c769c607c9c6315f831d06494ccaf4a0ddbd0cb68331bd11'),
KeyPair(private_key='1cb314b2763e398353301c59abad5caf9851266f98e67f90155276d60b5354c8', public_key='4525e2aa68fef55d0ed435e4b844d95fc21e789abdf1e185c4d32d2aa69435d1'),
KeyPair(private_key='e2ccfcbaea8b3755bd21969368f62caac3a952cc9fcacca75be83f5fe71662e4', public_key='452d0de5d6d3a54173de6d4e9d8fe8e19d4456bf49823ec08ea1a6e28eb5fde7'),
KeyPair(private_key='30304a17da9d4117ff767b1cb8cfb2af36de7f85c2fe0aa3d7e521f646ce4d27', public_key='45fcc223875da1700f0cffa92485359f261210b6d18cf60c7c7dea8e95cea3b9'),
KeyPair(private_key='d97bd2ddf47add44db7d41a7d272bd5ff7394ea0197200e7928a67929f34b456', public_key='470f7655bcedf04fcd6c2901e7e4d4d92828fefe4f88ef743ae4b06f03624b52'),
KeyPair(private_key='8bb6bc297b858e9dc551ef91527b025eebe4435978d8d8efb14addd37d32cf98', public_key='47956750a388a6bd98a84e0db436cf55a422007f408d8bff01a83460650ad5a0'),
KeyPair(private_key='674f83f2a30cdcc34bba76f6471e02179aea890a33d0ccdbaef8e863ede594c1', public_key='47f2c87ac222c51bec15eff43726bc93e4863a275c125d2f6ccc0ad8f8ccd829'),
KeyPair(private_key='a986b1c392de9ea6f0ce258d6e74633a0eef202c8527f504e607357cad97ac4f', public_key='49b9fc2704da4983c6cb1eb766964efe29366e1f0a1fc8c49d03663bbb2c13ca'),
KeyPair(private_key='f4b101a875116004d9343c751fc7da8cdad528274bb4c67ae1cc3acffa932df6', public_key='49d76f3a59249d0f246fdeedc79f83b7e78c5b0d3aaca27450bcf5a59b8abcc8'),
KeyPair(private_key='1fd5e617de20386f897e74e334e6b58fcfa709432ea8966f40f241e2bb8e08e7', public_key='4c06310f7beea4c4640fc112dcfa948bc75cd89f4a38b2dddc2c33eafd8a011b'),
KeyPair(private_key='b8c297dd87741efd822e74ffce3d4f0ff3a02336ee3b79b29b15d2e2bebcf924', public_key='4cbf28c782b252d006e1b5edcd1d7edf57a96916d84277599b139843da603a13'),
KeyPair(private_key='7790814d7e834c03fd09be3fbc0c360de6b79d00e17a9832fcc892009e1e8648', public_key='4e787085dffe458e1730219fc3ec2af36ead66acc2ffd5710b6e671ae7bf62c5'),
KeyPair(private_key='b4ae9301b3ffea461ad9815d6cca5bf6bf494148590bf4aae6a88de0c2d397d1', public_key='4f455b9ca2d268acde35b73e4e40d867522b53368bb9f78957b1c77364517268'),
KeyPair(private_key='83b67ce834e2e1021b71495129fac12ac66e7bac3c25863ae413a89a69d654d3', public_key='4f7a1d8d1ab8bb1fec3392412256472732da532daf625ed30d38f67d16afa04a'),
KeyPair(private_key='c7298edc21e58ca1b9cbf0a09403c0b424731b890ebc0d575567b534e576ebae', public_key='4fb31aeb90cf3cae0d02286bd6f101b5d510a6954a19450b2d403d29237f96de'),
KeyPair(private_key='ec8b50505582da991a2060216614b0e1e5264146f343c7cb6d723b891dd7a46c', public_key='504f9a8e17fddd6e262b9747db80f149ad2b18d96f3619dcc49e6769f74f6e6d'),
KeyPair(private_key='2e533638d0b7094149c6609e63c49c2b19af1b604005d48e2085e959e4c4a617', public_key='5122eac8a5c99b7da8f6d40d5f3269c6844dabde0418749ece2aeedc64c27703'),
KeyPair(private_key='f25741daaa68db608be3beff496de8f304793be99460c269079d3bcc987c1fa0', public_key='52341febed3967db5f26ee9b7baa2ccbb23443e02e3c8ec3b12557c8029e3f98'),
KeyPair(private_key='debed23f2a5bdde5e1959b8f39281d7a703472871300f8a318d84a74d383869a', public_key='56910701367ef32447512f4238928dda8f6a13a319895d3144d3c2d51f933e55'),
KeyPair(private_key='62cb1ef334b58f410d8c7573bb01217131b5be276da139e76f3da8e74e8bb238', public_key='5768226310ec49772f84ac96ce3d5370a306dca232ed7f0e097a55118347acee'),
KeyPair(private_key='2176cd910ba949b2ee7a1639ff0a40a560508f7e25a10af00a2484586b6437c8', public_key='5a3fff1ed432237e779fc6aa20d7549d043cc0dd92180ebee0346229598870f8'),
KeyPair(private_key='be0d013f0f1548f49790527b5af3f2893f3f1acfddc6e3c3bc8e71aa9ced55b1', public_key='5a54b3bbb3d19b14e1c08f600f03633e56c2d4749708fba2eb15db4aeebc9c43'),
KeyPair(private_key='afeb1020bb1aa51d464c3ba4747c8921ee0b8838f88ade5f7fb0bc0ca869c3c1', public_key='5c1a6c5668ca03e434e552a2ceb4a9619eab484fd0ffee8ec0ee6f13896943af'),
KeyPair(private_key='a6694bb6d93ce9dc9eee207f4f9a52ebf0a571388a037faf6bd07de216a19deb', public_key='5cf4412dab1a15f417b865e7ed45d4bda797f288d3755f1c826836b4d0bb1700'),
KeyPair(private_key='27cc3818b6a376f045e5b0be6962d7e4ee2f065b9503e784468e3ed81cf4a8fa', public_key='5e48eb44cdaec1ce22a49db5cae71d4cfaf92b65a15e0f7d78d6e2c2a55e5a92'),
KeyPair(private_key='96bed2ad6388e8e6eb3efef7e66190d669a660138af47431518b53524f927084', public_key='6020249d08b2d1bdea4ea503edf355037c87e9c901f750a03ea6593ac383db4b'),
KeyPair(private_key='c01819485030f6b18b55033bbffd31402f6e9b2a056073ff47a44959e1f27704', public_key='60ce84c74fe3864bba1036e6aadb1b488c8f00d24d82f6f3aaf33a0f934145a8'),
KeyPair(private_key='bb02784818e40282abdeb0df891a6f90d953da7b1dd66ba56815e663ec5853f4', public_key='63d6f72981da41fab984a9be47c35e1f618a8e6b1b2f807cb70a6d9b44a1d86d'),
KeyPair(private_key='a09b5f24d64fc66b682a0d034817d1d308627c9581e03364daec6040227c8a4f', public_key='64ce433d66e9d75eb1f328f67fbdb1c28f5dc9224d6f67cee9a7c5e8958decc4'),
KeyPair(private_key='4dcc1c8602dd08a55ff3727f491d1c5d0faae01a6025f1928d38e9441d726244', public_key='64da4885202e64a28c96a8442a6c918198ff8738725717e18c031592343ddef9'),
KeyPair(private_key='58d181469330720ae719ee18b72792f04464c8926aca9b0978bfb4a5e1555141', public_key='6532de34f5e12b5f9eab3773ee8b14b9ce23d221e9b54bbd680bf77887e7c979'),
KeyPair(private_key='c859fe72fe2845d02e4d7800874c0c25e0dc24a5868602dea48b219876bbee7b', public_key='65e85b23e4cd64c45bcbae0e51b5e4ec864ab7d43e0a79d8bc55af0d57c6e420'),
KeyPair(private_key='7be88e571fbb6df0a60ba5a4666c054aba2fd159818d9adee9c2df08b36f4bbd', public_key='670356416c651625f5a302ece99b0dcebd5c38c07a8df624056659db12da35f3'),
KeyPair(private_key='abbb74d1e8f5f2bf08be19f71336b34c3e5657548b2ef04ec1f955276cf6f854', public_key='6787d341c0c9c3750e88ce4c6b3b0d39a1e9252bec484633926259ebc4f9c839'),
KeyPair(private_key='c675906d144d16a9c193bee68848dd8f3d9c222b20c139b1d2afe9cfe236b2b8', public_key='6795c5849c42fc3c066791ece59a2d496e21571fd51cef705d33296e92bd84f6'),
KeyPair(private_key='0d8070e64dbc2851a54fc209d9d2d00052e1c51f91364b16c61b2d6acf89190e', public_key='67c730d776181ca9283b4619b9766e2433e3e0e0b3773b7abf688071d02d5f98'),
KeyPair(private_key='e27e2e5ddaf3e12e6adad3093e14ebf722bf544aacbf6dcde220943ae143a18f', public_key='67d6642a4b5af2a9c421b01b168f8719169a071f8bed6562b92da37b353f0b87'),
KeyPair(private_key='1543c3f2168a34bcd346a6ac7825afacfb06e45031d2235d5ffddee4bba0f0a2', public_key='693d4214d162919fc9360b5d37d799ab1640acff38de06170d45eaeec8ebd2d2'),
KeyPair(private_key='fc21f813a84df4b4d17f8b7f9ab55134bbbd3bba544485e11e2be29c15e04308', public_key='6980baeb7095d2d49b0d44a3c088a481e10f1d6f2a6ecc3e6247281cb6dbb1ec'),
KeyPair(private_key='ae091129fb7e4272444ad99c16ce34f76ec97cdad11f5ee3aa58eb6a6e753dfb', public_key='6a274bf5d540faee403de045acf20827c47f02c7820bcce3cbdef23b3d35eb9f'),
KeyPair(private_key='97de85c1360b95e0bb0c56f6a277b518863bbbaf83da1953f0c5d01e51aacd6a', public_key='6b5727eac4d72765884316ebdd67a021dee44bcb74d61f2ec2c71f9d90c7519f'),
KeyPair(private_key='9c5567131a1c03e68730f21efc375efb5401116e8958a4c7bc5ee4900e2066a8', public_key='6b82eb4e343047b278c5bbeefad3dab8f4de1374f9d1a596e61c09c55b733455'),
KeyPair(private_key='92f1c77b02d1a5f83b16a5621471ee51c1054eccc7cd691305612c20dcfcdf56', public_key='6cc88be9975a6cfa04fee5506c734b4d133a670e93ce8aad7831311a14694910'),
KeyPair(private_key='de0e7e04f85fa8609acd4918b84c5a8f8f2932c3306bcf16b164061781c1c779', public_key='6d0f0335287f39efa60b838140d0870546f1e80c1ffa1e58d0814941f7654a2d'),
KeyPair(private_key='911fd7dee9074b0ac50eb30e5583d260c8995dcb9c0f897dc243684a79b00fe5', public_key='6dfedff4f52b349d0c00561cb784260987b5e3a931b0bf1c70cca125244ae88e'),
KeyPair(private_key='80d5f23ec6a098e1e9c2d27929ba007723ad73289f098224a6c899bd5c59b55c', public_key='6e00404f2949c2abedbac6c823c2d6f91042ccc5c4f1ef5b3c306daecbe5f3d8'),
KeyPair(private_key='51aca8c7140a76d2d3ebbc0dc0bb55817805527f54de157f850128a9ac00d1a3', public_key='6e2fbba9d55c103a0f88e989ff86d980cd9d153741804b4c2f05f5117a4eaed4'),
KeyPair(private_key='7ba57a8098051a4288c58d44ebaa8bd6be32055eb0a31599a3c322497244aade', public_key='6f62f8a0863d51c5198e5cfb6bc7a28f5d170b4aec0c3029b5bf558a18984b37'),
KeyPair(private_key='8bf9db26a16a63a009daeeb9a49ab9082a1b28984df03583b121bf2cda9ebca8', public_key='6fb3a039f15daebefeda77a58f7187a4a6cc39b71ef847031f8dab050ab078e0'),
KeyPair(private_key='43943367377cdc91d83bd4b649a45c9ffc8aa388781f3fb1470655dffcb1ff99', public_key='6fe3807fb99c18173e56215400cd9d3b4ff43a3afa3fd34405ac36dcbb81d1c0'),
KeyPair(private_key='c846b8dd2ed5d53d9df9f18e685cb81992e0165b31cd32757905947bfef2df3e', public_key='7107bb5ee853e3583a358b1bf39770b749c68966752330d49926c21012af5ec8'),
KeyPair(private_key='b80440aa619191af71e5f80157bfe1390855308c3c11e3c9ad8f5fb14bf372c8', public_key='711e4d534ca0a4ca48a828534d933f40547e5cff8e854e7803607ef52828689f'),
KeyPair(private_key='7f6afe19128a022a564c46aff9f06642aad6f5dd8df11e1a105f2da1cd165f16', public_key='713ddafc7930dd890180987bc781a626cb24783deb3b99ebfcd053f11b1e3462'),
KeyPair(private_key='32fc86795d8a24f2dbf135a5262e2332e32aa439d5a2554ade352d38aa3e74a2', public_key='71a840ce1f36a1cf67421674e5dd3b4aada5e8f71a97cb4fdea514edc77dce2a'),
KeyPair(private_key='34c1e5c217aae749b48798101a5fb1f7b4e584939f8df5c0dbae88f64e3569a7', public_key='720719ce3c630ab967bcca8a502e2808d4dca28bdf9fb4627fb7921621369133'),
KeyPair(private_key='0688fab674ad94d2548bf3a8674c28e3e9974d13af3884c87281bf5d6775461f', public_key='7240fb6c4e5eb737dd80dceb8401dd66ce3cc1a2926a28f5290ec8a02ce6daf9'),
KeyPair(private_key='6c017576b171ecdaf12ffc8ad788b209b60250b8e3ea4f813474a738adbe21e3', public_key='72b29908ee0dd69aa71dc5eeca3b3184f59bef6d529073b05b8466e5a89fb7c5'),
KeyPair(private_key='13003dfcab8bac3acbe38dc0823e1c0d4e7d168140ac6807ca8984530eae0eff', public_key='75e8287030b2e28fec1967bf5b45ba2a80834e598b7a462de9d7f11384be2439'),
KeyPair(private_key='b47f418ffd1b662ce4a94eab2e8bbda2c68d15e06712d7fbfcbb4cfdf0d4bcd9', public_key='786dacb8f294f9db0fc21e5ecd45d97a5719f1da7806af51bce93860dbab8ff3'),
KeyPair(private_key='6a01a112dcf662b71e09f69efcf4b95bdfff3b1f4ecad06944ae074ee026c0ab', public_key='7a76826863fdaa7f0e87b6f65af069f77810bb533e86cc3bde4100ff37373bf8'),
KeyPair(private_key='5f353f5968a3fcf8034749fbe726de07a16e1cf42659eed76ee0c02235c4a196', public_key='7c19b9a00d032c598f3a23d7fdc28badef7595b0d75b72747e9514ef8d74bf49'),
KeyPair(private_key='54a89c2c402bf9863e0495161c3a163f041ca2d895db4947431177737e563fa4', public_key='7cdf83b6ee5ad21799bdc1b7caf614e31d42175f6418f44a278f850fa0c7e455'),
KeyPair(private_key='4878adc557b3e19f4a79343e491537f8c9906e6c868cce04e494413eb488ada0', public_key='7db0f469bf81dc275c849d21bd302829ebf12674ed53620f635d1ea9294a303b'),
KeyPair(private_key='2f1a298f707392bd785746ee4f3df2d3f3699fef9b7aea450e36df593825bbf3', public_key='804804e68e0b09b5ef10cc230cebd1611453374134dd4b03a822b365e0971ec1'),
KeyPair(private_key='418546d0b36843b87563d38cbc411f9d846bc3d02176939de54aad92d5105686', public_key='80c540ba7b69280fe1f982aba828c6b65198ee1b5f96989e9822071e1aa0260a'),
KeyPair(private_key='da074958c835943fae2512e8254639602a1f8cb0bcaf2272a08ddfde0410a15e', public_key='81737fd7b1e5038ba0e9c1f17b057068e92a855778cc322f74eeb7c70993942c'),
KeyPair(private_key='5e0961147a94ce86ae9f4c39d4dc85e5448ad2907647d8431ac572d4ec76f7e0', public_key='83479886ec0365defa80ead581bd48f1f34b9cdb205f32d48f5f6e40cb9dab11'),
KeyPair(private_key='49991f907d4954cdf384808297847ff54964efe1c85aa837a11b28a2210b9304', public_key='835e13c4972c6d56ff0c2b41dca8c14f99d6ff256821f71323b94404b0c40af0'),
KeyPair(private_key='10d5b0f6d82dd2d3a31bd6224030c22d2b34c8fe9b94ebc8b0a07addfe664f63', public_key='84858ff21f5dd07a56c9b00763c4fbc697b4b491d79c6e32beed7134903792fb'),
KeyPair(private_key='e6ca40b38b793310d787c7fd09713e0112520150435cb7ce62315c0300a4ce7c', public_key='84aa8570bdfd21e58913627b8a84cad3a6a8221d878278440fafbabbc4a424b5'),
KeyPair(private_key='819196101c6cf94d9bf7aaf4e9349c5082975fc8508b8a297aa667e9e379704e', public_key='87105e941c2d4eed29a7e5272dc0d2a7aae6dc718a77d491d0d796a5f18e8a03'),
KeyPair(private_key='10a31d2e85c6828f869978b7194f5c13a00dc8a5976623202e8c80a844ed9930', public_key='8711020fd3d1a7db8b328c04c4d1029aec3631c835e1190b1a5d6879540dcfd5'),
KeyPair(private_key='7ebe913487c3e76be509f4b2632da9d2b93f1a66a9ffc0225e0e2cd28c9b1534', public_key='898b7e9cf22163e4a95a2c6c6b1df4508f3dab2692d59671c70578bac61a8a6c'),
KeyPair(private_key='efdb47999e4fbe5409fb4bd8c3ae4b169fa2d31084d2cc416c54044ff09783ba', public_key='8a9c42935e53856ac0b518b51a39d0e08e0a4e519b7c94f0f6879a3b8a974629'),
KeyPair(private_key='7243fa0398bab64dda1e34a051d0976145d23b4d17cde13f59a1a1acae5ba370', public_key='8adace260cc5bb7956276a68590ce0ea7402a9489a8e07ee1e346128219d7467'),
KeyPair(private_key='e0279decc2c057ec415b371a997483d9198ec481ef9f88c80a48b39e63df2c47', public_key='8b4dbde229c3562a8da2da99842ed9a2c10ab424cc04ac125601f2e7ad92a1f4'),
KeyPair(private_key='5ea2c77b0c8328ca69f0c1fecf379455377bf8a805d54dd3933612aeb3d970a5', public_key='8b9f35f7cb951b4c89d450e418c921e6faf484f93c94a04ec00e310b624339d0'),
KeyPair(private_key='cb82faa58dea8d1295b41efe66e4dc5bb057c69ca718295c2ab6359937178710', public_key='8bb03c4ece1cec9062c00686b45845fcb3f10e8a48148df1073e626fe1d424cb'),
KeyPair(private_key='50fb900dab107e890af6a19c96b07d3ea9d06f88899039408690c1d4e12499d7', public_key='8bb5d118ffc38ebddcddc4e6a9c834840e765e3ae3b67cd880754ae85e7429b5'),
KeyPair(private_key='0a934d1cfbd939980390ebfd977e253a4034076cd498f9d863dabfd5dd1aab94', public_key='8c47a93168b9087382fa32e4fb7608f68a53a15a05e2d3f4dc932ea20a9b40ef'),
KeyPair(private_key='7c0c1d6b40273df4a2c9f0782557a58f4d37ad157f3401386ca59b8bd4e22153', public_key='8c4889aa2d49cb4a48820d565d716f759b29504178db637886507a8edf149e10'),
KeyPair(private_key='2511d00db11fd01101d8a60ef4435df5efd637d403490d562671c57aba7aa93f', public_key='8cf6be9ae145ea5f33a0e94f3b053bd64aefc73203fdcbf5b6979c22f4c28e0c'),
KeyPair(private_key='5fcd5c110608cb991c8629377b642e6c41aa2f0e33026f668f7d9ff61ba53562', public_key='8dc921ac8290034ce2670ee721c4ceedf7be8cf4a96c9f6632b03f723b5edbb9'),
KeyPair(private_key='4e35ebcafb40a734211dc832edf06cc6b357cfdde0abc0978a3b83898b84f7a4', public_key='8e00a1998a4dcdbc3b52f69903b4bafd4e030d4bbfe7f2917522b962ccefed06'),
KeyPair(private_key='521245e62daa2a1ced35dc79953ef6a6ac28e187cfa4e89e3ee6159a8b580473', public_key='8f6444828d822e85f0ff6a6c93be1e7a9be64e5f1a1be896382ef00bcefec5d4'),
KeyPair(private_key='7f84ff4fd6171f1e22a4dbe429f0692f587ac627581ea9eb79cb0be6da3bea31', public_key='8f948316d15be1346a92ea7380bebe2e7b1567aca4c8b8538987bc989d2db9b5'),
KeyPair(private_key='2574c8e500a0de1d3b299757516e5100cf801dec492ff1cb9d045ede78d67b1d', public_key='90bf468c811387c305ef45926aed0757c41fcbe277963ba7bb29b6c393fbf102'),
KeyPair(private_key='6b49de720a80e737f413f667decebe0d8f3d32fa411eed7e9e03427044d9c56e', public_key='9206af709bbab7f0171cbddee93c03a41283e93aa002742f457d6e18cfea75f0'),
KeyPair(private_key='44670c4ed177f7ca4067106720087b2039920d9aeed07fab3d3aea88f86d2b45', public_key='93accf34775d3d53f822852c2e60c2f54f4c93232df74ba1e4a171c8facc121a'),
KeyPair(private_key='290da61f8fa7841a77d7262db75b5aec2d7c7c20a644d3ebac73a511e82c5fb8', public_key='94d868fabd8e87da2433c3857b3af0b6469162eb9e9e42c3f19466b1477a09f9'),
KeyPair(private_key='ce871989a2307ecd68ef46368df046cb85c18eb3fe5c2771f09a31e8a15b677d', public_key='9572bcdc863c3cb8fedd5c89f4ead8dfc153224b66852a00f318e048851d3a81'),
KeyPair(private_key='3ceff6413aa4ac37c40b035a64208d0263fa9a6c53d935f878231037423a5a49', public_key='95a9cd64a96e9f81046eb8edc00efc26abdcbbe7588d284dd2fc27eeeb3abcf1'),
KeyPair(private_key='2a6018851984203e0983f0671e94fcf649ec04b614e5924f435081f7d1e3b44b', public_key='95c812922f7e2b956fec27d0eadcd4beb6856c775795c2407aff376261972f1e'),
KeyPair(private_key='a8ed10ec941dd2bed885a597cba2256bc0cad23733999be6db8881c9132ccc54', public_key='960bad0e5f7fd61a7bf1c5f782a25f2cae980af512c00db81ada48f0bea6ec44'),
KeyPair(private_key='a72bd208fe8934e820c8842867c0b1713fcf6e524e45a33cd0681a5bd9ad8030', public_key='9646e69ff1779c99be5b11a2bd1488cdaf400441dc6fe21c32b204a277530876'),
KeyPair(private_key='c0ba8aaf4901bb51772fc200c21bf6e0a5f8977d24c70f5c07001bde5cd0ecc8', public_key='9800e334c4d06f7b9512daf2e638af9e1faa011f13c86959b108960ef0e45a33'),
KeyPair(private_key='b4f1c7747591e8c42f9d88c0c6de0e386e8c2f52bf0a90c64c3965b1213229a3', public_key='993eff35df9fddaf9cb3a3e81cba41dcb0337f3da9edb9258988fc7910cf2ec5'),
KeyPair(private_key='0512e787a9690f70210d647bc999960b6e39eed5c05fbd5d5cbd59ffce02ae22', public_key='9b65cbf2712fbae6fea9f6bb1ec0dac70c068184fc47e053b170947bbe1af8dc'),
KeyPair(private_key='4210cb2ccc3a432277415e2b6eb45a6a26a37c650ab459c409b4f3f97f9c7a26', public_key='9c2a62bec16a55f0a0520152051c543bad9c04d4a89f178d0d021db721747088'),
KeyPair(private_key='88a07e811417e899de7435029fa96802341cc9bbd1b5fb2563d9dcd5bc65aa33', public_key='9cf2875060c6bc88ed638e9a512a91d499a4166be9be14928195973b8d36e9a2'),
KeyPair(private_key='bdf945a2b456294a26d72a17eb8efac1601785d00ae305d45f64d79a611befff', public_key='9d499c7a8a289c1ac424d15dbe9977f9282494164fcdb62fe932254f531800ed'),
KeyPair(private_key='5d6b2144aa9c5c3fd115549bdf632f8d61664ec694d965291c378e1dfe861ae3', public_key='9dc286e931e6aa21af4384f38a56a012606e7f7ec6f3194fcfcf37ee4898c175'),
KeyPair(private_key='b5a629491fa6db192920bff83498123c77b48d3e940c48235c0667d8faefcaac', public_key='9de08440b758a6baf27e9ad45dd434c98cfceacdf790132e179f2d90de537f31'),
KeyPair(private_key='af47862137d4e772f540029ae73ee01443c61288f3df9307a13d681de6ad2de4', public_key='a063cbb51f5d8b7e5b5eb9be3f8222c61287c84e0fc410e35c71c277ed12b9db'),
KeyPair(private_key='6623280da0e72d4d9ffa8e03ea529e4ca29b4cf625afb20da8ec121f5be58cc8', public_key='a1ea0a81092ba0a34ad73d5a535d7f607212ebd566186312b20429ee3c530fe0'),
KeyPair(private_key='2b9118568e8b92a2452136eebf22cb53d067c88e26f9b58492030bc7c15e6d2f', public_key='a3158195b3ca6dfb88b8f0cb0788b01f25ea11421104df071bd043ca27f2def4'),
KeyPair(private_key='1abd7bc7ee9b222eae0bb5a5faffd4f97380dff34cde2cf010e22ce782f31d41', public_key='a33d23b6190bc248beac9cd19c07df04b7b5f061d3ecd16c4bb3ea9933567953'),
KeyPair(private_key='889987ddd01baf0af4817f3c37365da93359af754f03ea5716a00cc3a32cd6d4', public_key='a46ac0e5e61517d0d7adc8a59ccd67ef12a017544ba86aad36af9418f7237f4d'),
KeyPair(private_key='2b7c592b9fe40f503bebc6fc51250f202396f71950f9e1bf54d69bfa24885a06', public_key='a48890a5b02b84dd87aabf479ce8c72ac9b9d206b89cd427f4b0350ef63e8ddc'),
KeyPair(private_key='d53c4a4c2d6c56752b683c2cc74240d782e6e9004ad80002f8ebb413712c95f0', public_key='a4a39c2534dcffdd1d363a7fdd1cf171ab0d545e1f9be1d2d3c62893f1a4708b'),
KeyPair(private_key='847f62416f4f5bd06b9980f80989f7f174341ca2e2787d4d2c687692ecf5c4a9', public_key='a52f13ac3024634e62872984c229fb3da9cf8b3b64ed074d07e9d5f365ffe775'),
KeyPair(private_key='b03321f7b627d262a2681bace8da3b6dba30c0fd26f4b2f704569f0ed73a9a2c', public_key='a5acdd6a7803df06393835136acffa4b56d45c49e8fb747208123264e22a76ff'),
KeyPair(private_key='de7067e16d72f1f3c1320a8149ae7412e41bf3b806bb6069639eecb657e7b7d1', public_key='a5d62c70598a277df24de4da9c674996985f58ea119f8f4b0193cf91813d00f3'),
KeyPair(private_key='375c6c385d6244c82f52b12c3826e6ac2b87c8ac5467ccb9d5808ce1b0c097e2', public_key='a69c8598f098a9c586e64af052858edab605693bd0a9e61f0ecacccc614c9a97'),
KeyPair(private_key='32bf2a225531e428556a4d2a16b802e2d7d4f0eabbc72b271fc4874f766c6232', public_key='a6fd16b9dd7f8302e434f60c4783d18a645bbfcfa55989b1b3b5e6da7194a803'),
KeyPair(private_key='e019b6849718bc5a5601d6b49a6514259b5337217f5b3785c6630c1985ca876f', public_key='a87bc204bb41f17086aa0c6aaa7b5f44aad4ee217d774a97f4327077720a00c9'),
KeyPair(private_key='dba7f8d373b8e40d34b73d4cb684ae2d2a0e7406665870d2c020bcab3938df5c', public_key='a95465aac6aa6053c06f2a3611e416a7096fe896636038b2e24cfb54e64a2bc2'),
KeyPair(private_key='c900f982e808b9532aa138aa0e25754db7aa508ee030e2aed21e0e06211481f9', public_key='a9afff6c82c560196dcc1750a094e18c58a572cadc5bd86adbd3dabc9325dad9'),
KeyPair(private_key='e705000d237b5947631302453881df1e687353a32f62511abd9f714ded9d23c7', public_key='ab0b9df47b1d2de76e8c4a656ee32df4079a69d055414c79d02532eb463e8973'),
KeyPair(private_key='d7445585ce2b30b8781ac6c0642217c7a530bad142da0ec37f07692d69297e20', public_key='ab32b4573478692e67ab0d8400844baa68b9d8ed211b37d60b7e86adc209ac5e'),
KeyPair(private_key='85fbef9b350f8252c93118eca65dd5081cdc23e531778674e39885dda03ce884', public_key='abc558183bf2e0a2b8264d9dcdae0f7c3850a080492257c57e785b2b3cc69fb2'),
KeyPair(private_key='0356c4f594b967cd37c1ed823a80b598579cb8cc7eb49dd873c33a27ce641014', public_key='ac586ec23631f1822847a150131cb59e36290c20fb5d02df7db4105a639fbd55'),
KeyPair(private_key='1c26289a19022b89f61723c530399d5028383942df1f65424a754d46144a4b39', public_key='acaf181739dc2325d5517533aa1d638262b8ccaa13c195f0d6c37fce53d7f4a7'),
KeyPair(private_key='4c5ff1339510f29c0be29bbe0eb8ab9af95d349b1cf5d47b0e4d8651def2b548', public_key='adbe324c9e293b50013ae6f46930cd7cfd4c69d7369a93a60f516afdcf30c719'),
KeyPair(private_key='a019251639cfa602f8f4bce54075a767cc0ac28b0fdd3ffb6a05e2781f7394aa', public_key='afd7aafc40e6b139a6e68236127b100d54f2a50de154bfb3569db00c6e560f14'),
KeyPair(private_key='b54dfb7ae899bba200ff169594ba37fe8ae0ccd5bccfe78ca11e056fff8cfc61', public_key='b0734073b57b9a7f9425411dba2ffc177b906f8498221d4fb85326cd2ed0b624'),
KeyPair(private_key='438aa03487b1791420f90cee518cf19d676e0006ccd87d3362fac2be0bb8c8c9', public_key='b07fe04ec5a2e5f6f69f0a6c198392758afae715ecb27710af9e9cfbb653550a'),
KeyPair(private_key='1ba2e69648fd4a521e6f7ec2461ab89131161b327e3417bdae23a495bbff4173', public_key='b3320c7a4b40e7dfe90c57547012a3afd77db9a907d6f8772f69c03112b0fb18'),
KeyPair(private_key='2c1dd6c1ca1558c42477e4d20ed873f876daca80448544ef4837cc4ba3f3cac3', public_key='b333b70e5bbf6f29f95ea5a8f14a3a05d617fb4b09293e5d0010b98bfafa76df'),
KeyPair(private_key='c4565bacb973639f1918cc3932777601601d790ed9fd7a9f936039a5ae5d87b8', public_key='b44998134fbf61fad13765fb532a494116cc61d9a8c82066f3dc782af3dbfd3d'),
KeyPair(private_key='0dd77f3b4403a350e8882d0b49b20cb8e3f5439bde69295019bd40e9498b7c15', public_key='b52a9a05cfec055c8d68f997ab5fb2e75350112399daf18281549a4be459dcd4'),
KeyPair(private_key='314b30cde4ba62545977bc64968ae69b4547af75be7f2993f9fb6391de8c2961', public_key='b55073b69448a3496092489dda37eecb8ab158ba04a3ad1893630036d6f4dd08'),
KeyPair(private_key='7ca5819f0352b23d3e8f5aad91950c53933505b924489311a4d6ba257c6c4b91', public_key='b7b03568a63d8446fde59577992368e782c1a710eb65980167944b13b916bfa3'),
KeyPair(private_key='c28726b5bebc1a3e78556a17cc3c5fff852ef10c5b680aff24bfd75e76606b60', public_key='b96488e09726737cf3216ed4cf6acc0023214a7b4c8376c5efc718bf4b755f7e'),
KeyPair(private_key='173bf9d02f87d96b90441b23a2f47c9329d4a3bee379c24c770b74582503d62d', public_key='b9f98d42cd82d82dc116ff49ab6901f14f992616c1038ff399f0a884c9d6d498'),
KeyPair(private_key='cea70acc35a4f06677861cad532ce55ba414b42a9b11346b86235e5510a01c5b', public_key='bc90533b780f9cec93e7b58aa18cd23c19658fa9993d750e69d088885213122d'),
KeyPair(private_key='e7cd9ea7e05935b5412e065a3a98b3c008049d432da094580966a167cb6f6149', public_key='bc9dc2064a3950151eb862bbc5e8b156752c5fac38cd585b069c9078833a5e94'),
KeyPair(private_key='c559c231c2b3abb8739669ac347db4d171e13041fdfc97b1f82e682cc19f095f', public_key='bd87a18d1e524fe7bd4c936b2159b9d93f359898d1ff63f03b903257b1dfaa5f'),
KeyPair(private_key='a765c42969486d790c6d3a5b486067b5b03eed4943dc5fe9c88b927ea60783ea', public_key='be0adbe3633ed3611d2d39e008d2912f5623b9422230427c5e5984a323e71e08'),
KeyPair(private_key='6807fcdcb98249bbbe98dec53b4d242a3ae38f16f53f45c2b73a695723eb8f99', public_key='bf30eac282d580e4a420736f147700a9df191a855bd6f2b642b2346259816c76'),
KeyPair(private_key='107f8631077bb679122facb41f13df912541e67df5b37dfc48becedc20e08ac8', public_key='c041a3337cd4abe014595422e3208446991c57047ee1e8992ae8f50712bee5eb'),
KeyPair(private_key='d7424c4158b5806e433ad5cfaa180ba2a9819472be4265188488eba1873a2766', public_key='c2c49fe65b7bd563e35a1561a9d01598da110da437ce6ff959f0462374da597e'),
KeyPair(private_key='4f682a1dc764308ac608289a9b18fe4cbb39bc7b3c581ed351ef044fa00a6888', public_key='c47762bc82ab7d68cac641def2f50bc4b4f53cc331947486a05cbfb9375da2ab'),
KeyPair(private_key='e64ec8e541ed39a957181cfc1a0fe9e5106d89d126e1d378d7296d9426d08d47', public_key='c5a54580b55279f454354efb99b9e2bab99d12fec6d9747a7b28ab168024593b'),
KeyPair(private_key='799963b12bd02e2d65cf697fffbbb7e14481bc3b05a8247e07def779d89d019e', public_key='c5fe30a98a16f343f61844a1c46e4295701afe4650640378253c4989ae0f63c1'),
KeyPair(private_key='39cc74a988c3857efa99fda8e1bbb6fb65f65dbcc195d2c15c77c492ff6a450e', public_key='c6aaf232cecdfcf9a279e40bff6a6444932b7cd15678855bba78f324736013ad'),
KeyPair(private_key='5ac3d63452f2889c71a63c29b0c00910cec9e5eaf4ca6e3cff60eefbca95c868', public_key='c7e0d3bd8cc2856d30e153be0f8ce272f43f7fe9cdbe6516ddc1c6e7cd1e0d0a'),
KeyPair(private_key='886ff6e663761ad33630cfbfc79cd2eb7a70316bce4cf1b32d0e84c06a951bec', public_key='c84b18342faccb5ac4876ab45bb21c1f9b04c77ebf9e3b0a385f0a905d4b3cb9'),
KeyPair(private_key='efbef5c1a4fcc819be0bf16c0fc7e9aa336d1df8de20275ec48a1e304155ff67', public_key='c8a43da4213d1d5f08e37434ff18349252f76a00e6b094fec3e3f33c2d608a9d'),
KeyPair(private_key='df7085aed2e4539edfce03f38406c6a8f88de8641b9faea58825b7fcbcf2922f', public_key='c8a63841f2713cadce75a2d65346ff45303b5fc636f9aafdccb0ea42d4197cea'),
KeyPair(private_key='daefcee990638d86e2cebee856fe50196a948627f039e8f9a62092d340fdcaf9', public_key='cab5eee5ecd3269cefbf0bfa52eb9d33b87294fd2e70e22dcd0affaab9a21265'),
KeyPair(private_key='a2b2a0a4f4cdec292cb25632b6ec38fc8ac1193e1c9e54a9b7ebae6534e803d7', public_key='cb50e0300d4f9f778b263a1076eb30c5daef8df848a002d43d332f9259150fb5'),
KeyPair(private_key='8c347e977a659291ce68e8c6af6a13fb2179a9d7f024e41fc5861e2136c8cab6', public_key='cb8bc278c9723f4110892231218392fd79cde4935b79663fc4aab73bda123f98'),
KeyPair(private_key='21a87be840004792567503f18d9fb826b0b58ec247917a112ec387fceee551e8', public_key='cbd94b17386643aba084eed8d1ca463212606cf6b29e1afb37db00b2d744ff5b'),
KeyPair(private_key='4d38ca46baa1b3a5b1cf8ae9e58b1e97d754c20108a17eb324c67426ca924233', public_key='cd8d22ffbb8264f146a4b2f7bdf9493c8ff1a7de5d369f28b5afc2eb2f8ee7b0'),
KeyPair(private_key='c7185f6b03679fbdbed15cd348edde84aaae415a9ad7b4f2e58ce7a3ce7255c3', public_key='cf6d23e519a6a2c32b17ba67a9a8b7141a6c337b1a3842ab488802134018c9b2'),
KeyPair(private_key='a01032963f2e6a84c40ac8bd1482a9c7097c3393112f2053fc6074ebfdcb9277', public_key='d058be6d20fb172f486e3e0d83df46ea1ed74a9679a9150185feb10637077fdd'),
KeyPair(private_key='c79c69ddeab0ee420b1b7195ff10e772d2ba3e257bcb77e32d79787d26fec048', public_key='d0d5862db4f2b464206d78dd39bee3088f6e0ad7ff0cdbf42811fde8c3ae5719'),
KeyPair(private_key='64018160898cc1ffb4d8fa4b3c9746ba7307c940bc854be075a9f4cda9485984', public_key='d14707701f9ae7ea983789a9aa889a60f24a08bf56cd47419e7acc7893bf35bb'),
KeyPair(private_key='89f542c1d513dd1595692746c9c69feba7c07e52486429c9360793606cc241c5', public_key='d1f9fe260e4e1a1f3e53b31416e0baeef026877d0cde04509aed5adf1ad7276c'),
KeyPair(private_key='b182f40c052d48f0d63ee56e3a991b9e7af683ad370346c536363fcfe0138cc3', public_key='d23102929f45fff90151d5c7a8cc7fe248041edf39d06062ea7d16c06150b3f9'),
KeyPair(private_key='f58d49032a5d0ba87b7d0cd226a0edd3b13bdd5ebb3c0cb22a7da0ff0f72182d', public_key='d232a111760dd22ee0b936c2190197512c23401db109f25ed2452b3815500b09'),
KeyPair(private_key='f0a2b8c172a3b20478a9e3b559bf65a7c19b6ff098c479aa4789a9b1cf3a2872', public_key='d2a26fa1747ebe6a9e34ab29aefba3bd0ffe5c3f9c99aa59fc555fa3a83d812e'),
KeyPair(private_key='09b98af59fab625ddf927d00864b035ec16bcb316bdeac139e4d7868dc81b3c3', public_key='d3649a297d58c0718cf5d8840bb3989073c2644b3a8b2d3bde5ebb63fccf4033'),
KeyPair(private_key='1081cbeb0c21a7e40f2a2e493fffb206b5fe79936c78c7d083e2175120c7e867', public_key='d39b15f57886896e95690c98f81203c120648b7bcb8f33f192f47604c67a12cb'),
KeyPair(private_key='df92e3727597cccb0e5c8e942d68c89a353c17f4b3f241ac1c8d17e5ab090b92', public_key='d4cea5e37884423503037417f14fcd9a10fd31f992605592e401b6f0574e5d63'),
KeyPair(private_key='2073aa8ff2890cbcccbae6c0500b925400b43ef143010a3192415298617591c2', public_key='d5266703bdd89e64ced50b1b903f3515435d0bd11adc7af7cbde60826a527eef'),
KeyPair(private_key='0dbc68d7eeeab16c6f8af06b00e43914f1fb226b6b8b1f06fcad416d0ba0af60', public_key='d612d1b6e01bac6b333a37235d2c4ce315b0b0fd3d6d88071c324b83c20807f3'),
KeyPair(private_key='a848d9aaa4c51c0508e0b405182f668c47bbdfb2d04e8f13b61ba087e5cdc61d', public_key='d617d552f47373ebbb13dbec6eb0fb8b543d28be145304cca2c0862ea4401cc1'),
KeyPair(private_key='13f5ed9b9ea2ec588d6bba04a6fb605d273f04ec45a6808aed52ea33781e6218', public_key='d652a8bfc34d42f93b43a67ab11717340f443174f1b33c3d7812145a61391f10'),
KeyPair(private_key='b597f7afab8969ec4a2c1400af050a0f427f727772be7aa7560c8aad82609f5f', public_key='d6b0c603a51fbcd56db1606a0591310876f93bdbf2309c7bdbc277a080368990'),
KeyPair(private_key='eebf2ceeeeaf271dab04f5d587d6950acf66619903d25efb3a741216e117cdd9', public_key='d876102991aecbaf8b88132bfa70427e4640837f4cbbfce585b4ff056060224a'),
KeyPair(private_key='c41292f4cb308f7470e2e4003bb5bde10d7fa06d154107572e53c3198d94f524', public_key='d93aa64668e966bdaf16d7458b319852c55e880735fb73e494df96828962e82c'),
KeyPair(private_key='9f22c452b5df293fc6d92f245c536fdc178c9c719a27e32dcdcabf4782e0f04d', public_key='da67ddd0133b29ee68373fbfab4cc32719d368a09f62d56b8a48aa8643469f91'),
KeyPair(private_key='c8646c150433cc46bf9bb2ed734133d7ab3b6d3d1af3d2980c87ba6d406a92d3', public_key='dc388ff21bb6fb21c932f635c76a23be3ba9051d4e1b356cd5b4a9c27a9a828d'),
KeyPair(private_key='8450b8884082959a7b5e91f3f60707b51adb4813b8a5e81169f681283ba59812', public_key='dcf046fd29eaa5dc4f586feed119e1b76d6504bfcfb0227213010a6ef18a4c77'),
KeyPair(private_key='8c4109bc8250fa2b6bb24bae5e6c719d4092c51cb714401563f0add65049bd18', public_key='dfb5463ccb376d31b2285c402cc6bcf176a52bdd4139d12a8c3ae73e84cf7f90'),
KeyPair(private_key='602e797c1472182fdaa28bec971559a0c2bf75e56bb719f89d50455b7a8e2b84', public_key='e1dfd97285bf66e86b926d338a8b304854a89cd1b58f0a92da2ae1ead4742660'),
KeyPair(private_key='ba5e1b70043b0fa0ff5b8498aa16211e074ce2d81be175f7dd540629372a33f7', public_key='e2ba03d384ec799ca0e4472db07da7322c8e322f40e6b1793dc9a03d2fd48496'),
KeyPair(private_key='844620cd1839a059e4e5dc85eac4ea1fa67c70fc67e0126f3b10b0fb4e8f11d8', public_key='e2c4cae110f716a7264a339944c43a33cde4b1a2167d16af4f2216efdbf1489f'),
KeyPair(private_key='ff2ba092524bafdbc85fa0c7eddb2b41c69bc9bf066a4711a8a16f749199e5be', public_key='e2d64cea324a5a890326fc64c302cae1829d34ff4a584ff6a18e5026ad21f31f'),
KeyPair(private_key='0165029260090bdb552456569b655412e5f2b949e98c2998950156049ba1d455', public_key='e4117745b3896cad40ff11f849471145af424ff201a6bd6d8964c5a291ebbce6'),
KeyPair(private_key='0475842da178f373612e4f88832b2c2b03127e338469723cc02b8b69e551b096', public_key='e4f8a55de8e8bc6141750455fe512306207de7d5519f481900b86aedaaf3b902'),
KeyPair(private_key='79d764bce41d0922e7d7bbf20288fc38efbc26a6aef6ebf790c45fe06f39a5c3', public_key='e5126088c5f26503a03f046040ae2e7740e33b2f15e16a1beafd94829f467cfc'),
KeyPair(private_key='91cedd4ed754cafaea9615fe234452b38cf70e3766d301da1f98b806f3ba7fd1', public_key='e56725d157990a26be17301fc08121ff704c0af2b70a878236283ad042fba8fd'),
KeyPair(private_key='2b2bdfe4288b705896a93949590fcde9cba2782d672708d3ce639111d422a551', public_key='e63c62e1eea4e76d9a725893ae01048330a8ec1f211a756113433ecd70758978'),
KeyPair(private_key='093dc495f13f3985ef66099088900e8c0d0dfb1fe57435b7d2ac397ffa31de0f', public_key='e684e54e61b427c3c503aa47c1a4a479d89fa4a720125c3e806fbda7ec73170b'),
KeyPair(private_key='26c705fba73c9810923ff5a9e0bf6574db70bc2d13071314efa90f1226ef0a6f', public_key='e6ecd9b3f360df833f45f263aa958cb1dfbbc425fba65310119fce5692502ff5'),
KeyPair(private_key='7fa16da7abee6912ac95312ea64bbf0491e5d4f48782c29d07e87aed1498cb7c', public_key='e773aeb39ccbe965837928c150ca1dfd0ab93b71c49f294a9fbcc98b73b492d0'),
KeyPair(private_key='b601e6dc6b13637f15b629f045db0bb928b2d26fd731683b252f32c3c2a57e3d', public_key='e7b6c9032f92fc7d83701e3b749123feeab48e9a1213788021678e798a6733d1'),
KeyPair(private_key='e18814d51f0e0cb63f50b39377b923eab5496c6c783ab57c27ece7fd4c158244', public_key='e84b0ca96139b9c5e1a58b55e9d84682318a2030eaf24923204a1473ed659b34'),
KeyPair(private_key='bf6f7563b6415d874306a1f192947dd0ad3da66516ed040b84e4755e1693e1fe', public_key='e84bdac70ba27d0e1c08f0c3a161a0bb05b5a34aa6a5fd65e4f48fa8de21c343'),
KeyPair(private_key='597623f0b50e82008d52644983699724538b4307efbe3d0b992c651ca7f860c8', public_key='e8efbecd4f7067ec163a16e0ca6cf9b8c832b9cc9d2d1cc0ca1a642ca630aefb'),
KeyPair(private_key='d781da7d37bf253ac01f57567ef2405346fa9729c8bed3d072acdcd98385d941', public_key='e9e771d51a2dda33837449e6fc69fbe2d3e97cc7777349b858826d1944221d9a'),
KeyPair(private_key='bce040c25913f0408be99bf765cfef9dd8ac8bb6df8405897af5851e141b6aeb', public_key='ea6fb327eeb4190ecc5d7e50505e4b7ed251c703a86ac74d8f6c82c066cba89c'),
KeyPair(private_key='b1f9643e22ae0a86c640338e9a73b3c6e408b0fed475f73c91e6185e9c2942c6', public_key='eabe5a1a0750d2a8745709bb0bdb24f63c6a8ac3a887b9bed40b34b0598ddf08'),
KeyPair(private_key='a120b8b6eef9e744019c5a6d557203cdd7765e4a5ec0184c5f79d3869fb26286', public_key='eb6a22883a765cbec6496afd505e1793072d92ac0ee9afc8039469fba3ad9bcb'),
KeyPair(private_key='fddebf31c6b06a6bf34cb1ddb56250a894b9b84aa81834d53faddb79acf94884', public_key='eb8e3c22eb47eb213c975866d3291a728f767e2ec989563da911a44f9efaa132'),
KeyPair(private_key='9b792e08ac6a6b1520bc251206eb603a047846e17dc87490b2a2ec55cfe43362', public_key='ebe5941e739c2fa328fa5644af3d3a76367a91245c90f5d1691d851ec55f2921'),
KeyPair(private_key='50ac624d8b059e4ba8f3f7138f2b3ba10ede29392a8c2d4afe039d030b56477c', public_key='ed0c0e6fb361ffba50d9eb1c930548220aac65dedccdd08069acbcfcd28cc369'),
KeyPair(private_key='fc8e645817c668f22bc900988125afdfa89625402b128e5864768696cfd0e472', public_key='ee17847ce12206ba8ae98fe4d5b109c3054efbd06a8ab811dd7d54e9aec4a515'),
KeyPair(private_key='9cb1465f8f28b8344aefcf7ec22811fa0eb00697f843992fdc5433c0ed091743', public_key='eecdd882673d96f08dab25be4fda3183e1fcd88a29b5826a38bca66c767b5630'),
KeyPair(private_key='e2e35f930128dc9e6df1fd9bc5f20aecad750d9566b4d0e0d11626f88c68fce0', public_key='efc25d5149685deab7c8867e16ca80a49440bb440406aadfd4702edb56a11456'),
KeyPair(private_key='1f2139b852cbf440cfb20eadf5dc207bd0888bc12b16fd1733d1a24b5d348f30', public_key='f00cbd8764b13ac2c3a645ae7f4389123206a71feca2c36846aafa5f3d52e676'),
KeyPair(private_key='624967ac27e4725192cff82c2389cce074eae79303443425c71d24ab1b5932bb', public_key='f03af1761ecaad90249d0c8c9798135cbf4f41db5d0ed07f9eda35051dc3ebef'),
KeyPair(private_key='217e511435c647f504b4da629b80667efb52146618dc374ac34b8e80a26e64d4', public_key='f3854680cb3e1122108b2c328c581506cd6ad0008bf00078544d5da2b45a6195'),
KeyPair(private_key='a1ad0b1d75c03d14d6b6ffc2f2c16ae73eaf751148e1c1473863b5b87ca22bc7', public_key='f3d6a7556f6b5bb2cad53756699143c40b4e2a536bcf558125f3c1aafec9aa6d'),
KeyPair(private_key='2d66696bee4bacd4ebbdf8285b447d1d9d6571c9de88721f193c9848ce6776e7', public_key='f4925f2ad782a8b03cbb9805aa1a9182ff9a728ab8158d5ad8110f20669f4971'),
KeyPair(private_key='d5d6d7a433ac9e3ed415607e723c614db13d56b307e7e4890867353a973368f8', public_key='f65c22f8bd6ad8dab34e5507f609b7a4d5732deba8446bd85ba4a0f933cb8f1f'),
KeyPair(private_key='415e0da84949fc64e92dfd8114d5647f31d38e5b36a6d98416891b46e176b095', public_key='f6963b6c1ca127e945d391e50ac1896b0a298ef7495ee55d32b84e462ef6f050'),
KeyPair(private_key='b37d119d584232ee9c1ace08d3dde80c7d87b1cf441bba68cd252e83a607410b', public_key='f7382d55a40fae6614da8f25ade5fc90e3439d02a74b576bb762936dab3ed021'),
KeyPair(private_key='53d9c017de6a5250260d08356e3e98191b2d504ad27e81dd499e614b55d89764', public_key='f7499d2ba3e6672b1058b5196fa1a83fa529c431643ad2902aef7fd39f742db7'),
KeyPair(private_key='cd76fbc9e3f3b50dcbc096538acb37ef21ca7e5d3c7801c563ef73fe6439eb83', public_key='f86d67e5ed0c9956a83e72f11106da781460170ae4ae5d7381f5a46eba4feeb3'),
KeyPair(private_key='01174ded759806aa0c42a035c4d7ee035eada828079d9ed6f387cae11e0def87', public_key='fa6393dd55f96136127aae4c950bdb18c33a5901640ee59a866c03c62f2c1450'),
KeyPair(private_key='469bc177b014b24484ae8fba945db3d502a9123191fbed55d257008be0742588', public_key='fbb99a283794505d648cc4fdaa8c6b62d4677caec881ff955b46f8f30e23483d'),
KeyPair(private_key='9a32ff7b7c6e25527e0b4e5bec70596c6094e6529d56bf61cbd1ca26d3e92b10', public_key='fc99ed51e788bd040d4685721c46aaa580de06190cb77e51e6e3e50d38b1c6a2'),
KeyPair(private_key='4cb91a5aea035873824020844a94f7dea743e1094d86e652462dc6eee9ba2f56', public_key='fd6470fcca3641fbad339f918ba477518d8b24ac068b665c2186c39a9114a29a'),
KeyPair(private_key='07079abafe8e84ce1c6de0ad7199f9e8d09d40c22b5821dde854f58c15145f9b', public_key='fdfec879b7c80b212c2cd79fe98108893e9ae6d924702e3b87ae6a27aeb1aef4'),
KeyPair(private_key='b441a58a4f6d6696cae9e962f08240fd869d9e108e11b0f194b8e0ed1b7e5fd2', public_key='fe27b935f69c97455fef205ff9c56d554a2b766091c03a054401e3884936d31b'),
KeyPair(private_key='511dc7f1333730838246b756b98fbaa0cc2fad035371e0a438c5bb984029a6ea', public_key='ff28ba26d76068d049dc35f259054433fab01b09027097657e0968ae8da65513'),
KeyPair(private_key='2b85f5f6f53bcde111c49e10659e3e56a59ca6a1c23d8e8df3b000b225ed1c8f', public_key='ffe280d1968e65a546076e52255eb2d4c7d17c187d75c1954e7b96aac5e6a0f7'),
]
| 168.888889
| 171
| 0.90685
| 2,108
| 51,680
| 21.946869
| 0.289848
| 0.090783
| 0.110237
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.482254
| 0.035023
| 51,680
| 305
| 172
| 169.442623
| 0.445438
| 0
| 0
| 0
| 1
| 0
| 0.743034
| 0.743034
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.0033
| 0
| 0.0033
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7713608ade06559daa26425927316ef6c8372d80
| 24,724
|
py
|
Python
|
tests/types/test_ip.py
|
Fozar/clickhouse-sqlalchemy
|
88fd630856655cc470430b365dce7e85516abf62
|
[
"MIT"
] | null | null | null |
tests/types/test_ip.py
|
Fozar/clickhouse-sqlalchemy
|
88fd630856655cc470430b365dce7e85516abf62
|
[
"MIT"
] | null | null | null |
tests/types/test_ip.py
|
Fozar/clickhouse-sqlalchemy
|
88fd630856655cc470430b365dce7e85516abf62
|
[
"MIT"
] | null | null | null |
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
from sqlalchemy import Column, and_
from sqlalchemy.sql.ddl import CreateTable
from clickhouse_sqlalchemy import types, engines, Table
from tests.testcase import BaseTestCase
from tests.util import with_native_and_http_sessions
@with_native_and_http_sessions
class IPv4TestCase(BaseTestCase):
    """Exercise the ClickHouse IPv4 column type end to end.

    Covers DDL compilation, insert/select round-trips (both
    ``ipaddress.IPv4Address`` objects and plain strings), comparison
    filters, and ``in_``/``notin_`` against networks, address lists,
    mixed lists, and empty lists. Runs against a live server under
    both native and HTTP sessions via the class decorator.
    """
    # Minimum server version shipping the IPv4/IPv6 column types.
    required_server_version = (19, 3, 3)
    # Single-column Memory table shared by every test in this class.
    table = Table(
        'test', BaseTestCase.metadata(),
        Column('x', types.IPv4),
        engines.Memory()
    )
    def test_create_table(self):
        """DDL should render the column as the native IPv4 type."""
        self.assertEqual(
            self.compile(CreateTable(self.table)),
            'CREATE TABLE test (x IPv4) ENGINE = Memory'
        )
    def test_select_insert(self):
        """An IPv4Address round-trips unchanged through insert/select."""
        a = IPv4Address('10.0.0.1')
        with self.create_table(self.table):
            self.session.execute(self.table.insert(), [{'x': a}])
            self.assertEqual(self.session.query(self.table.c.x).scalar(), a)
    def test_select_insert_string(self):
        """A plain string insert is read back as an IPv4Address."""
        a = '10.0.0.1'
        with self.create_table(self.table):
            self.session.execute(self.table.insert(), [{'x': a}])
            self.assertEqual(self.session.query(self.table.c.x).scalar(),
                             IPv4Address('10.0.0.1'))
    def test_select_where_address(self):
        """Equality and range comparisons accept IPv4Address operands."""
        a = IPv4Address('10.0.0.1')
        with self.create_table(self.table):
            self.session.execute(self.table.insert(), [{'x': a}])
            self.assertEqual(self.session.query(self.table.c.x).filter(
                self.table.c.x == IPv4Address('10.0.0.1')).scalar(), a)
            self.assertEqual(self.session.query(self.table.c.x).filter(
                and_(IPv4Address('10.0.0.0') < self.table.c.x,
                     self.table.c.x < IPv4Address('10.0.0.2'))).scalar(), a)
    def test_select_where_string(self):
        """Range comparisons also accept plain string operands."""
        a = IPv4Address('10.0.0.1')
        with self.create_table(self.table):
            self.session.execute(self.table.insert(), [{'x': a}])
            self.assertEqual(self.session.query(self.table.c.x).filter(
                and_('10.0.0.0' < self.table.c.x,
                     self.table.c.x < '10.0.0.2')).scalar(), a)
    def test_select_where_literal(self):
        """Literal binds should render through the toIPv4() conversion."""
        a = IPv4Address('10.0.0.1')
        with self.create_table(self.table):
            self.session.execute(self.table.insert(), [{'x': a}])
            qs = self.session.query(self.table.c.x).filter(
                self.table.c.x == '10.0.0.1'
            )
            statement = self.compile(qs, literal_binds=True)
            self.assertEqual(statement,
                             "SELECT test.x AS test_x FROM test "
                             "WHERE test.x = toIPv4('10.0.0.1')")
    def test_select_in_network(self):
        """in_() with an IPv4Network matches addresses inside the subnet."""
        ips = [
            IPv4Address('10.0.0.1'),
            IPv4Address('10.0.0.2'),
            IPv4Address('10.0.0.3'),
            IPv4Address('192.168.0.1')
        ]
        with self.create_table(self.table):
            self.session.execute(self.table.insert(),
                                 [{'x': ip} for ip in ips])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    self.table.c.x.in_(IPv4Network('10.0.0.0/8'))).all(), [
                    (IPv4Address('10.0.0.1'),),
                    (IPv4Address('10.0.0.2'),),
                    (IPv4Address('10.0.0.3'),)
                ])
    def test_select_in_list_address(self):
        """in_() with a mixed list of IPv4Address objects and strings."""
        ips = [
            IPv4Address('10.0.0.1'),
            IPv4Address('10.0.0.2'),
            IPv4Address('10.0.0.3'),
            IPv4Address('192.168.0.1')
        ]
        with self.create_table(self.table):
            self.session.execute(self.table.insert(),
                                 [{'x': ip} for ip in ips])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    self.table.c.x.in_([
                        IPv4Address('10.0.0.1'),
                        IPv4Address('10.0.0.2'),
                        '10.0.0.3'
                    ])).all(), [
                    (IPv4Address('10.0.0.1'),),
                    (IPv4Address('10.0.0.2'),),
                    (IPv4Address('10.0.0.3'),)
                ])
    def test_select_in_list_network(self):
        """in_() with a list of CIDR strings matches either subnet."""
        ips = [
            IPv4Address('10.0.0.1'),
            IPv4Address('10.0.0.2'),
            IPv4Address('10.1.0.3'),
            IPv4Address('192.168.0.1')
        ]
        with self.create_table(self.table):
            self.session.execute(self.table.insert(),
                                 [{'x': ip} for ip in ips])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    self.table.c.x.in_(['10.0.0.0/24',
                                        '10.1.0.0/24'])).all(), [
                    (IPv4Address('10.0.0.1'),),
                    (IPv4Address('10.0.0.2'),),
                    (IPv4Address('10.1.0.3'),)
                ])
    def test_select_in_list_network_and_address(self):
        """in_() with a list mixing a CIDR string and a bare address."""
        ips = [
            IPv4Address('10.0.0.1'),
            IPv4Address('10.0.0.2'),
            IPv4Address('10.1.0.3'),
            IPv4Address('192.168.0.1')
        ]
        with self.create_table(self.table):
            self.session.execute(self.table.insert(),
                                 [{'x': ip} for ip in ips])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    self.table.c.x.in_(['10.0.0.0/24', '10.1.0.3'])).all(), [
                    (IPv4Address('10.0.0.1'),),
                    (IPv4Address('10.0.0.2'),),
                    (IPv4Address('10.1.0.3'),)
                ])
    def test_select_in_list_empty(self):
        """in_() with an empty list matches nothing."""
        ips = [
            IPv4Address('10.0.0.1'),
            IPv4Address('10.0.0.2'),
            IPv4Address('10.1.0.3'),
            IPv4Address('192.168.0.1')
        ]
        with self.create_table(self.table):
            self.session.execute(self.table.insert(),
                                 [{'x': ip} for ip in ips])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    self.table.c.x.in_([])).all(), [])
    def test_select_in_string(self):
        """in_() with a single CIDR string is treated as a network."""
        ips = [
            IPv4Address('10.0.0.1'),
            IPv4Address('10.0.0.2'),
            IPv4Address('10.0.0.3'),
            IPv4Address('192.168.0.1')
        ]
        with self.create_table(self.table):
            self.session.execute(self.table.insert(),
                                 [{'x': ip} for ip in ips])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    self.table.c.x.in_('10.0.0.0/8')).all(), [
                    (IPv4Address('10.0.0.1'),),
                    (IPv4Address('10.0.0.2'),),
                    (IPv4Address('10.0.0.3'),)
                ])
    def test_select_not_in_network(self):
        """notin_() and ~in_() with a network exclude its addresses."""
        ips = [
            IPv4Address('10.0.0.1'),
            IPv4Address('10.0.0.2'),
            IPv4Address('10.0.0.3'),
            IPv4Address('192.168.0.1')
        ]
        with self.create_table(self.table):
            self.session.execute(self.table.insert(),
                                 [{'x': ip} for ip in ips])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    self.table.c.x.notin_(IPv4Network('10.0.0.0/8'))).all(), [
                    (IPv4Address('192.168.0.1'),)
                ])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    ~self.table.c.x.in_(IPv4Network('10.0.0.0/8'))).all(), [
                    (IPv4Address('192.168.0.1'),)
                ])
    def test_select_not_in_string(self):
        """notin_() and ~in_() with a CIDR string exclude that subnet."""
        ips = [
            IPv4Address('10.0.0.1'),
            IPv4Address('10.0.0.2'),
            IPv4Address('10.0.0.3'),
            IPv4Address('192.168.0.1')
        ]
        with self.create_table(self.table):
            self.session.execute(self.table.insert(),
                                 [{'x': ip} for ip in ips])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    self.table.c.x.notin_('10.0.0.0/8')).all(), [
                    (IPv4Address('192.168.0.1'),)
                ])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    ~self.table.c.x.in_('10.0.0.0/8')).all(), [
                    (IPv4Address('192.168.0.1'),)
                ])
    def test_select_not_in_list_address(self):
        """notin_() and ~in_() with a list of address strings."""
        ips = [
            IPv4Address('10.0.0.1'),
            IPv4Address('10.0.0.2'),
            IPv4Address('10.1.0.3'),
            IPv4Address('192.168.0.1')
        ]
        with self.create_table(self.table):
            self.session.execute(self.table.insert(),
                                 [{'x': ip} for ip in ips])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    self.table.c.x.notin_(['10.0.0.2', '10.1.0.3'])).all(), [
                    (IPv4Address('10.0.0.1'),),
                    (IPv4Address('192.168.0.1'),)
                ])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    ~self.table.c.x.in_(['10.0.0.2', '10.1.0.3'])).all(), [
                    (IPv4Address('10.0.0.1'),),
                    (IPv4Address('192.168.0.1'),)
                ])
    def test_select_not_in_list_network(self):
        """notin_() and ~in_() with a list of CIDR strings."""
        ips = [
            IPv4Address('10.0.0.1'),
            IPv4Address('10.0.0.2'),
            IPv4Address('10.1.0.3'),
            IPv4Address('192.168.0.1')
        ]
        with self.create_table(self.table):
            self.session.execute(self.table.insert(),
                                 [{'x': ip} for ip in ips])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    self.table.c.x.notin_(['10.0.0.0/24',
                                           '10.1.0.0/24'])).all(), [
                    (IPv4Address('192.168.0.1'),)
                ])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    ~self.table.c.x.in_(['10.0.0.0/24',
                                         '10.1.0.0/24'])).all(), [
                    (IPv4Address('192.168.0.1'),)
                ])
    def test_select_not_in_list_network_address(self):
        """notin_() and ~in_() with a CIDR string plus a bare address."""
        ips = [
            IPv4Address('10.0.0.1'),
            IPv4Address('10.0.0.2'),
            IPv4Address('10.1.0.3'),
            IPv4Address('192.168.0.1')
        ]
        with self.create_table(self.table):
            self.session.execute(self.table.insert(),
                                 [{'x': ip} for ip in ips])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    self.table.c.x.notin_(['10.0.0.0/24', '10.1.0.3'])).all(),
                [
                    (IPv4Address('192.168.0.1'),)
                ])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    ~self.table.c.x.in_(['10.0.0.0/24', '10.1.0.3'])).all(),
                [
                    (IPv4Address('192.168.0.1'),)
                ])
    def test_select_not_in_list_empty(self):
        """notin_() and ~in_() with an empty list match every row."""
        ips = [
            IPv4Address('10.0.0.1'),
            IPv4Address('10.0.0.2'),
            IPv4Address('10.1.0.3'),
            IPv4Address('192.168.0.1')
        ]
        with self.create_table(self.table):
            self.session.execute(self.table.insert(),
                                 [{'x': ip} for ip in ips])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    self.table.c.x.notin_([])).all(), [
                    (IPv4Address('10.0.0.1'),),
                    (IPv4Address('10.0.0.2'),),
                    (IPv4Address('10.1.0.3'),),
                    (IPv4Address('192.168.0.1'),)
                ])
            self.assertEqual(
                self.session.query(self.table.c.x).filter(
                    ~self.table.c.x.in_([])).all(), [
                    (IPv4Address('10.0.0.1'),),
                    (IPv4Address('10.0.0.2'),),
                    (IPv4Address('10.1.0.3'),),
                    (IPv4Address('192.168.0.1'),)
                ])
@with_native_and_http_sessions
class IPv6TestCase(BaseTestCase):
    """Exercise the ClickHouse IPv6 column type end to end.

    Mirrors IPv4TestCase: DDL compilation, insert/select round-trips,
    comparison filters, and in_()/notin_() against networks, address
    lists, mixed lists, and empty lists.
    """
    # Consistency fix: IPv4TestCase runs under both native and HTTP
    # sessions via @with_native_and_http_sessions; this class previously
    # lacked the decorator, so its tests only covered one transport.
    # Minimum server version shipping the IPv4/IPv6 column types.
    required_server_version = (19, 3, 3)
    # Single-column Memory table shared by every test in this class.
    table = Table(
        'test', BaseTestCase.metadata(),
        Column('x', types.IPv6),
        engines.Memory()
    )
def test_select_insert(self):
a = IPv6Address('79f4:e698:45de:a59b:2765:28e3:8d3a:35ae')
with self.create_table(self.table):
self.session.execute(self.table.insert(), [{'x': a}])
self.assertEqual(self.session.query(self.table.c.x).scalar(), a)
def test_select_insert_string(self):
a = '79f4:e698:45de:a59b:2765:28e3:8d3a:35ae'
with self.create_table(self.table):
self.session.execute(self.table.insert(), [{'x': a}])
self.assertEqual(self.session.query(self.table.c.x).scalar(),
IPv6Address(a))
def test_create_table(self):
self.assertEqual(
self.compile(CreateTable(self.table)),
'CREATE TABLE test (x IPv6) ENGINE = Memory'
)
def test_select_where_address(self):
a = IPv6Address('42e::2')
with self.create_table(self.table):
self.session.execute(self.table.insert(), [{'x': a}])
self.assertEqual(self.session.query(self.table.c.x).filter(
self.table.c.x == IPv6Address('42e::2')).scalar(), a)
self.assertEqual(self.session.query(self.table.c.x).filter(
and_(IPv6Address('42e::1') < self.table.c.x,
self.table.c.x < IPv6Address('42e::3'))).scalar(), a)
def test_select_where_string(self):
a = IPv6Address('42e::2')
with self.create_table(self.table):
self.session.execute(self.table.insert(), [{'x': a}])
self.assertEqual(self.session.query(self.table.c.x).filter(
self.table.c.x == '42e::2').scalar(), a)
self.assertEqual(self.session.query(self.table.c.x).filter(
and_('42e::1' < self.table.c.x,
self.table.c.x < '42e::3')).scalar(), a)
def test_select_where_literal(self):
a = IPv6Address('42e::2')
with self.create_table(self.table):
self.session.execute(self.table.insert(), [{'x': a}])
qs = self.session.query(self.table.c.x).filter(
self.table.c.x == '42e::2')
statement = self.compile(qs, literal_binds=True)
self.assertEqual(statement,
"SELECT test.x AS test_x FROM test "
"WHERE test.x = toIPv6('42e::2')")
def test_select_in_network(self):
ips = [
IPv6Address('42e::1'),
IPv6Address('42e::2'),
IPv6Address('42e::3'),
IPv6Address('f42e::ffff')
]
with self.create_table(self.table):
self.session.execute(self.table.insert(),
[{'x': ip} for ip in ips])
self.assertEqual(
self.session.query(self.table.c.x).filter(
self.table.c.x.in_(IPv6Network('42e::/64'))).all(), [
(IPv6Address('42e::1'),),
(IPv6Address('42e::2'),),
(IPv6Address('42e::3'),)
])
def test_select_in_list_address(self):
ips = [
IPv6Address('42e::1'),
IPv6Address('42e::2'),
IPv6Address('7::3'),
IPv6Address('f42e::ffff')
]
with self.create_table(self.table):
self.session.execute(self.table.insert(),
[{'x': ip} for ip in ips])
self.assertEqual(
self.session.query(self.table.c.x).filter(
self.table.c.x.in_(['42e::1', '42e::2',
'f42e::ffff'])).all(), [
(IPv6Address('42e::1'),),
(IPv6Address('42e::2'),),
(IPv6Address('f42e::ffff'),)
])
def test_select_in_list_network(self):
ips = [
IPv6Address('42e::1'),
IPv6Address('42e::2'),
IPv6Address('a42e::3'),
IPv6Address('f42e::ffff')
]
with self.create_table(self.table):
self.session.execute(self.table.insert(),
[{'x': ip} for ip in ips])
self.assertEqual(
self.session.query(self.table.c.x).filter(
self.table.c.x.in_([IPv6Network('42e::/64'),
IPv6Network('a42e::/48')])).all(), [
(IPv6Address('42e::1'),),
(IPv6Address('42e::2'),),
(IPv6Address('a42e::3'),)
])
def test_select_in_list_network_address(self):
ips = [
IPv6Address('42e::1'),
IPv6Address('42e::2'),
IPv6Address('a42e::3'),
IPv6Address('f42e::ffff')
]
with self.create_table(self.table):
self.session.execute(self.table.insert(),
[{'x': ip} for ip in ips])
self.assertEqual(
self.session.query(self.table.c.x).filter(
self.table.c.x.in_(['42e::/64', 'a42e::3'])).all(), [
(IPv6Address('42e::1'),),
(IPv6Address('42e::2'),),
(IPv6Address('a42e::3'),)
])
def test_select_in_list_empty(self):
ips = [
IPv6Address('42e::1'),
IPv6Address('42e::2'),
IPv6Address('a42e::3'),
IPv6Address('f42e::ffff')
]
with self.create_table(self.table):
self.session.execute(self.table.insert(),
[{'x': ip} for ip in ips])
self.assertEqual(
self.session.query(self.table.c.x).filter(
self.table.c.x.in_([])).all(), [])
def test_select_in_string(self):
ips = [
IPv6Address('42e::1'),
IPv6Address('42e::2'),
IPv6Address('42e::3'),
IPv6Address('f42e::ffff')
]
with self.create_table(self.table):
self.session.execute(self.table.insert(),
[{'x': ip} for ip in ips])
self.assertEqual(
self.session.query(self.table.c.x).filter(
self.table.c.x.in_('42e::/64')).all(), [
(IPv6Address('42e::1'),),
(IPv6Address('42e::2'),),
(IPv6Address('42e::3'),)
])
def test_select_not_in_network(self):
    """notin_ and ~in_ with a single IPv6Network both exclude its members."""
    stored = [
        IPv6Address('42e::1'),
        IPv6Address('42e::2'),
        IPv6Address('42e::3'),
        IPv6Address('f42e::ffff')
    ]
    expected = [(IPv6Address('f42e::ffff'),)]
    with self.create_table(self.table):
        self.session.execute(
            self.table.insert(), [{'x': addr} for addr in stored])
        column = self.table.c.x
        # both spellings must produce the same exclusion
        for criterion in (column.notin_(IPv6Network('42e::/64')),
                          ~column.in_(IPv6Network('42e::/64'))):
            self.assertEqual(
                self.session.query(column).filter(criterion).all(),
                expected)
def test_select_not_in_string(self):
    """notin_ and ~in_ with a network string both exclude its members."""
    stored = [
        IPv6Address('42e::1'),
        IPv6Address('42e::2'),
        IPv6Address('42e::3'),
        IPv6Address('f42e::ffff')
    ]
    expected = [(IPv6Address('f42e::ffff'),)]
    with self.create_table(self.table):
        self.session.execute(
            self.table.insert(), [{'x': addr} for addr in stored])
        column = self.table.c.x
        # both spellings must produce the same exclusion
        for criterion in (column.notin_('42e::/64'),
                          ~column.in_('42e::/64')):
            self.assertEqual(
                self.session.query(column).filter(criterion).all(),
                expected)
def test_select_not_in_list_address(self):
    """notin_ and ~in_ over a list of address strings exclude exact matches."""
    stored = [
        IPv6Address('42e::1'),
        IPv6Address('42e::2'),
        IPv6Address('42e::3'),
        IPv6Address('f42e::ffff')
    ]
    expected = [
        (IPv6Address('42e::2'),),
        (IPv6Address('f42e::ffff'),),
    ]
    with self.create_table(self.table):
        self.session.execute(
            self.table.insert(), [{'x': addr} for addr in stored])
        column = self.table.c.x
        for criterion in (column.notin_(['42e::1', '42e::3']),
                          ~column.in_(['42e::1', '42e::3'])):
            self.assertEqual(
                self.session.query(column).filter(criterion).all(),
                expected)
def test_select_not_in_list_network(self):
    """notin_ and ~in_ over network strings exclude members of every network."""
    stored = [
        IPv6Address('1234::1'),
        IPv6Address('42e::2'),
        IPv6Address('beef::3'),
        IPv6Address('f42e::ffff')
    ]
    expected = [
        (IPv6Address('1234::1'),),
        (IPv6Address('f42e::ffff'),)
    ]
    with self.create_table(self.table):
        self.session.execute(
            self.table.insert(), [{'x': addr} for addr in stored])
        column = self.table.c.x
        for criterion in (column.notin_(['42e::/64', 'beef::/64']),
                          ~column.in_(['42e::/64', 'beef::/64'])):
            self.assertEqual(
                self.session.query(column).filter(criterion).all(),
                expected)
def test_select_not_in_list_network_address(self):
    """notin_ and ~in_ over a mixed network/address list exclude both kinds."""
    stored = [
        IPv6Address('1234::1'),
        IPv6Address('42e::2'),
        IPv6Address('beef::3'),
        IPv6Address('f42e::ffff')
    ]
    expected = [
        (IPv6Address('1234::1'),),
        (IPv6Address('f42e::ffff'),)
    ]
    with self.create_table(self.table):
        self.session.execute(
            self.table.insert(), [{'x': addr} for addr in stored])
        column = self.table.c.x
        for criterion in (column.notin_(['42e::/64', 'beef::3']),
                          ~column.in_(['42e::/64', 'beef::3'])):
            self.assertEqual(
                self.session.query(column).filter(criterion).all(),
                expected)
def test_select_not_in_list_empty(self):
    """notin_ and ~in_ over an empty list match every stored row."""
    stored = [
        IPv6Address('1234::1'),
        IPv6Address('42e::2'),
        IPv6Address('beef::3'),
        IPv6Address('f42e::ffff')
    ]
    expected = [
        (IPv6Address('1234::1'),),
        (IPv6Address('42e::2'),),
        (IPv6Address('beef::3'),),
        (IPv6Address('f42e::ffff'),)
    ]
    with self.create_table(self.table):
        self.session.execute(
            self.table.insert(), [{'x': addr} for addr in stored])
        column = self.table.c.x
        for criterion in (column.notin_([]), ~column.in_([])):
            self.assertEqual(
                self.session.query(column).filter(criterion).all(),
                expected)
| 36.199122
| 78
| 0.459392
| 2,741
| 24,724
| 4.060197
| 0.036483
| 0.135861
| 0.088058
| 0.096864
| 0.964238
| 0.956869
| 0.954893
| 0.940246
| 0.934406
| 0.921916
| 0
| 0.084816
| 0.372432
| 24,724
| 682
| 79
| 36.252199
| 0.632444
| 0
| 0
| 0.824348
| 0
| 0
| 0.092946
| 0.003155
| 0
| 0
| 0
| 0
| 0.088696
| 1
| 0.062609
| false
| 0
| 0.010435
| 0
| 0.083478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
771e9d64cebaf8a884ebf18b2b19976d81a110b9
| 1,864
|
py
|
Python
|
train_data/my_gen.py
|
qingyu95/ICDAR2017-DATASET
|
393b870b9e8b9690abb6957cf80de53961d1c92f
|
[
"Apache-2.0"
] | 1
|
2020-12-31T09:28:42.000Z
|
2020-12-31T09:28:42.000Z
|
train_data/my_gen.py
|
qingyu95/ICDAR2017-DATASET
|
393b870b9e8b9690abb6957cf80de53961d1c92f
|
[
"Apache-2.0"
] | null | null | null |
train_data/my_gen.py
|
qingyu95/ICDAR2017-DATASET
|
393b870b9e8b9690abb6957cf80de53961d1c92f
|
[
"Apache-2.0"
] | null | null | null |
import os
import json


def parse_label_line(line):
    """Parse one ICDAR2017 ground-truth line into a label dict.

    A line looks like: x1,y1,x2,y2,x3,y3,x4,y4,script,"transcription".
    Returns {'transcription': str, 'points': [[x, y], ...]} where the four
    corner points come from the first eight comma-separated fields and the
    transcription is the text between the last pair of double quotes.
    Raises (ValueError/IndexError) on malformed lines.
    """
    line = line.replace("\n", "")
    coords = line.split(',')[:8]
    return {
        'transcription': line.split('"')[-2],
        'points': [[int(coords[j]), int(coords[j + 1])]
                   for j in range(0, len(coords), 2)],
    }


def convert_dir(src_dir, out_path):
    """Convert every *.txt annotation in src_dir and append to out_path.

    One output line per annotation file: "<image>.jpg\\t<json labels>\\n".
    On the first malformed line of a file the error and file name are
    printed and the remaining lines of that file are skipped (matching the
    original best-effort behaviour); whatever was parsed is still written.
    """
    for name in os.listdir(src_dir):
        if not name.endswith(".txt"):
            continue
        output = []
        # utf-8-sig strips the BOM that ICDAR ground-truth files carry
        with open(src_dir + '/' + name, "r", encoding="utf-8-sig") as f:
            for line in f:
                try:
                    output.append(parse_label_line(line))
                except Exception as e:
                    print(e)
                    print(name)
                    break
        with open(out_path, "a", encoding="utf-8") as ff:
            ff.write(name[:-4] + '.jpg\t' + json.dumps(output) + '\n')


if __name__ == "__main__":
    # train and test splits go to separate label files
    convert_dir("ICDAR2017_train", "train_label.txt")
    convert_dir("ICDAR2017_test", "train_test.txt")
| 39.659574
| 74
| 0.425966
| 208
| 1,864
| 3.769231
| 0.269231
| 0.061224
| 0.066327
| 0.020408
| 0.910714
| 0.910714
| 0.910714
| 0.910714
| 0.910714
| 0.910714
| 0
| 0.027523
| 0.415236
| 1,864
| 47
| 75
| 39.659574
| 0.691743
| 0
| 0
| 0.869565
| 0
| 0
| 0.086863
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.043478
| 0
| 0.043478
| 0.086957
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
774f30fa2d31bc62f30ab3ecdd8496eb4cbc6757
| 15,798
|
py
|
Python
|
image_to_caption.py
|
Citaman/Image_To_Caption
|
82470bb8e4c1f386cc3b7e76f2a92bcd929bab02
|
[
"MIT"
] | null | null | null |
image_to_caption.py
|
Citaman/Image_To_Caption
|
82470bb8e4c1f386cc3b7e76f2a92bcd929bab02
|
[
"MIT"
] | null | null | null |
image_to_caption.py
|
Citaman/Image_To_Caption
|
82470bb8e4c1f386cc3b7e76f2a92bcd929bab02
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Image to Caption.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1AfoHgf3pf84EzaKqdCk7wR3bm7cCdmJ4
# Image To Caption
## Download and load the Dataset (Flickr 30k)
### Install kaggle and dataset
"""
import os
os.environ['KAGGLE_USERNAME'] = "YOUR_KAGGLE_USERNAME"
os.environ['KAGGLE_KEY'] = "YOUR_KAGGLE_KEY"
!kaggle datasets download -d hsankesara/flickr-image-dataset
!unzip flickr-image-dataset.zip
"""### Start variable"""
import pandas as pd
import matplotlib.pyplot as plt
df = pd.read_csv('flickr30k_images/results.csv',sep='|')
df.head(3)
df_new =df.rename(columns={' comment':'comment',' comment_number':'comment_number'})
df_new.at[30,'comment']
def inspect_datsete(n):
img =plt.imread('flickr30k_images/flickr30k_images/flickr30k_images/'+df_new.at[n,'image_name'])
plt.title(df_new.at[n,'comment'])
plt.imshow(img)
inspect_datsete(40)
len(df_new)
"""## Test model for text generation only"""
import tensorflow as tf
from tensorflow.keras.preprocessing.text import Tokenizer
import keras
import numpy as np
import spacy
import string
#!python -m spacy download en_core_web_lg
import en_core_web_sm
nlp = en_core_web_sm.load()
#nlp = spacy.load("en_core_web_lg")
def most_similar_vec(word, count=10):
    """Return the orth strings of the `count` vocab entries most similar to `word`."""
    ranked = sorted(word.vocab, key=word.similarity, reverse=True)
    return [entry.orth_ for entry in ranked[:count]]
sentence = df_new['comment']
sentence.fillna(' ')
#table = str.maketrans('', '', string.punctuation)
dictionary = []
for e in sentence:
try:
a = [word.lower() for word in e.strip().split(' ') if word.isalpha()]
dictionary +=a
except:
print(e)
len(dictionary)
dict_set = sorted(set(dictionary))
len(dict_set)
dict_set[:10]
''' TRreeees longgggg
count_word= []
for e in dict_set:
count_word.append((e,dictionary.count(e)))
sort_count_word = sorted(count_word,key= lambda x : x[1],reverse=True)
sort_count_word[:1000]
'''
# Rebuild each caption as "<start> ... <end>", keeping only alphabetic words.
# NOTE(review): the extracted source lost the `except` clause here, leaving a
# bare `try:` (SyntaxError); reconstructed to match the identical best-effort
# pattern used on the `dictionary` loop above (print the offending value).
corrected_sent = []
for i, e in enumerate(sentence):
    try:
        corrected_sent.append('<start> '+(' '.join([word.lower() for word in e.strip().split(' ') if word.isalpha()]))+' <end>')
    except Exception:
        # NaN comments are floats and have no .strip(); report and skip them
        print(e)
len(corrected_sent)
#vocab_size = len(dict_set)+2
vocab_size = 5000
tokenizer = tf.keras.preprocessing.text.Tokenizer(num_words=vocab_size,oov_token="<unk>",filters='!"#$%&()*+,-./:;=?@[\\]^_`{|}~\t\n',split=' ', char_level=False)
tokenizer.fit_on_texts(corrected_sent)
a = tokenizer.texts_to_sequences(corrected_sent)
len(a),int(len(a)/4)
from functools import reduce
max_length = len(reduce(lambda x,y : x if len(x)>len(y)else y,a))
x,y = [],[]
count =0
for e in a[:int(len(a)/pow(2,6))] :
for i,_ in enumerate(e[1:]) :
x.append(tf.keras.preprocessing.sequence.pad_sequences([e[:i+1]], maxlen=max_length)[0])
y.append(tf.keras.utils.to_categorical(e[i+1],vocab_size))
count+=1
print(count)
#len(x),len(y)
len(x),len(y)
from tensorflow.keras.layers import Input , LSTM , Embedding,Dense,Dropout
from tensorflow.keras.callbacks import LambdaCallback
from tensorflow.keras.models import Model
input = Input(shape=(max_length,))
embedding = Embedding(vocab_size,256,mask_zero=False)(input)
drop = Dropout(0.5)(embedding)
lstm = LSTM(256)(drop)
output = Dense(vocab_size,activation='softmax')(lstm)
model = Model(inputs=input, outputs=output)
model.compile(loss='categorical_crossentropy', optimizer='adam',metrics=['accuracy'])
tf.keras.utils.plot_model(model, "my_first_model.png",show_shapes=True,show_layer_names=False,dpi=150,expand_nested=False,rankdir='LR')
def test_model(batch, _):
    """Greedy-decode a caption from the text-only LSTM and print it.

    Used as a LambdaCallback at each epoch end (batch/_ are the unused
    callback arguments).  Starts from the '<start>' token, repeatedly
    feeds the padded prefix to the module-level `model`, appends the
    argmax token, and stops at '<end>' or after `max_length` steps.
    Relies on module globals: `tokenizer`, `model`, `max_length`.
    """
    #_could_use_gpu_kernel = False
    intext ='<start>'
    for _ in range(max_length):
        test_seq = tokenizer.texts_to_sequences([intext])
        #print(test_seq)
        pad_test_seq = tf.keras.preprocessing.sequence.pad_sequences([test_seq],maxlen=max_length)[0]
        #print(pad_test_seq)
        res = model.predict(pad_test_seq)
        # greedy decoding: always take the most probable next token
        intext += ' '+tokenizer.index_word[res[-1].argmax()]
        #print([e.argmax() for e in res])
        if tokenizer.index_word[res[-1].argmax()] == '<end>':
            break
    print('\n',intext)
model.fit(np.array(x),np.array(y),batch_size=64,epochs=20,callbacks=[LambdaCallback(on_epoch_end=test_model)])
!nvidia-smi
test_model(None,None)
"""## Actual Model for image captionning (word)
###Preprocessing
####Import
"""
import tensorflow as tf
from tensorflow.keras.preprocessing.text import Tokenizer
import numpy as np
import pickle
from keras.applications.vgg16 import VGG16
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import load_img
from keras.preprocessing.image import load_img
from keras.preprocessing.image import img_to_array
from keras.applications.vgg16 import preprocess_input
import random
"""#### Variable"""
df_new
image_dataset = df_new['image_name']
image_dataset
comment_dataset = df_new['comment']
comment_dataset
uni_image = image_dataset.unique()
comment_dataset_train=[]
comment_dataset.replace(np.nan, '', regex=True)
for i,e in enumerate(comment_dataset):
try:
comment_dataset_train.append('<start> '+(' '.join([word.lower() for word in e.strip().split(' ') if word.isalpha() ]))+' <end>')
except:
print(e, type(e))
comment_dataset_train.append('<start> '+'nan'+' <end>')
continue
df_new['comment_train'] = comment_dataset_train
df_new
vocab_size = 5000
tokenizer = tf.keras.preprocessing.text.Tokenizer(num_words=vocab_size,oov_token="<unk>",filters='!"#$%&()*+,-./:;=?@[\\]^_`{|}~\t\n',split=' ', char_level=False)
tokenizer.fit_on_texts(comment_dataset_train)
max_length = 80
vgg = VGG16()
vgg16_extract = Model(inputs=vgg.inputs, outputs=vgg.layers[-2].output)
"""#### Function"""
def load_img_from_path(img):
    """Load a Flickr30k image by file name and return its VGG16 feature vector.

    Reads the image from the hard-coded dataset directory, resizes it to the
    224x224 input VGG16 expects, preprocesses it, and runs the module-level
    `vgg16_extract` model (built on the second-to-last VGG16 layer).
    Returns the feature vector for the single image — presumably 4096-d,
    matching the Input(shape=(4096,)) branch of the caption model; confirm.
    """
    path = 'flickr30k_images/flickr30k_images/flickr30k_images/'
    img = load_img(path+img,target_size=(224,224))
    im_arr= img_to_array(img)
    # add the batch dimension: (1, height, width, channels)
    im_arr = im_arr.reshape((1, im_arr.shape[0], im_arr.shape[1], im_arr.shape[2]))
    im_arr = preprocess_input(im_arr)
    predict = vgg16_extract.predict(im_arr,verbose=0)
    #print(predict)
    return predict[0]
def from_comment_to_data(tokenizer, image, sentences, max_length, vocab_size):
    """Expand captions into (image, prefix) -> next-token training samples.

    For every caption in `sentences`, each token position i yields one
    sample: the image feature repeated, the zero-padded token prefix
    [0..i], and a one-hot target for token i+1.
    Returns three aligned numpy arrays (X_image, X_sequence, y).
    """
    X_image, X_sequence, y = list(), list(), list()
    #print(sentences[0:1])
    for e in sentences:
        sequence = tokenizer.texts_to_sequences([e])
        # one training pair per next-token position in the caption
        for i,_ in enumerate(sequence[0][1:]):
            X_image.append(image)
            X_sequence.append(tf.keras.preprocessing.sequence.pad_sequences([sequence[0][:i+1]], maxlen=max_length)[0])
            y.append(tf.keras.utils.to_categorical(sequence[0][i+1],vocab_size))
    return np.array(X_image),np.array(X_sequence),np.array(y)
def data_generator(df_train, data_image_train, tokenizer, max_length, vocab_size):
    """Infinite Keras generator yielding ((X_sequence, X_image), y) batches.

    Cycles forever over the image names in `data_image_train`; for each
    image it gathers that image's captions from `df_train`, extracts the
    VGG16 feature once, and yields every (prefix -> next-token) sample
    built by from_comment_to_data.  One yield == all samples of one image.
    """
    while 1:
        for image in data_image_train:
            sentences = df_train[df_train['image_name'] == image]["comment_train"]
            # extract the image feature once per image, not per caption
            image_vgg = load_img_from_path(image)
            X_image, X_sequence, y = from_comment_to_data(tokenizer,image_vgg,sentences,max_length,vocab_size)
            yield ((X_sequence,X_image),y)
def inspect_datset_from_path(img_name):
    """Display the image named `img_name` and print its training captions."""
    #print('flickr30k_images/flickr30k_images/flickr30k_images/'+img_name)
    img =plt.imread('flickr30k_images/flickr30k_images/flickr30k_images/'+img_name)
    print('\n',list(df_new[df_new['image_name'] == img_name]["comment_train"])[0:])
    plt.imshow(img)
def plot_probability_word(res):
    """Bar-plot the 10 highest-probability next tokens of a prediction.

    Only the last row `res[-1]` of the model output is used.  Prints the
    top-10 indices and probabilities plus the single most likely word,
    then renders a bar chart labelled with the corresponding vocab words.
    """
    # token ids sorted by descending probability
    a = np.argsort(res[-1])[::-1]
    print(a[:10],res[-1][a][:10])
    print(tokenizer.index_word[a[0]])
    plt.figure(figsize=(25,10),dpi=40)
    plt.bar([i for i in range(10)], res[-1][a][:10])
    plt.xticks([i for i in range(10)],[tokenizer.index_word[e] for e in a[:10]],fontsize=30,rotation=30)
    plt.show()
def test_model(batch, logs):
    """Greedy-decode a caption for a random image and show intermediate plots.

    Epoch-end LambdaCallback for the word-level image-caption model
    (batch/logs are the unused callback arguments).  Picks a random image,
    extracts its VGG16 feature, then repeatedly feeds (prefix, image) to
    the module-level `model`, appending the argmax token until '<end>' or
    10 tokens.  Relies on globals: `uni_image`, `tokenizer`, `model`,
    `max_length`, `load_img_from_path`, `plot_probability_word`.
    """
    image = np.random.choice(uni_image,1)[0]
    intext ='<start>'
    img = load_img_from_path(image)
    image_input = np.array(img).reshape(1,-1)
    inspect_datset_from_path(image)
    for _ in range(10):
        test_seq = tokenizer.texts_to_sequences([intext])
        pad_test_seq = tf.keras.preprocessing.sequence.pad_sequences([test_seq],maxlen=max_length)[0]
        text = np.array(pad_test_seq).reshape(1,-1)
        res = model.predict((text,image_input),verbose=0)
        # greedy decoding: always take the most probable next token
        intext += ' '+tokenizer.index_word[res[-1].argmax()]
        #a = np.argsort(res[-1])[::-1]
        #ra = np.random.choice(a,p=res[0])
        #intext +=' '+tokenizer.index_word[ra]
        plot_probability_word(res)
        '''if tokenizer.index_word[ra] == '<end>':
            break'''
        if tokenizer.index_word[res[-1].argmax()] == '<end>':
            break
    print('\n',intext)
np.random.shuffle(uni_image)
generator = data_generator(df_new,uni_image, tokenizer, max_length, vocab_size)
inputs, outputs= next(generator)
print(inputs[0].shape)
print(inputs[1].shape)
print(outputs.shape)
"""#### Test"""
test_model(None,None)
#tokenizer.word_index
num = 6000
print(str(df_new.at[num,'comment_train']))
s= tokenizer.texts_to_sequences([df_new.at[num,'comment_train']])
m = tokenizer.sequences_to_texts(s)
print(m)
#inspect_datset_from_path(uni_image[0])
from google.colab import drive
drive.mount('/content/drive')
"""###Model"""
from tensorflow.keras.layers import Input , LSTM , Embedding,Dense,Dropout,add
from tensorflow.keras.callbacks import LambdaCallback
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import SGD
input1 = Input(shape=(max_length,))
embedding = Embedding(vocab_size,256,mask_zero=False)(input1)
drop = Dropout(0.5)(embedding)
lstm = LSTM(256)(drop)
input2 = Input(shape=(4096,))
drop2 = Dropout(0.5)(input2)
dense = Dense(256, activation='relu')(drop2)
decode = add([dense,lstm])
decode = Dense(256,activation='relu')(decode)
output = Dense(vocab_size,activation='softmax')(decode)
model = Model(inputs=[input1,input2], outputs=output)
model.compile(loss='categorical_crossentropy', optimizer='adam',metrics=['accuracy'])
model.summary()
tf.keras.utils.plot_model(model, "my_first_model.png",show_shapes=True,show_layer_names=False,dpi=150,expand_nested=False,rankdir='LR')
model.fit(generator,steps_per_epoch=100,epochs = 100,callbacks=[LambdaCallback(on_epoch_end=test_model)])
"""## Actual Model for image captionning (character)
### Import
"""
import tensorflow as tf
from tensorflow.keras.preprocessing.text import Tokenizer
import numpy as np
import pickle
from keras.applications.vgg16 import VGG16
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import load_img
from keras.preprocessing.image import load_img
from keras.preprocessing.image import img_to_array
from keras.applications.vgg16 import preprocess_input
import random
"""### Variable"""
df_new
image_dataset = df_new['image_name']
image_dataset
comment_dataset = df_new['comment']
comment_dataset
uni_image = image_dataset.unique()
comment_dataset_train_character=[]
comment_dataset.replace(np.nan, '', regex=True)
for i,e in enumerate(comment_dataset):
try:
comment_dataset_train_character.append('> '+(' '.join([word.lower() for word in e.strip().split(' ') if word.isalpha()]))+' <')
except:
print(e, type(e))
comment_dataset_train_character.append('> '+'nan'+' <')
continue
df_new['comment_train'] = comment_dataset_train_character
df_new
vocab_size = 31
tokenizer = tf.keras.preprocessing.text.Tokenizer(num_words=vocab_size,oov_token="<unk>",filters='!"#$%&()*+,-./:;=?@[\\]^_`{|}~\t\n',split=' ', char_level=True)
tokenizer.fit_on_texts(comment_dataset_train_character)
tokenizer.word_index
from functools import reduce
max_length = len(reduce(lambda x,y : x if len(x)>len(y)else y,df_new['comment_train']))
max_length
vgg = VGG16()
vgg16_extract = Model(inputs=vgg.inputs, outputs=vgg.layers[-2].output)
num = 0
print(str(df_new.at[num,'comment_train']))
s= tokenizer.texts_to_sequences([df_new.at[num,'comment_train']])
m = tokenizer.sequences_to_texts(s)
print(len(s[0]))
"""### Function"""
def load_img_from_path(img):
    """Load a Flickr30k image and return its VGG16 feature vector.

    Duplicate of the earlier definition, redefined for the character-level
    section of this notebook export.
    """
    path = 'flickr30k_images/flickr30k_images/flickr30k_images/'
    img = load_img(path+img,target_size=(224,224))
    im_arr= img_to_array(img)
    # add the batch dimension: (1, height, width, channels)
    im_arr = im_arr.reshape((1, im_arr.shape[0], im_arr.shape[1], im_arr.shape[2]))
    im_arr = preprocess_input(im_arr)
    predict = vgg16_extract.predict(im_arr,verbose=0)
    #print(predict)
    return predict[0]
def from_comment_to_data(tokenizer, image, sentences, max_length, vocab_size):
    """Expand captions into (image, prefix) -> next-token training samples.

    Duplicate of the earlier definition, here operating on character-level
    tokens.  Returns three aligned numpy arrays (X_image, X_sequence, y).
    """
    X_image, X_sequence, y = list(), list(), list()
    for e in sentences:
        sequence = tokenizer.texts_to_sequences([e])
        # one training pair per next-token position in the caption
        for i,_ in enumerate(sequence[0][1:]):
            X_image.append(image)
            X_sequence.append(tf.keras.preprocessing.sequence.pad_sequences([sequence[0][:i+1]], maxlen=max_length)[0])
            y.append(tf.keras.utils.to_categorical(sequence[0][i+1],vocab_size))
    return np.array(X_image),np.array(X_sequence),np.array(y)
def data_generator(df_train, data_image_train, tokenizer, max_length, vocab_size):
    """Infinite generator yielding ((X_sequence, X_image), y) per image.

    Duplicate of the earlier definition for the character-level section.
    """
    while 1:
        for image in data_image_train:
            sentences = df_train[df_train['image_name'] == image]["comment_train"]
            # extract the image feature once per image, not per caption
            image_vgg = load_img_from_path(image)
            X_image, X_sequence, y = from_comment_to_data(tokenizer,image_vgg,sentences,max_length,vocab_size)
            yield ((X_sequence,X_image),y)
def inspect_datset_from_path(img_name):
    """Display the image named `img_name` and print its training captions.

    Duplicate of the earlier definition for the character-level section.
    """
    #print('flickr30k_images/flickr30k_images/flickr30k_images/'+img_name)
    img =plt.imread('flickr30k_images/flickr30k_images/flickr30k_images/'+img_name)
    print('\n',list(df_new[df_new['image_name'] == img_name]["comment_train"])[0:])
    plt.imshow(img)
def plot_probability_word(res):
    """Bar-plot the 10 highest-probability next tokens of a prediction.

    Duplicate of the earlier definition for the character-level section;
    only the last row `res[-1]` of the model output is used.
    """
    # token ids sorted by descending probability
    a = np.argsort(res[-1])[::-1]
    print(a[:10],res[-1][a][:10])
    print(tokenizer.index_word[a[0]])
    plt.figure(figsize=(25,10),dpi=40)
    plt.bar([i for i in range(10)], res[-1][a][:10])
    plt.xticks([i for i in range(10)],[tokenizer.index_word[e] for e in a[:10]],fontsize=30,rotation=30)
    plt.show()
def test_model(batch, logs):
    """Greedy-decode a character-level caption for a random image.

    Epoch-end LambdaCallback for the character-level model: starts from
    the '>' start marker and appends argmax tokens until the '<' end
    marker or 30 steps.  Relies on globals: `uni_image`, `tokenizer`,
    `model`, `max_length`, `load_img_from_path`, `plot_probability_word`.
    """
    image = np.random.choice(uni_image,1)[0]
    intext ='>'
    img = load_img_from_path(image)
    image_input = np.array(img).reshape(1,-1)
    inspect_datset_from_path(image)
    for _ in range(30):
        test_seq = tokenizer.texts_to_sequences([intext])
        pad_test_seq = tf.keras.preprocessing.sequence.pad_sequences([test_seq],maxlen=max_length)[0]
        text = np.array(pad_test_seq).reshape(1,-1)
        res = model.predict((text,image_input),verbose=0)
        # greedy decoding: always take the most probable next token
        intext += ' '+tokenizer.index_word[res[-1].argmax()]
        plot_probability_word(res)
        if tokenizer.index_word[res[-1].argmax()] == '<':
            break
    print('\n',intext)
np.random.shuffle(uni_image)
generator = data_generator(df_new,uni_image, tokenizer, max_length, vocab_size)
inputs, outputs= next(generator)
inputs, outputs= next(generator)
inputs, outputs= next(generator)
print(inputs[0].shape)
print(inputs[1].shape)
print(outputs.shape)
"""### Model"""
from tensorflow.keras.layers import Input , LSTM , Embedding,Dense,Dropout,add
from tensorflow.keras.callbacks import LambdaCallback
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import SGD
input1 = Input(shape=(max_length,))
embedding = Embedding(vocab_size,256,mask_zero=False)(input1)
drop = Dropout(0.5)(embedding)
lstm = LSTM(256)(drop)
input2 = Input(shape=(4096,))
drop2 = Dropout(0.5)(input2)
dense = Dense(256, activation='relu')(drop2)
decode = add([dense,lstm])
decode = Dense(256,activation='relu')(decode)
output = Dense(vocab_size,activation='softmax')(decode)
model = Model(inputs=[input1,input2], outputs=output)
model.compile(loss='categorical_crossentropy', optimizer='adam',metrics=['accuracy'])
model.summary()
tf.keras.utils.plot_model(model, "my_first_model.png",show_shapes=True,show_layer_names=False,dpi=150,expand_nested=False,rankdir='LR')
model.fit(generator,steps_per_epoch=10,epochs = 100,callbacks=[LambdaCallback(on_epoch_end=test_model)])
"""## Image to Caption with Attention"""
| 31.345238
| 162
| 0.732181
| 2,386
| 15,798
| 4.640821
| 0.131182
| 0.012192
| 0.030886
| 0.03793
| 0.812427
| 0.797706
| 0.79301
| 0.773413
| 0.747855
| 0.730154
| 0
| 0.024444
| 0.109191
| 15,798
| 503
| 163
| 31.407555
| 0.762382
| 0.039499
| 0
| 0.727829
| 1
| 0
| 0.075686
| 0.032086
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.140673
| null | null | 0.073395
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
77606358b237743bef602d4d559a5ebd18cbf42b
| 2,087
|
py
|
Python
|
societe/models.py
|
donalawa/gestionimmo
|
b538cb6fd94bf1d6f3b0ce171cdc3511ab7b6b97
|
[
"MIT"
] | null | null | null |
societe/models.py
|
donalawa/gestionimmo
|
b538cb6fd94bf1d6f3b0ce171cdc3511ab7b6b97
|
[
"MIT"
] | null | null | null |
societe/models.py
|
donalawa/gestionimmo
|
b538cb6fd94bf1d6f3b0ce171cdc3511ab7b6b97
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
# class Societe(models.Model):
# nom = models.CharField(max_length=60)
# localisation = models.CharField(max_length=200)
# active = models.BooleanField(default=False)
# date_creation = models.DateField()
# def __str__(self):
# return self.nom
# class Agent(models.Model):
# societe = models.ForeignKey(Societe, on_delete=models.CASCADE)
# login = models.CharField(max_length=20)
# password = models.CharField(max_length=20)
# email = models.EmailField(max_length=60)
# nom = models.CharField(max_length=30)
# prenom = models.CharField(max_length=30)
# statut = models.CharField(max_length=20)
# active = models.BooleanField(default=False)
# created_at = models.DateTimeField(auto_now_add=True)
# updated_at = models.DateTimeField(auto_now=True)
# def __str__(self):
# return self.nom
# class Societe(models.Model):
# nom = models.CharField(max_length=60)
# localisation = models.CharField(max_length=200)
# active = models.BooleanField(default=False)
# date_creation = models.DateTimeField(verbose_name="Date joined",auto_now_add=True)
# def __str__(self):
# return self.nom
# class Agent(models.Model):
# societe = models.ForeignKey(Societe, on_delete=models.CASCADE)
# poste = models.CharField(max_length=15)
# login = models.CharField(max_length=20)
# password = models.CharField(max_length=20)
# email = models.EmailField(max_length=60)
# nom = models.CharField(max_length=30)
# prenom = models.CharField(max_length=30)
# statut = models.CharField(max_length=20)
# active = models.BooleanField(default=False)
# created_at = models.DateTimeField(auto_now_add=True)
# updated_at = models.DateTimeField(auto_now=True)
# def __str__(self):
# return self.nom
| 32.107692
| 91
| 0.633445
| 233
| 2,087
| 5.459227
| 0.236052
| 0.120283
| 0.212264
| 0.283019
| 0.899371
| 0.899371
| 0.899371
| 0.899371
| 0.896226
| 0.896226
| 0
| 0.023226
| 0.257307
| 2,087
| 64
| 92
| 32.609375
| 0.797419
| 0.902731
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
0291b1564a2e08f354e3421707aa7ce2c92a48d9
| 21,787
|
py
|
Python
|
odes.py
|
Christian-Offen/BEAConjugateSymplectic
|
f4a164cf878d784e90caca54b576b8610ecb8d54
|
[
"MIT"
] | null | null | null |
odes.py
|
Christian-Offen/BEAConjugateSymplectic
|
f4a164cf878d784e90caca54b576b8610ecb8d54
|
[
"MIT"
] | null | null | null |
odes.py
|
Christian-Offen/BEAConjugateSymplectic
|
f4a164cf878d784e90caca54b576b8610ecb8d54
|
[
"MIT"
] | null | null | null |
import numpy as np
def ode0(z, UJet, a):
    """First-order formulation of the unmodified second-order ODE.

    z    -- state vector [u0, u1, ud0, ud1]
    UJet -- jet of the potential's partial derivatives, callable via
            UJet[i][j](u)
    a    -- coefficient table (unused here; kept so ode0/ode2/ode4 share
            one signature)
    Returns the time derivative [ud0, ud1, UJet[1][0](u), UJet[0][1](u)].
    """
    position = z[:2]
    velocity = z[2:]
    acceleration = np.array([UJet[1][0](position), UJet[0][1](position)])
    return np.concatenate([velocity, acceleration])
def ode2(z,UJet,a,h):
    # first order formulation of modified ODE 2
    #
    # Order-h^2 modified equation of the underlying integrator: returns the
    # ode0 vector field plus an h^2 correction term built from the jet of
    # partial derivatives UJet and the method coefficients a.
    #   z    -- state vector [u0, u1, ud0, ud1]
    #   UJet -- UJet[i][j](u) evaluates the (i,j) partial derivative at u
    #   a    -- coefficient table indexed a[i,j] (e.g. RK tableau entries)
    #   h    -- step size of the integrator being corrected
    # NOTE(review): the rhs polynomial looks machine-generated (backward
    # error analysis); do not hand-edit the coefficients.
    u = z[:2]
    ud = z[2:]
    # scratch table of the needed partial derivatives at u
    U = np.zeros((4,4))
    U[1,0] = UJet[1][0](u)
    U[0,1] = UJet[0][1](u)
    U[0,2] = UJet[0][2](u)
    U[0,3] = UJet[0][3](u)
    U[1,1] = UJet[1][1](u)
    U[1,2] = UJet[1][2](u)
    U[2,1] = UJet[2][1](u)
    U[3,0] = UJet[3][0](u)
    xd1 = ud[0]
    xd2 = ud[1]
    rhs = np.array([(3*a[1,2]*(U[0,1]*U[0,2] + xd2**2*U[0,3] + U[1,0]*U[1,1] + 2*xd1*xd2*U[1,2] + xd1**2*U[2,1]) +
     (-4 + 3*a[1,1])*(U[0,1]*U[1,1] + xd2**2*U[1,2] + U[1,0]*U[2,0] + 2*xd1*xd2*U[2,1] + xd1**2*U[3,0]))/12.,
    (-4*(U[0,1]*U[0,2] + xd2**2*U[0,3] + U[1,0]*U[1,1] + 2*xd1*xd2*U[1,2] + xd1**2*U[2,1]) +
     3*a[2,2]*(U[0,1]*U[0,2] + xd2**2*U[0,3] + U[1,0]*U[1,1] + 2*xd1*xd2*U[1,2] + xd1**2*U[2,1]) +
     3*a[1,2]*(U[0,1]*U[1,1] + xd2**2*U[1,2] + U[1,0]*U[2,0] + 2*xd1*xd2*U[2,1] + xd1**2*U[3,0]))/12.])
    # base field plus the h^2 correction on the acceleration components only
    r = ode0(z,UJet,a)[2:] + h**2*rhs
    return np.concatenate([ud,r])
def ode4(z,UJet,a,h):
# first order formulation of modified ODE 4
u = z[:2]
ud = z[2:]
U = np.zeros((6,6))
U[0,1] = UJet[0][1](u)
U[0,2] = UJet[0][2](u)
U[0,3] = UJet[0][3](u)
U[0,4] = UJet[0][4](u)
U[0,5] = UJet[0][5](u)
U[1,0] = UJet[1][0](u)
U[1,1] = UJet[1][1](u)
U[1,2] = UJet[1][2](u)
U[1,3] = UJet[1][3](u)
U[1,4] = UJet[1][4](u)
U[2,0] = UJet[2][0](u)
U[2,1] = UJet[2][1](u)
U[2,2] = UJet[2][2](u)
U[2,3] = UJet[2][3](u)
U[3,0] = UJet[3][0](u)
U[3,1] = UJet[3][1](u)
U[3,2] = UJet[3][2](u)
U[4,0] = UJet[4][0](u)
U[4,1] = UJet[4][1](u)
U[5,0] = UJet[5][0](u)
xd1 = ud[0]
xd2 = ud[1]
rhs = np.array([(128*xd2**2*U[0,3]*U[1,1] - 150*xd2**2*a[1,1]*U[0,3]*U[1,1] + 45*xd2**2*a[1,1]**2*U[0,3]*U[1,1] - 60*xd2**2*a[2,2]*U[0,3]*U[1,1] +
45*xd2**2*a[1,1]*a[2,2]*U[0,3]*U[1,1] + 128*U[1,0]*U[1,1]**2 - 150*a[1,1]*U[1,0]*U[1,1]**2 + 45*a[1,1]**2*U[1,0]*U[1,1]**2 - 60*a[2,2]*U[1,0]*U[1,1]**2 +
45*a[1,1]*a[2,2]*U[1,0]*U[1,1]**2 + 9*(16 - 30*a[1,1] + 15*a[1,1]**2)*U[0,1]**2*U[1,2] + 192*xd2**2*U[0,2]*U[1,2] - 360*xd2**2*a[1,1]*U[0,2]*U[1,2] +
180*xd2**2*a[1,1]**2*U[0,2]*U[1,2] + 448*xd1*xd2*U[1,1]*U[1,2] - 660*xd1*xd2*a[1,1]*U[1,1]*U[1,2] + 270*xd1*xd2*a[1,1]**2*U[1,1]*U[1,2] -
120*xd1*xd2*a[2,2]*U[1,1]*U[1,2] + 90*xd1*xd2*a[1,1]*a[2,2]*U[1,1]*U[1,2] + 48*xd2**4*U[1,4] - 90*xd2**4*a[1,1]*U[1,4] + 45*xd2**4*a[1,1]**2*U[1,4] +
128*xd2**2*U[1,2]*U[2,0] - 210*xd2**2*a[1,1]*U[1,2]*U[2,0] + 90*xd2**2*a[1,1]**2*U[1,2]*U[2,0] + 128*U[1,0]*U[2,0]**2 - 210*a[1,1]*U[1,0]*U[2,0]**2 +
90*a[1,1]**2*U[1,0]*U[2,0]**2 + 192*xd1*xd2*U[0,2]*U[2,1] - 360*xd1*xd2*a[1,1]*U[0,2]*U[2,1] + 180*xd1*xd2*a[1,1]**2*U[0,2]*U[2,1] +
320*xd1**2*U[1,1]*U[2,1] + 192*xd2**2*U[1,1]*U[2,1] - 510*xd1**2*a[1,1]*U[1,1]*U[2,1] - 360*xd2**2*a[1,1]*U[1,1]*U[2,1] +
225*xd1**2*a[1,1]**2*U[1,1]*U[2,1] + 180*xd2**2*a[1,1]**2*U[1,1]*U[2,1] - 60*xd1**2*a[2,2]*U[1,1]*U[2,1] + 45*xd1**2*a[1,1]*a[2,2]*U[1,1]*U[2,1] +
448*xd1*xd2*U[2,0]*U[2,1] - 780*xd1*xd2*a[1,1]*U[2,0]*U[2,1] + 360*xd1*xd2*a[1,1]**2*U[2,0]*U[2,1] + 288*xd2**2*U[1,0]*U[2,2] -
540*xd2**2*a[1,1]*U[1,0]*U[2,2] + 270*xd2**2*a[1,1]**2*U[1,0]*U[2,2] + 192*xd1*xd2**3*U[2,3] - 360*xd1*xd2**3*a[1,1]*U[2,3] +
180*xd1*xd2**3*a[1,1]**2*U[2,3] + 144*U[1,0]**2*U[3,0] - 270*a[1,1]*U[1,0]**2*U[3,0] + 135*a[1,1]**2*U[1,0]**2*U[3,0] + 192*xd1*xd2*U[1,1]*U[3,0] -
360*xd1*xd2*a[1,1]*U[1,1]*U[3,0] + 180*xd1*xd2*a[1,1]**2*U[1,1]*U[3,0] + 320*xd1**2*U[2,0]*U[3,0] - 570*xd1**2*a[1,1]*U[2,0]*U[3,0] +
270*xd1**2*a[1,1]**2*U[2,0]*U[3,0] + 576*xd1*xd2*U[1,0]*U[3,1] - 1080*xd1*xd2*a[1,1]*U[1,0]*U[3,1] + 540*xd1*xd2*a[1,1]**2*U[1,0]*U[3,1] +
U[0,1]*((128 + 45*a[1,1]**2 - 60*a[2,2] + 15*a[1,1]*(-10 + 3*a[2,2]))*U[0,2]*U[1,1] +
2*(9*xd2**2*(16 - 30*a[1,1] + 15*a[1,1]**2)*U[1,3] + (64 - 105*a[1,1] + 45*a[1,1]**2)*U[1,1]*U[2,0] +
18*xd1*xd2*(16 - 30*a[1,1] + 15*a[1,1]**2)*U[2,2] + 9*(16 - 30*a[1,1] + 15*a[1,1]**2)*(U[1,0]*U[2,1] + xd1**2*U[3,1]))) + 288*xd1**2*xd2**2*U[3,2] -
540*xd1**2*xd2**2*a[1,1]*U[3,2] + 270*xd1**2*xd2**2*a[1,1]**2*U[3,2] + 288*xd1**2*U[1,0]*U[4,0] - 540*xd1**2*a[1,1]*U[1,0]*U[4,0] +
270*xd1**2*a[1,1]**2*U[1,0]*U[4,0] + 192*xd1**3*xd2*U[4,1] - 360*xd1**3*xd2*a[1,1]*U[4,1] + 180*xd1**3*xd2*a[1,1]**2*U[4,1] +
15*a[1,2]*(9*(-2 + a[1,1] + a[2,2])*U[0,1]**2*U[0,3] + 3*xd2**4*(-2 + a[1,1] + a[2,2])*U[0,5] - 10*U[0,2]*U[1,0]*U[1,1] + 3*a[1,1]*U[0,2]*U[1,0]*U[1,1] +
6*a[2,2]*U[0,2]*U[1,0]*U[1,1] - 24*xd1**2*U[1,1]*U[1,2] + 12*xd1**2*a[1,1]*U[1,1]*U[1,2] + 12*xd1**2*a[2,2]*U[1,1]*U[1,2] +
12*xd1*xd2**3*(-2 + a[1,1] + a[2,2])*U[1,4] - 18*U[1,0]*U[1,1]*U[2,0] + 12*a[1,1]*U[1,0]*U[1,1]*U[2,0] + 3*a[2,2]*U[1,0]*U[1,1]*U[2,0] -
10*xd1**2*U[0,2]*U[2,1] + 3*xd1**2*a[1,1]*U[0,2]*U[2,1] + 6*xd1**2*a[2,2]*U[0,2]*U[2,1] - 18*U[1,0]**2*U[2,1] + 9*a[1,1]*U[1,0]**2*U[2,1] +
9*a[2,2]*U[1,0]**2*U[2,1] - 28*xd1**2*U[2,0]*U[2,1] + 15*xd1**2*a[1,1]*U[2,0]*U[2,1] + 12*xd1**2*a[2,2]*U[2,0]*U[2,1] +
U[0,1]*((-10 + 3*a[1,1] + 6*a[2,2])*U[0,2]**2 + 18*xd2**2*(-2 + a[1,1] + a[2,2])*U[0,4] - 14*U[1,1]**2 + 9*a[1,1]*U[1,1]**2 + 3*a[2,2]*U[1,1]**2 -
36*U[1,0]*U[1,2] + 18*a[1,1]*U[1,0]*U[1,2] + 18*a[2,2]*U[1,0]*U[1,2] + 36*xd1*xd2*(-2 + a[1,1] + a[2,2])*U[1,3] + (-4 + 3*a[1,1])*U[0,2]*U[2,0] -
36*xd1**2*U[2,2] + 18*xd1**2*a[1,1]*U[2,2] + 18*xd1**2*a[2,2]*U[2,2]) +
xd2**2*((-34 + 15*a[1,1] + 18*a[2,2])*U[0,2]*U[0,3] + (-38 + 21*a[1,1] + 15*a[2,2])*U[1,1]*U[1,2] - 36*U[1,0]*U[1,3] + 18*a[1,1]*U[1,0]*U[1,3] +
18*a[2,2]*U[1,0]*U[1,3] - 4*U[0,3]*U[2,0] + 3*a[1,1]*U[0,3]*U[2,0] - 36*xd1**2*U[2,3] + 18*xd1**2*a[1,1]*U[2,3] + 18*xd1**2*a[2,2]*U[2,3]) -
14*xd1**2*U[1,1]*U[3,0] + 9*xd1**2*a[1,1]*U[1,1]*U[3,0] + 3*xd1**2*a[2,2]*U[1,1]*U[3,0] - 36*xd1**2*U[1,0]*U[3,1] + 18*xd1**2*a[1,1]*U[1,0]*U[3,1] +
18*xd1**2*a[2,2]*U[1,0]*U[3,1] + 2*xd1*xd2*(6*(-2 + a[1,1] + a[2,2])*U[0,3]*U[1,1] + (-22 + 9*a[1,1] + 12*a[2,2])*U[0,2]*U[1,2] - 16*U[1,2]*U[2,0] +
9*a[1,1]*U[1,2]*U[2,0] + 6*a[2,2]*U[1,2]*U[2,0] - 26*U[1,1]*U[2,1] + 15*a[1,1]*U[1,1]*U[2,1] + 9*a[2,2]*U[1,1]*U[2,1] - 36*U[1,0]*U[2,2] +
18*a[1,1]*U[1,0]*U[2,2] + 18*a[2,2]*U[1,0]*U[2,2] - 12*xd1**2*U[3,2] + 6*xd1**2*a[1,1]*U[3,2] + 6*xd1**2*a[2,2]*U[3,2]) - 6*xd1**4*U[4,1] +
3*xd1**4*a[1,1]*U[4,1] + 3*xd1**4*a[2,2]*U[4,1]) + 48*xd1**4*U[5,0] - 90*xd1**4*a[1,1]*U[5,0] + 45*xd1**4*a[1,1]**2*U[5,0] +
45*a[1,2]**2*(2*U[1,0]*U[1,1]**2 + 3*U[0,1]**2*U[1,2] + xd2**4*U[1,4] + U[0,2]*U[1,0]*U[2,0] + U[1,0]*U[2,0]**2 + 6*xd1**2*U[1,1]*U[2,1] +
4*xd1*xd2**3*U[2,3] + xd1**2*U[0,2]*U[3,0] + 3*U[1,0]**2*U[3,0] + 5*xd1**2*U[2,0]*U[3,0] +
U[0,1]*(3*U[0,2]*U[1,1] + 6*xd2**2*U[1,3] + U[1,1]*U[2,0] + 6*U[1,0]*U[2,1] + 12*xd1*xd2*U[2,2] + 6*xd1**2*U[3,1]) +
xd2**2*(2*U[0,3]*U[1,1] + 5*U[0,2]*U[1,2] + U[1,2]*U[2,0] + 4*U[1,1]*U[2,1] + 6*U[1,0]*U[2,2] + 6*xd1**2*U[3,2]) + 6*xd1**2*U[1,0]*U[4,0] +
2*xd1*xd2*(3*U[0,2]*U[2,1] + 3*U[2,0]*U[2,1] + 2*U[1,1]*(2*U[1,2] + U[3,0]) + 6*U[1,0]*U[3,1] + 2*xd1**2*U[4,1]) + xd1**4*U[5,0]))/720.,
(128*U[0,2]*U[1,0]*U[1,1] + 45*a[1,2]**2*U[0,2]*U[1,0]*U[1,1] - 210*a[2,2]*U[0,2]*U[1,0]*U[1,1] + 90*a[2,2]**2*U[0,2]*U[1,0]*U[1,1] -
210*a[1,2]*U[1,0]*U[1,1]**2 + 45*a[1,1]*a[1,2]*U[1,0]*U[1,1]**2 + 135*a[1,2]*a[2,2]*U[1,0]*U[1,1]**2 + 192*xd1**2*U[1,1]*U[1,2] +
180*xd1**2*a[1,2]**2*U[1,1]*U[1,2] - 360*xd1**2*a[2,2]*U[1,1]*U[1,2] + 180*xd1**2*a[2,2]**2*U[1,1]*U[1,2] +
9*U[0,1]**2*((16 + 15*a[1,2]**2 - 30*a[2,2] + 15*a[2,2]**2)*U[0,3] + 15*a[1,2]*(-2 + a[1,1] + a[2,2])*U[1,2]) +
xd2**4*((48 + 45*a[1,2]**2 - 90*a[2,2] + 45*a[2,2]**2)*U[0,5] + 45*a[1,2]*(-2 + a[1,1] + a[2,2])*U[1,4]) - 60*a[1,2]*U[0,2]*U[1,0]*U[2,0] +
45*a[1,2]*a[2,2]*U[0,2]*U[1,0]*U[2,0] + 128*U[1,0]*U[1,1]*U[2,0] - 60*a[1,1]*U[1,0]*U[1,1]*U[2,0] + 135*a[1,2]**2*U[1,0]*U[1,1]*U[2,0] -
150*a[2,2]*U[1,0]*U[1,1]*U[2,0] + 45*a[1,1]*a[2,2]*U[1,0]*U[1,1]*U[2,0] + 45*a[2,2]**2*U[1,0]*U[1,1]*U[2,0] - 150*a[1,2]*U[1,0]*U[2,0]**2 +
90*a[1,1]*a[1,2]*U[1,0]*U[2,0]**2 + 45*a[1,2]*a[2,2]*U[1,0]*U[2,0]**2 + 128*xd1**2*U[0,2]*U[2,1] + 45*xd1**2*a[1,2]**2*U[0,2]*U[2,1] -
210*xd1**2*a[2,2]*U[0,2]*U[2,1] + 90*xd1**2*a[2,2]**2*U[0,2]*U[2,1] + 144*U[1,0]**2*U[2,1] + 135*a[1,2]**2*U[1,0]**2*U[2,1] - 270*a[2,2]*U[1,0]**2*U[2,1] +
135*a[2,2]**2*U[1,0]**2*U[2,1] - 570*xd1**2*a[1,2]*U[1,1]*U[2,1] + 225*xd1**2*a[1,1]*a[1,2]*U[1,1]*U[2,1] + 315*xd1**2*a[1,2]*a[2,2]*U[1,1]*U[2,1] +
192*xd1**2*U[2,0]*U[2,1] + 225*xd1**2*a[1,2]**2*U[2,0]*U[2,1] - 360*xd1**2*a[2,2]*U[2,0]*U[2,1] + 180*xd1**2*a[2,2]**2*U[2,0]*U[2,1] +
12*xd1*xd2**3*((16 + 15*a[1,2]**2 - 30*a[2,2] + 15*a[2,2]**2)*U[1,4] + 15*a[1,2]*(-2 + a[1,1] + a[2,2])*U[2,3]) - 60*xd1**2*a[1,2]*U[0,2]*U[3,0] +
45*xd1**2*a[1,2]*a[2,2]*U[0,2]*U[3,0] - 270*a[1,2]*U[1,0]**2*U[3,0] + 135*a[1,1]*a[1,2]*U[1,0]**2*U[3,0] + 135*a[1,2]*a[2,2]*U[1,0]**2*U[3,0] +
128*xd1**2*U[1,1]*U[3,0] - 60*xd1**2*a[1,1]*U[1,1]*U[3,0] + 90*xd1**2*a[1,2]**2*U[1,1]*U[3,0] - 150*xd1**2*a[2,2]*U[1,1]*U[3,0] +
45*xd1**2*a[1,1]*a[2,2]*U[1,1]*U[3,0] + 45*xd1**2*a[2,2]**2*U[1,1]*U[3,0] - 510*xd1**2*a[1,2]*U[2,0]*U[3,0] + 270*xd1**2*a[1,1]*a[1,2]*U[2,0]*U[3,0] +
225*xd1**2*a[1,2]*a[2,2]*U[2,0]*U[3,0] + 288*xd1**2*U[1,0]*U[3,1] + 270*xd1**2*a[1,2]**2*U[1,0]*U[3,1] - 540*xd1**2*a[2,2]*U[1,0]*U[3,1] +
270*xd1**2*a[2,2]**2*U[1,0]*U[3,1] + U[0,1]*((128 + 45*a[1,2]**2 - 210*a[2,2] + 90*a[2,2]**2)*U[0,2]**2 + 128*U[1,1]**2 - 60*a[1,1]*U[1,1]**2 +
90*a[1,2]**2*U[1,1]**2 - 150*a[2,2]*U[1,1]**2 + 45*a[1,1]*a[2,2]*U[1,1]**2 + 45*a[2,2]**2*U[1,1]**2 + 288*U[1,0]*U[1,2] + 270*a[1,2]**2*U[1,0]*U[1,2] -
540*a[2,2]*U[1,0]*U[1,2] + 270*a[2,2]**2*U[1,0]*U[1,2] +
18*xd2**2*((16 + 15*a[1,2]**2 - 30*a[2,2] + 15*a[2,2]**2)*U[0,4] + 15*a[1,2]*(-2 + a[1,1] + a[2,2])*U[1,3]) - 150*a[1,2]*U[1,1]*U[2,0] +
90*a[1,1]*a[1,2]*U[1,1]*U[2,0] + 45*a[1,2]*a[2,2]*U[1,1]*U[2,0] + 45*a[1,2]*U[0,2]*((-6 + a[1,1] + 4*a[2,2])*U[1,1] + a[1,2]*U[2,0]) -
540*a[1,2]*U[1,0]*U[2,1] + 270*a[1,1]*a[1,2]*U[1,0]*U[2,1] + 270*a[1,2]*a[2,2]*U[1,0]*U[2,1] + 288*xd1**2*U[2,2] + 270*xd1**2*a[1,2]**2*U[2,2] -
540*xd1**2*a[2,2]*U[2,2] + 270*xd1**2*a[2,2]**2*U[2,2] +
36*xd1*xd2*((16 + 15*a[1,2]**2 - 30*a[2,2] + 15*a[2,2]**2)*U[1,3] + 15*a[1,2]*(-2 + a[1,1] + a[2,2])*U[2,2]) - 540*xd1**2*a[1,2]*U[3,1] +
270*xd1**2*a[1,1]*a[1,2]*U[3,1] + 270*xd1**2*a[1,2]*a[2,2]*U[3,1]) +
xd2**2*(320*U[1,1]*U[1,2] - 60*a[1,1]*U[1,1]*U[1,2] - 510*a[2,2]*U[1,1]*U[1,2] + 45*a[1,1]*a[2,2]*U[1,1]*U[1,2] + 225*a[2,2]**2*U[1,1]*U[1,2] +
5*U[0,2]*((64 + 45*a[1,2]**2 - 114*a[2,2] + 54*a[2,2]**2)*U[0,3] + 3*a[1,2]*(-28 + 12*a[1,1] + 15*a[2,2])*U[1,2]) + 288*U[1,0]*U[1,3] -
540*a[2,2]*U[1,0]*U[1,3] + 270*a[2,2]**2*U[1,0]*U[1,3] + 288*xd1**2*U[2,3] - 540*xd1**2*a[2,2]*U[2,3] + 270*xd1**2*a[2,2]**2*U[2,3] +
45*a[1,2]**2*(6*U[1,1]*U[1,2] + 6*U[1,0]*U[1,3] + U[0,3]*U[2,0] + 6*xd1**2*U[2,3]) +
15*a[1,2]*((-14 + 3*a[1,1] + 9*a[2,2])*U[0,3]*U[1,1] + (-10 + 6*a[1,1] + 3*a[2,2])*U[1,2]*U[2,0] +
6*(-2 + a[1,1] + a[2,2])*(2*U[1,1]*U[2,1] + 3*U[1,0]*U[2,2] + 3*xd1**2*U[3,2]))) - 540*xd1**2*a[1,2]*U[1,0]*U[4,0] +
270*xd1**2*a[1,1]*a[1,2]*U[1,0]*U[4,0] + 270*xd1**2*a[1,2]*a[2,2]*U[1,0]*U[4,0] + 48*xd1**4*U[4,1] + 45*xd1**4*a[1,2]**2*U[4,1] - 90*xd1**4*a[2,2]*U[4,1] +
45*xd1**4*a[2,2]**2*U[4,1] + 2*xd1*xd2*(6*(16 + 15*a[1,2]**2 - 30*a[2,2] + 15*a[2,2]**2)*U[0,3]*U[1,1] - 390*a[1,2]*U[1,1]*U[1,2] +
135*a[1,1]*a[1,2]*U[1,1]*U[1,2] + 225*a[1,2]*a[2,2]*U[1,1]*U[1,2] + 96*U[1,2]*U[2,0] + 135*a[1,2]**2*U[1,2]*U[2,0] - 180*a[2,2]*U[1,2]*U[2,0] +
90*a[2,2]**2*U[1,2]*U[2,0] + 224*U[1,1]*U[2,1] - 60*a[1,1]*U[1,1]*U[2,1] + 180*a[1,2]**2*U[1,1]*U[2,1] - 330*a[2,2]*U[1,1]*U[2,1] +
45*a[1,1]*a[2,2]*U[1,1]*U[2,1] + 135*a[2,2]**2*U[1,1]*U[2,1] - 330*a[1,2]*U[2,0]*U[2,1] + 180*a[1,1]*a[1,2]*U[2,0]*U[2,1] +
135*a[1,2]*a[2,2]*U[2,0]*U[2,1] + U[0,2]*((224 + 135*a[1,2]**2 - 390*a[2,2] + 180*a[2,2]**2)*U[1,2] + 15*a[1,2]*(-16 + 6*a[1,1] + 9*a[2,2])*U[2,1]) +
288*U[1,0]*U[2,2] + 270*a[1,2]**2*U[1,0]*U[2,2] - 540*a[2,2]*U[1,0]*U[2,2] + 270*a[2,2]**2*U[1,0]*U[2,2] - 180*a[1,2]*U[1,1]*U[3,0] +
90*a[1,1]*a[1,2]*U[1,1]*U[3,0] + 90*a[1,2]*a[2,2]*U[1,1]*U[3,0] - 540*a[1,2]*U[1,0]*U[3,1] + 270*a[1,1]*a[1,2]*U[1,0]*U[3,1] +
270*a[1,2]*a[2,2]*U[1,0]*U[3,1] + 96*xd1**2*U[3,2] + 90*xd1**2*a[1,2]**2*U[3,2] - 180*xd1**2*a[2,2]*U[3,2] + 90*xd1**2*a[2,2]**2*U[3,2] -
180*xd1**2*a[1,2]*U[4,1] + 90*xd1**2*a[1,1]*a[1,2]*U[4,1] + 90*xd1**2*a[1,2]*a[2,2]*U[4,1]) - 90*xd1**4*a[1,2]*U[5,0] + 45*xd1**4*a[1,1]*a[1,2]*U[5,0] +
45*xd1**4*a[1,2]*a[2,2]*U[5,0])/720.])
r = ode2(z,UJet,a,h)[2:] + h**4*rhs
return np.concatenate([ud,r])
def Hmod(x,UJet,a,h):
    """Evaluate the modified Hamiltonian along a discrete trajectory.

    Parameters
    ----------
    x : ndarray
        Trajectory samples; assumed planar, i.e. two components per sample
        (only columns 0 and 1 are read) -- TODO confirm against callers.
        Five-point stencils are used, so results cover the interior points
        x[2:-2].
    UJet : nested sequence of callables
        UJet[i][j] evaluates the (i, j)-th partial-derivative jet of the
        potential at the (transposed) interior points.
    a : 2-D array-like
        Method coefficients; entries a[1,1], a[1,2], a[2,2] are read.
    h : float
        Step size of the discretization.

    Returns
    -------
    (H0, H2, H4) : tuple of ndarrays
        H0 is the unmodified Hamiltonian at each interior point; H2 and H4
        add the O(h**2) and O(h**4) corrections respectively
        (H2 = H0 + h**2*H2diff, H4 = H2 + h**4*H4diff).
    """
    # Five-point central difference approximation of the velocity.
    dx = (-x[4:]+8*x[3:-1]-8*x[1:-3]+x[:-4])/(12*h) # fourth order finite differences
    # Squared norm of the velocity at each interior point.
    dnrm2 = np.sum(dx**2,1)
    # NOTE(review): nrm2 is computed but never used below -- possibly a
    # leftover from a related routine (cf. Imod); confirm before removing.
    nrm2 = np.sum(x[2:-2]**2,1)
    u = np.transpose(x[2:-2])
    # U[i,j] holds the (i,j)-th derivative jet of the potential evaluated at
    # every interior point (third axis); only the triangle i+j <= 5 is filled.
    U = np.zeros((6,6,u.shape[1]))
    U[0,0] = UJet[0][0](u)
    U[0,1] = UJet[0][1](u)
    U[0,2] = UJet[0][2](u)
    U[0,3] = UJet[0][3](u)
    U[0,4] = UJet[0][4](u)
    U[0,5] = UJet[0][5](u)
    U[1,0] = UJet[1][0](u)
    U[1,1] = UJet[1][1](u)
    U[1,2] = UJet[1][2](u)
    U[1,3] = UJet[1][3](u)
    U[1,4] = UJet[1][4](u)
    U[2,0] = UJet[2][0](u)
    U[2,1] = UJet[2][1](u)
    U[2,2] = UJet[2][2](u)
    U[2,3] = UJet[2][3](u)
    U[3,0] = UJet[3][0](u)
    U[3,1] = UJet[3][1](u)
    U[3,2] = UJet[3][2](u)
    U[4,0] = UJet[4][0](u)
    U[4,1] = UJet[4][1](u)
    U[5,0] = UJet[5][0](u)
    # Velocity components of the two coordinates.
    xd1 = dx[:,0]
    xd2 = dx[:,1]
    # Leading-order Hamiltonian: kinetic energy minus potential.
    H0 = 1/2*dnrm2 - U[0,0]
    # O(h**2) correction term (apparently CAS-generated; do not edit by hand).
    H2diff=((-4 + 3*a[2,2])*U[0,1]**2 + 8*xd2**2*U[0,2] - 6*xd2**2*a[2,2]*U[0,2] + 6*a[1,2]*U[0,1]*U[1,0] - 4*U[1,0]**2 + 3*a[1,1]*U[1,0]**2 - 6*xd2**2*a[1,2]*U[1,1] +
        xd1**2*(-6*a[1,2]*U[1,1] + 2*(4 - 3*a[1,1])*U[2,0]) - 2*xd1*xd2*(3*a[1,2]*U[0,2] + (-8 + 3*a[1,1] + 3*a[2,2])*U[1,1] + 3*a[1,2]*U[2,0]))/24.
    H2 = H0+h**2*H2diff
    # O(h**4) correction term (apparently CAS-generated; do not edit by hand).
    H4diff=(90*xd1**2*a[1,2]*U[0,2]*U[1,1] - 45*xd1**2*a[1,1]*a[1,2]*U[0,2]*U[1,1] - 45*xd1**2*a[1,2]*a[2,2]*U[0,2]*U[1,1] - 90*a[1,2]*U[1,0]**2*U[1,1] +
        45*a[1,1]*a[1,2]*U[1,0]**2*U[1,1] + 45*a[1,2]*a[2,2]*U[1,0]**2*U[1,1] - 32*xd1**2*U[1,1]**2 + 90*xd1**2*a[1,1]*U[1,1]**2 - 45*xd1**2*a[1,1]**2*U[1,1]**2 -
        45*xd1**2*a[1,2]**2*U[1,1]**2 - 15*xd1**2*a[2,2]*U[1,1]**2 +
        U[0,1]**2*((48 + 45*a[1,2]**2 - 90*a[2,2] + 45*a[2,2]**2)*U[0,2] + 45*a[1,2]*(-2 + a[1,1] + a[2,2])*U[1,1]) -
        3*xd2**4*((16 + 15*a[1,2]**2 - 30*a[2,2] + 15*a[2,2]**2)*U[0,4] + 15*a[1,2]*(-2 + a[1,1] + a[2,2])*U[1,3]) + 48*U[1,0]**2*U[2,0] -
        90*a[1,1]*U[1,0]**2*U[2,0] + 45*a[1,1]**2*U[1,0]**2*U[2,0] + 45*a[1,2]**2*U[1,0]**2*U[2,0] + 60*xd1**2*a[1,2]*U[1,1]*U[2,0] -
        45*xd1**2*a[1,1]*a[1,2]*U[1,1]*U[2,0] - 45*xd1**2*a[1,2]*a[2,2]*U[1,1]*U[2,0] - 32*xd1**2*U[2,0]**2 + 75*xd1**2*a[1,1]*U[2,0]**2 -
        45*xd1**2*a[1,1]**2*U[2,0]**2 - 45*xd1**2*a[1,2]**2*U[2,0]**2 + 180*xd1**2*a[1,2]*U[1,0]*U[2,1] - 90*xd1**2*a[1,1]*a[1,2]*U[1,0]*U[2,1] -
        90*xd1**2*a[1,2]*a[2,2]*U[1,0]*U[2,1] - 3*xd1*xd2**3*(60*a[1,2]**2*U[1,3] + (64 - 30*a[1,1] + 15*a[1,1]**2 - 90*a[2,2] + 45*a[2,2]**2)*U[1,3] +
        15*a[1,2]*(-2 + a[1,1] + a[2,2])*(U[0,4] + 3*U[2,2])) - 96*xd1**2*U[1,0]*U[3,0] + 180*xd1**2*a[1,1]*U[1,0]*U[3,0] - 90*xd1**2*a[1,1]**2*U[1,0]*U[3,0] -
        90*xd1**2*a[1,2]**2*U[1,0]*U[3,0] - 3*U[0,1]*(-32*U[1,0]*U[1,1] + 30*a[1,1]*U[1,0]*U[1,1] - 15*a[1,1]**2*U[1,0]*U[1,1] + 30*a[2,2]*U[1,0]*U[1,1] -
        15*a[2,2]**2*U[1,0]*U[1,1] + xd2**2*((32 + 30*a[1,2]**2 - 60*a[2,2] + 30*a[2,2]**2)*U[0,3] + 30*a[1,2]*(-2 + a[1,1] + a[2,2])*U[1,2]) +
        32*xd1**2*U[2,1] - 90*xd1**2*a[1,1]*U[2,1] + 45*xd1**2*a[1,1]**2*U[2,1] + 30*xd1**2*a[2,2]*U[2,1] - 15*xd1**2*a[2,2]**2*U[2,1] -
        30*a[1,2]**2*(U[1,0]*U[1,1] - xd1**2*U[2,1]) + xd1*xd2*
        (60*a[1,2]**2*U[1,2] + (64 - 90*a[1,1] + 45*a[1,1]**2 - 30*a[2,2] + 15*a[2,2]**2)*U[1,2] + 15*a[1,2]*(-2 + a[1,1] + a[2,2])*(3*U[0,3] + U[2,1])) -
        15*a[1,2]*(-2 + a[1,1] + a[2,2])*(U[0,2]*U[1,0] + U[1,0]*U[2,0] + xd1**2*(-3*U[1,2] + U[3,0]))) + 90*xd1**4*a[1,2]*U[3,1] -
        45*xd1**4*a[1,1]*a[1,2]*U[3,1] - 45*xd1**4*a[1,2]*a[2,2]*U[3,1] -
        xd2**2*((32 + 45*a[1,2]**2 - 75*a[2,2] + 45*a[2,2]**2)*U[0,2]**2 + 15*a[1,2]*(-4 + 3*a[1,1] + 3*a[2,2])*U[0,2]*U[1,1] + 32*U[1,1]**2 + 15*a[1,1]*U[1,1]**2 -
        90*a[2,2]*U[1,1]**2 + 45*a[2,2]**2*U[1,1]**2 + 96*U[1,0]*U[1,2] + 90*a[1,1]*U[1,0]*U[1,2] - 45*a[1,1]**2*U[1,0]*U[1,2] - 270*a[2,2]*U[1,0]*U[1,2] +
        135*a[2,2]**2*U[1,0]*U[1,2] + 288*xd1**2*U[2,2] - 270*xd1**2*a[1,1]*U[2,2] + 135*xd1**2*a[1,1]**2*U[2,2] - 270*xd1**2*a[2,2]*U[2,2] +
        135*xd1**2*a[2,2]**2*U[2,2] + 45*a[1,2]**2*(U[1,1]**2 + 2*U[1,0]*U[1,2] + 6*xd1**2*U[2,2]) -
        45*a[1,2]*(-2 + a[1,1] + a[2,2])*(U[0,3]*U[1,0] - U[1,1]*U[2,0] - 3*U[1,0]*U[2,1] - 3*xd1**2*(U[1,3] + U[3,1]))) - 48*xd1**4*U[4,0] +
        90*xd1**4*a[1,1]*U[4,0] - 45*xd1**4*a[1,1]**2*U[4,0] - 45*xd1**4*a[1,2]**2*U[4,0] -
        xd1*xd2*(64*U[0,2]*U[1,1] - 90*a[1,1]*U[0,2]*U[1,1] + 45*a[1,1]**2*U[0,2]*U[1,1] - 60*a[2,2]*U[0,2]*U[1,1] + 45*a[2,2]**2*U[0,2]*U[1,1] + 64*U[1,1]*U[2,0] -
        60*a[1,1]*U[1,1]*U[2,0] + 45*a[1,1]**2*U[1,1]*U[2,0] - 90*a[2,2]*U[1,1]*U[2,0] + 45*a[2,2]**2*U[1,1]*U[2,0] + 192*U[1,0]*U[2,1] -
        90*a[1,1]*U[1,0]*U[2,1] + 45*a[1,1]**2*U[1,0]*U[2,1] - 270*a[2,2]*U[1,0]*U[2,1] + 135*a[2,2]**2*U[1,0]*U[2,1] + 192*xd1**2*U[3,1] -
        270*xd1**2*a[1,1]*U[3,1] + 135*xd1**2*a[1,1]**2*U[3,1] - 90*xd1**2*a[2,2]*U[3,1] + 45*xd1**2*a[2,2]**2*U[3,1] +
        90*a[1,2]**2*(U[0,2]*U[1,1] + U[1,1]*U[2,0] + 2*U[1,0]*U[2,1] + 2*xd1**2*U[3,1]) +
        15*a[1,2]*(3*(-2 + a[1,1] + a[2,2])*U[0,2]**2 + 2*(-5 + 3*a[1,1] + 3*a[2,2])*U[1,1]**2 + 2*U[0,2]*U[2,0] +
        3*(-2 + a[1,1] + a[2,2])*(U[2,0]**2 + U[1,0]*(U[1,2] + 3*U[3,0]) + xd1**2*(3*U[2,2] + U[4,0])))))/720
    H4 = H2+h**4*H4diff
    return H0, H2, H4
def Imod(x,VJet,a,h):
    """Evaluate the modified angular momentum along a discrete trajectory.

    Valid when the potential is radial, U = 1/2*V(x1**2 + x2**2) (the jets in
    VJet are evaluated at the squared norm), and the matrix coefficients A of
    the method commute with a rotation matrix.

    Parameters
    ----------
    x : ndarray
        Trajectory samples; assumed planar, two components per sample --
        TODO confirm against callers. Five-point stencils are used, so
        results cover the interior points x[2:-2].
    VJet : sequence of callables
        VJet[k] evaluates the k-th derivative of V at the squared norms;
        entries 1..4 are used here.
    a : 2-D array-like
        Method coefficients; entries a[1,1], a[1,2], a[2,2] are read.
    h : float
        Step size of the discretization.

    Returns
    -------
    (I0, I2, I4) : tuple of ndarrays
        I0 is the plain angular momentum at each interior point; I2 and I4
        add the O(h**2) and O(h**4) corrections respectively.
    """
    # Five-point central difference approximation of the velocity.
    dx = (-x[4:]+8*x[3:-1]-8*x[1:-3]+x[:-4])/(12*h) # fourth order finite differences
    # Squared norm of the positions; argument of the radial-potential jets.
    nrm2 = np.sum(x[2:-2]**2,1)
    V1 = VJet[1](nrm2)
    V2 = VJet[2](nrm2)
    V3 = VJet[3](nrm2)
    V4 = VJet[4](nrm2)
    # Velocity and position components at the interior points.
    xd1 = dx[:,0]
    xd2 = dx[:,1]
    x1 = x[2:-2,0]
    x2 = x[2:-2,1]
    # I0: standard planar angular momentum x cross xdot.
    I0 = -(x2*xd1) + x1*xd2
    # I2: O(h**2) correction (apparently CAS-generated; do not edit by hand).
    I2diff= (x1*(-12*xd1*a[1,2] + xd2*(8 + 3*a[1,1] - 9*a[2,2]))*V1 - 6*x1**3*xd1*a[1,2]*V2 + 6*x2**3*xd2*a[1,2]*V2 +
        6*x1*x2**2*(xd1*a[1,2] + xd2*(a[1,1] - a[2,2]))*V2 + x2*
        ((12*xd2*a[1,2] + xd1*(-8 + 9*a[1,1] - 3*a[2,2]))*V1 + 6*x1**2*(-(xd2*a[1,2]) + xd1*(a[1,1] - a[2,2]))*V2))/12.
    I2 = I0+h**2*I2diff
    # I4: O(h**4) correction (apparently CAS-generated; do not edit by hand).
    I4diff = (180*x2**5*xd2*a[1,2]*(-2 + a[1,1] + a[2,2])*(V2**2 + 3*V1*V3 + 2*xd2**2*V4) +
        180*x1*x2**4*(-2 + a[1,1] + a[2,2])*(xd2*(a[1,1] - a[2,2])*(V2**2 + 3*V1*V3 + 2*xd2**2*V4) + xd1*a[1,2]*(V2**2 + 3*V1*V3 + 6*xd2**2*V4)) +
        6*x1*x2**2*(2*(45*xd1**3*a[1,2]*(-2 + a[1,1] + a[2,2]) + 5*xd1*xd2**2*a[1,2]*(-34 + 21*a[1,1] + 21*a[2,2]) +
        2*xd2**3*(-32 + 30*a[1,1]**2 + 95*a[2,2] - 45*a[2,2]**2 - 5*a[1,1]*(7 + 3*a[2,2])) +
        xd1**2*xd2*(128 + 135*a[1,1]**2 + 50*a[2,2] - 75*a[2,2]**2 + 10*a[1,1]*(-29 + 6*a[2,2])))*V3 +
        V1*(60*xd1*a[1,2]*(-1 + a[1,1] + a[2,2])*V2 + xd2*(-64 + 135*a[1,1]**2 + 240*a[2,2] - 165*a[2,2]**2 - 30*a[1,1]*(4 + a[2,2]))*V2 +
        90*x1**2*xd2*(-2*a[1,1] + a[1,1]**2 - (-2 + a[2,2])*a[2,2])*V3) +
        30*x1**2*(-2 + a[1,1] + a[2,2])*(2*xd1**3*a[1,2]*V4 - 6*xd1*xd2**2*a[1,2]*V4 + xd2*(a[1,1] - a[2,2])*(V2**2 + 6*xd1**2*V4))) +
        6*x2**3*(2*xd2*(45*xd1**2*a[1,2]*(-2 + a[1,1] + a[2,2]) + 5*xd2**2*a[1,2]*(-34 + 21*a[1,1] + 21*a[2,2]) +
        xd1*xd2*(64 + 45*a[1,1]**2 - 20*a[2,2] - 15*a[2,2]**2 + 10*a[1,1]*(-10 + 3*a[2,2])))*V3 +
        V1*(20*xd2*a[1,2]*(-11 + 9*a[1,1] + 9*a[2,2])*V2 + xd1*(64 + 45*a[1,1]**2 - 20*a[2,2] - 15*a[2,2]**2 + 10*a[1,1]*(-10 + 3*a[2,2]))*V2 +
        90*x1**2*xd1*(-2*a[1,1] + a[1,1]**2 - (-2 + a[2,2])*a[2,2])*V3) +
        30*x1**2*(-2 + a[1,1] + a[2,2])*(6*xd1**2*xd2*a[1,2]*V4 - 2*xd2**3*a[1,2]*V4 + xd1*(a[1,1] - a[2,2])*(V2**2 + 6*xd2**2*V4))) -
        x1*(-6*xd2**3*(-64 + 15*a[1,1]**2 + a[1,1]*(20 - 30*a[2,2]) + 100*a[2,2] - 45*a[2,2]**2)*V2 +
        60*xd1*xd2**2*a[1,2]*((-10 + 3*a[1,1] + 3*a[2,2])*V2 + 9*x1**2*(-2 + a[1,1] + a[2,2])*V3) +
        xd2*((64 - 45*a[1,1]**2 - 90*a[1,2]**2 + 45*a[2,2] + 45*a[2,2]**2 + 15*a[1,1]*(-13 + 6*a[2,2]))*V1**2 -
        6*x1**2*(-64 + 15*a[1,1]**2 + a[1,1]*(20 - 30*a[2,2]) + 100*a[2,2] - 45*a[2,2]**2)*V1*V2 -
        6*xd1**2*((-64 + 45*a[1,1]**2 + a[1,1]*(40 - 30*a[2,2]) + 80*a[2,2] - 75*a[2,2]**2)*V2 +
        2*x1**2*(-64 + 15*a[1,1]**2 + a[1,1]*(20 - 30*a[2,2]) + 100*a[2,2] - 45*a[2,2]**2)*V3)) +
        30*xd1*a[1,2]*((8 + 3*a[1,1] + 3*a[2,2])*V1**2 + 6*x1**4*(-2 + a[1,1] + a[2,2])*V2**2 +
        2*V1*(2*x1**2*(-11 + 9*a[1,1] + 9*a[2,2])*V2 + 9*x1**4*(-2 + a[1,1] + a[2,2])*V3) +
        2*xd1**2*((-6 + 9*a[1,1] + 9*a[2,2])*V2 + x1**2*(-34 + 21*a[1,1] + 21*a[2,2])*V3 + 6*x1**4*(-2 + a[1,1] + a[2,2])*V4))) +
        x2*((30*xd2*a[1,2]*(8 + 3*a[1,1] + 3*a[2,2]) + xd1*(64 + 45*a[1,1]**2 - 90*a[1,2]**2 - 195*a[2,2] - 45*a[2,2]**2 + 45*a[1,1]*(1 + 2*a[2,2])))*V1**2 +
        6*x1**2*V1*(-30*xd2*a[1,2]*(2*(-1 + a[1,1] + a[2,2])*V2 + 3*x1**2*(-2 + a[1,1] + a[2,2])*V3) +
        xd1*((64 + 165*a[1,1]**2 + 30*a[1,1]*(-8 + a[2,2]) + 120*a[2,2] - 135*a[2,2]**2)*V2 + 90*x1**2*(-2*a[1,1] + a[1,1]**2 - (-2 + a[2,2])*a[2,2])*V3))
        + 6*(-30*xd2*a[1,2]*(x1**4*(-2 + a[1,1] + a[2,2])*V2**2 + xd2**2*((2 - 3*a[1,1] - 3*a[2,2])*V2 + 3*x1**2*(-2 + a[1,1] + a[2,2])*V3)) +
        xd1*(30*x1**4*(-2*a[1,1] + a[1,1]**2 - (-2 + a[2,2])*a[2,2])*V2**2 +
        xd2**2*((64 + 75*a[1,1]**2 - 40*a[2,2] - 45*a[2,2]**2 + 10*a[1,1]*(-8 + 3*a[2,2]))*V2 +
        2*x1**2*(-128 + 75*a[1,1]**2 + 290*a[2,2] - 135*a[2,2]**2 - 10*a[1,1]*(5 + 6*a[2,2]))*V3)) +
        10*xd1**2*xd2*a[1,2]*((-10 + 3*a[1,1] + 3*a[2,2])*V2 - x1**2*(-34 + 21*a[1,1] + 21*a[2,2])*V3 - 18*x1**4*(-2 + a[1,1] + a[2,2])*V4) +
        xd1**3*((64 + 45*a[1,1]**2 - 20*a[2,2] - 15*a[2,2]**2 + 10*a[1,1]*(-10 + 3*a[2,2]))*V2 +
        4*x1**2*(32 + 45*a[1,1]**2 + 35*a[2,2] - 30*a[2,2]**2 + 5*a[1,1]*(-19 + 3*a[2,2]))*V3 +
        60*x1**4*(-2*a[1,1] + a[1,1]**2 - (-2 + a[2,2])*a[2,2])*V4))))/720.
    I4 = I2+h**4*I4diff
    return I0, I2, I4
| 63.51895
| 165
| 0.428512
| 7,056
| 21,787
| 1.323129
| 0.0214
| 0.098115
| 0.087082
| 0.052699
| 0.881427
| 0.824443
| 0.771744
| 0.703942
| 0.584404
| 0.461332
| 0
| 0.311663
| 0.118878
| 21,787
| 342
| 166
| 63.704678
| 0.174663
| 0.014779
| 0
| 0.278431
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019608
| false
| 0
| 0.003922
| 0
| 0.043137
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
029c3de5f1c3111d52ce81d4c4ce836bf9693853
| 96
|
py
|
Python
|
app/auth/__init__.py
|
Brunoro811/api_dangels
|
21c064eaa4f5009412dddc9676044d6cc08a5b65
|
[
"MIT"
] | null | null | null |
app/auth/__init__.py
|
Brunoro811/api_dangels
|
21c064eaa4f5009412dddc9676044d6cc08a5b65
|
[
"MIT"
] | null | null | null |
app/auth/__init__.py
|
Brunoro811/api_dangels
|
21c064eaa4f5009412dddc9676044d6cc08a5b65
|
[
"MIT"
] | null | null | null |
from app.auth.token_handler import token_creator
from app.auth.token_verify import verify_token
| 32
| 48
| 0.875
| 16
| 96
| 5
| 0.5
| 0.175
| 0.275
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 96
| 2
| 49
| 48
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
02badaad3d40dfb031685c9f1e055dbe8077d15f
| 226
|
py
|
Python
|
tests/discover_test.py
|
kamauwashington/python-luhn-algorithm
|
9d50fefb58196f3f764e2dd9da60a3bb50bc0d3f
|
[
"MIT"
] | null | null | null |
tests/discover_test.py
|
kamauwashington/python-luhn-algorithm
|
9d50fefb58196f3f764e2dd9da60a3bb50bc0d3f
|
[
"MIT"
] | null | null | null |
tests/discover_test.py
|
kamauwashington/python-luhn-algorithm
|
9d50fefb58196f3f764e2dd9da60a3bb50bc0d3f
|
[
"MIT"
] | null | null | null |
from luhn import luhn
def test_shouldPassDiscover1():
    """A known-valid Discover test number satisfies the Luhn check."""
    is_valid = luhn("6011111111111117")
    assert is_valid
def test_shouldPassDiscover2():
    """A second known-valid Discover test number satisfies the Luhn check."""
    is_valid = luhn("6011000990139424")
    assert is_valid
def test_shouldFail():
    """A number with a bad check digit fails the Luhn check."""
    is_valid = luhn("6011000390134424")
    assert not is_valid
| 22.6
| 39
| 0.756637
| 23
| 226
| 7.304348
| 0.565217
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.260417
| 0.150442
| 226
| 10
| 39
| 22.6
| 0.614583
| 0
| 0
| 0
| 0
| 0
| 0.211454
| 0
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0.428571
| true
| 0.285714
| 0.142857
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
02dbc73dfb6830716a507c3e3651ff800ed27640
| 116
|
py
|
Python
|
det3d/version.py
|
arke812/Det3D
|
00657ed146dce8f5803e84f505a06402f2e14a08
|
[
"Apache-2.0"
] | null | null | null |
det3d/version.py
|
arke812/Det3D
|
00657ed146dce8f5803e84f505a06402f2e14a08
|
[
"Apache-2.0"
] | null | null | null |
det3d/version.py
|
arke812/Det3D
|
00657ed146dce8f5803e84f505a06402f2e14a08
|
[
"Apache-2.0"
] | null | null | null |
# GENERATED VERSION FILE
# TIME: Sat Jul 25 06:13:57 2020
# Full version string: release tag plus a short commit identifier suffix
# (presumably a git hash injected at build time -- confirm with the generator).
__version__ = '1.0.rc0+8b7d2ce'
# Release tag only, without the commit suffix.
short_version = '1.0.rc0'
| 23.2
| 32
| 0.715517
| 21
| 116
| 3.714286
| 0.761905
| 0.205128
| 0.230769
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.212121
| 0.146552
| 116
| 4
| 33
| 29
| 0.575758
| 0.456897
| 0
| 0
| 1
| 0
| 0.366667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
02fd80a6e556a40caf648b37ee0d9ba8cb0acd42
| 6,408
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/test_show_license.py
|
itdependsnetworks/genieparser
|
117bee0b7366afca5dce50820c4fc7a2ca3a8f9d
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/test_show_license.py
|
itdependsnetworks/genieparser
|
117bee0b7366afca5dce50820c4fc7a2ca3a8f9d
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/test_show_license.py
|
itdependsnetworks/genieparser
|
117bee0b7366afca5dce50820c4fc7a2ca3a8f9d
|
[
"Apache-2.0"
] | null | null | null |
# Python
import unittest
from unittest.mock import Mock
# ATS
from pyats.topology import Device
# Metaparset
from genie.metaparser.util.exceptions import SchemaEmptyParserError
from genie.libs.parser.iosxe.show_license import ShowLicense
# ============================
# Unit test for 'show license'
# ============================
class test_show_license(unittest.TestCase):
    """Unit test for 'show license'"""

    # Show full diffs on assertion failures (unittest truncates by default).
    maxDiff = None
    device = Device(name='aDevice')
    # Simulated empty CLI response: must make the parser raise.
    empty_output = {'execute.return_value': ''}

    # Expected structure produced by ShowLicense when fed golden_output1:
    # one entry per license index, keyed by the integer index.
    golden_parsed_output1 = {
        "licenses": {
            1: {
                "feature": "appxk9",
                "period_left": "Life time",
                "license_type": "Permanent",
                "license_state": "Active, In Use",
                "count": "Non-Counted",
                "license_priority": "Medium",
            },
            2: {
                "feature": "uck9",
                "period_left": "Not Activated",
                "period_minutes": 0,
                "period_seconds": 0,
                "license_type": "EvalRightToUse",
                "license_state": "Active, Not in Use, EULA not accepted",
                "count": "Non-Counted",
                "license_priority": "None"
            },
            3: {
                "feature": "securityk9",
                "period_left": "Life time",
                "license_type": "Permanent",
                "license_state": "Active, In Use",
                "count": "Non-Counted",
                "license_priority": "Medium"
            },
            4: {
                "feature": "ipbasek9",
                "period_left": "Life time",
                "license_type": "Permanent",
                "license_state": "Active, In Use",
                "count": "Non-Counted",
                "license_priority": "Medium"
            },
            5: {
                "feature": "FoundationSuiteK9",
                "period_left": "Not Activated",
                "period_minutes": 0,
                "period_seconds": 0,
                "license_type": "EvalRightToUse",
                "license_state": "Active, Not in Use, EULA not accepted",
                "count": "Non-Counted",
                "license_priority": "None"
            },
            6: {
                "feature": "AdvUCSuiteK9",
                "period_left": "Not Activated",
                "period_minutes": 0,
                "period_seconds": 0,
                "license_type": "EvalRightToUse",
                "license_state": "Active, Not in Use, EULA not accepted",
                "count": "Non-Counted",
                "license_priority": "None"
            },
            # Index 7 has a counted license ("0/0 (In-use/Violation)") and so
            # uses count_in_use/count_violation instead of "count".
            7: {
                "feature": "cme-srst",
                "period_left": "Not Activated",
                "period_minutes": 0,
                "period_seconds": 0,
                "license_type": "EvalRightToUse",
                "license_state": "Active, Not in Use, EULA not accepted",
                "count_in_use": 0,
                "count_violation": 0,
                "license_priority": "None"
            },
            8: {
                "feature": "hseck9",
                "period_left": "Life time",
                "license_type": "Permanent",
                "license_state": "Active, In Use",
                "count": "Non-Counted",
                "license_priority": "Medium"
            },
            9: {
                "feature": "throughput",
                "period_left": "Not Activated",
                "period_minutes": 0,
                "period_seconds": 0,
                "license_type": "EvalRightToUse",
                "license_state": "Active, Not in Use, EULA not accepted",
                "count": "Non-Counted",
                "license_priority": "None"
            }
        }
    }

    # Raw device output for 'show license' fed to the parser via a mock.
    golden_output1 = {'execute.return_value': '''
Index 1 Feature: appxk9
Period left: Life time
License Type: Permanent
License State: Active, In Use
License Count: Non-Counted
License Priority: Medium
Index 2 Feature: uck9
Period left: Not Activated
Period Used: 0 minute 0 second
License Type: EvalRightToUse
License State: Active, Not in Use, EULA not accepted
License Count: Non-Counted
License Priority: None
Index 3 Feature: securityk9
Period left: Life time
License Type: Permanent
License State: Active, In Use
License Count: Non-Counted
License Priority: Medium
Index 4 Feature: ipbasek9
Period left: Life time
License Type: Permanent
License State: Active, In Use
License Count: Non-Counted
License Priority: Medium
Index 5 Feature: FoundationSuiteK9
Period left: Not Activated
Period Used: 0 minute 0 second
License Type: EvalRightToUse
License State: Active, Not in Use, EULA not accepted
License Count: Non-Counted
License Priority: None
Index 6 Feature: AdvUCSuiteK9
Period left: Not Activated
Period Used: 0 minute 0 second
License Type: EvalRightToUse
License State: Active, Not in Use, EULA not accepted
License Count: Non-Counted
License Priority: None
Index 7 Feature: cme-srst
Period left: Not Activated
Period Used: 0 minute 0 second
License Type: EvalRightToUse
License State: Active, Not in Use, EULA not accepted
License Count: 0/0 (In-use/Violation)
License Priority: None
Index 8 Feature: hseck9
Period left: Life time
License Type: Permanent
License State: Active, In Use
License Count: Non-Counted
License Priority: Medium
Index 9 Feature: throughput
Period left: Not Activated
Period Used: 0 minute 0 second
License Type: EvalRightToUse
License State: Active, Not in Use, EULA not accepted
License Count: Non-Counted
License Priority: None
'''
    }

    def test_show_license_full(self):
        """Golden output parses into the expected dictionary."""
        self.device = Mock(**self.golden_output1)
        obj = ShowLicense(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output1)

    def test_show_license_empty(self):
        """Empty output raises SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        obj = ShowLicense(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 33.904762
| 69
| 0.5451
| 631
| 6,408
| 5.41046
| 0.153724
| 0.029291
| 0.094903
| 0.103105
| 0.804921
| 0.76157
| 0.76157
| 0.76157
| 0.717633
| 0.708846
| 0
| 0.014378
| 0.348783
| 6,408
| 188
| 70
| 34.085106
| 0.803738
| 0.021536
| 0
| 0.623529
| 0
| 0
| 0.607286
| 0
| 0
| 0
| 0
| 0
| 0.011765
| 1
| 0.011765
| false
| 0
| 0.029412
| 0
| 0.076471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f31188ef1fdb9c416b2af25ebd72d59a831e68ad
| 2,369
|
py
|
Python
|
test_tradutor_lexico.py
|
Psidium/tradutores-ga
|
7d06e6014bdc8a1a77971c3f4374641370937d14
|
[
"MIT"
] | null | null | null |
test_tradutor_lexico.py
|
Psidium/tradutores-ga
|
7d06e6014bdc8a1a77971c3f4374641370937d14
|
[
"MIT"
] | null | null | null |
test_tradutor_lexico.py
|
Psidium/tradutores-ga
|
7d06e6014bdc8a1a77971c3f4374641370937d14
|
[
"MIT"
] | null | null | null |
import unittest
import tradutor_lexico
class TestLexical(unittest.TestCase):
    """Unit tests for tradutor_lexico.generate_tokens.

    Fix: the original used ``assertEquals``, a deprecated alias of
    ``assertEqual`` that was removed in Python 3.12; all call sites are
    switched to the supported name (same comparison semantics).
    """

    def test_definition(self):
        """A simple declaration yields a reserved word and an id token."""
        tokens = tradutor_lexico.generate_tokens("int a;")
        self.assertEqual(len(tokens), 2)
        self.assertEqual(tokens[0].token, "reserved_word")
        self.assertEqual(tokens[0].lexeme, "int")
        self.assertEqual(tokens[1].token, "id")
        self.assertEqual(tokens[1].lexeme, 1)

    def test_definition_with_comments(self):
        """A trailing // comment is ignored by the tokenizer."""
        tokens = tradutor_lexico.generate_tokens("int a; // 'a' is a terrible variable name")
        self.assertEqual(len(tokens), 2)
        self.assertEqual(tokens[0].token, "reserved_word")
        self.assertEqual(tokens[0].lexeme, "int")
        self.assertEqual(tokens[1].token, "id")
        self.assertEqual(tokens[1].lexeme, 1)

    def test_attribution(self):
        """A declaration with assignment yields four tokens."""
        tokens = tradutor_lexico.generate_tokens("int a = 32;")
        self.assertEqual(len(tokens), 4)
        self.assertEqual(tokens[0].token, "reserved_word")
        self.assertEqual(tokens[0].lexeme, "int")
        self.assertEqual(tokens[1].token, "id")
        self.assertEqual(tokens[1].lexeme, 1)
        self.assertEqual(tokens[2].token, "equal_op")
        self.assertEqual(tokens[2].lexeme, "=")
        self.assertEqual(tokens[3].token, "num")
        self.assertEqual(tokens[3].lexeme, "32")

    def test_attribution_with_comments(self):
        """Assignment followed by a // comment tokenizes the same way."""
        tokens = tradutor_lexico.generate_tokens("int a = 32; //a again")
        self.assertEqual(len(tokens), 4)
        self.assertEqual(tokens[0].token, "reserved_word")
        self.assertEqual(tokens[0].lexeme, "int")
        self.assertEqual(tokens[1].token, "id")
        self.assertEqual(tokens[1].lexeme, 1)
        self.assertEqual(tokens[2].token, "equal_op")
        self.assertEqual(tokens[2].lexeme, "=")
        self.assertEqual(tokens[3].token, "num")
        self.assertEqual(tokens[3].lexeme, "32")

    def test_should_return_none_when_invalid_command(self):
        """Invalid input makes generate_tokens return None."""
        tokens = tradutor_lexico.generate_tokens("int = int")
        self.assertIsNone(tokens)

    # NOTE: method name keeps the historical "sentece" spelling so external
    # test selection by name keeps working.
    def test_if_sentece(self):
        """An if/else statement yields the expected token count."""
        tokens = tradutor_lexico.generate_tokens("if (3 < 4) { int a = 32; }" +
                                                 "else { int b = 2; }")
        self.assertEqual(len(tokens), 19)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| 41.561404
| 93
| 0.642465
| 285
| 2,369
| 5.189474
| 0.178947
| 0.324544
| 0.371873
| 0.124408
| 0.802569
| 0.802569
| 0.776876
| 0.749155
| 0.720757
| 0.689655
| 0
| 0.025282
| 0.215281
| 2,369
| 56
| 94
| 42.303571
| 0.770307
| 0
| 0
| 0.583333
| 1
| 0
| 0.101731
| 0
| 0
| 0
| 0
| 0
| 0.625
| 1
| 0.125
| false
| 0
| 0.041667
| 0
| 0.1875
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b8bb21a2620144136f2bd4c9fe081fcd86d5f5ad
| 23,568
|
py
|
Python
|
tests/tests/test_mender_gateway.py
|
zffgithub/integration
|
396d9ef053b28814b0e3323a2ebaa5c0b5fc75d3
|
[
"Apache-2.0"
] | null | null | null |
tests/tests/test_mender_gateway.py
|
zffgithub/integration
|
396d9ef053b28814b0e3323a2ebaa5c0b5fc75d3
|
[
"Apache-2.0"
] | null | null | null |
tests/tests/test_mender_gateway.py
|
zffgithub/integration
|
396d9ef053b28814b0e3323a2ebaa5c0b5fc75d3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2022 Northern.tech AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pytest
import shutil
import subprocess
import tempfile
from .. import conftest
from ..common_setup import (
standard_setup_one_client_bootstrapped_with_gateway,
standard_setup_two_clients_bootstrapped_with_gateway,
enterprise_one_client_bootstrapped_with_gateway,
enterprise_two_clients_bootstrapped_with_gateway,
)
from .common_artifact import get_script_artifact
from .common_update import common_update_procedure, update_image
from ..MenderAPI import DeviceAuthV2, Deployments
from .mendertesting import MenderTesting
from ..helpers import Helpers
from testutils.infra.device import MenderDeviceGroup
@pytest.fixture(scope="function")
def image_with_mender_conf_and_mender_gateway_conf(request):
    """Insert mender.conf and mender-gateway.conf into an image

    Yields a callable ``(image, mender_conf, mender_gateway_conf) -> path``
    that copies *image* into a per-test scratch directory and injects both
    configuration files (delegates to
    add_mender_conf_and_mender_gateway_conf).
    """
    with tempfile.TemporaryDirectory() as d:
        # NOTE(review): this finalizer looks redundant with the
        # TemporaryDirectory context manager, which already removes d on
        # exit -- presumably kept as belt-and-braces cleanup; confirm.
        def cleanup():
            shutil.rmtree(d, ignore_errors=True)

        request.addfinalizer(cleanup)

        # Yield inside the with-block so the scratch directory stays alive
        # for the whole test that consumes this fixture.
        yield lambda image, mender_conf, mender_gateway_conf: add_mender_conf_and_mender_gateway_conf(
            d, image, mender_conf, mender_gateway_conf
        )
def add_mender_conf_and_mender_gateway_conf(d, image, mender_conf, mender_gateway_conf):
    """Copy *image* into directory *d* and replace /etc/mender/mender.conf
    and /etc/mender/mender-gateway.conf inside the copy.

    Parameters:
        d: scratch directory used for temp files and the image copy.
        image: path of the (ext-family) filesystem image to modify.
        mender_conf / mender_gateway_conf: new file contents as strings.

    Returns the path of the modified image copy. Requires the ``debugfs``
    binary (e2fsprogs); raises CalledProcessError if any debugfs call fails.
    """
    # Write the two replacement config files into the scratch directory.
    mender_conf_tmp = os.path.join(d, "mender.conf")
    with open(mender_conf_tmp, "w") as f:
        f.write(mender_conf)
    mender_gateway_conf_tmp = os.path.join(d, "mender-gateway.conf")
    with open(mender_gateway_conf_tmp, "w") as f:
        f.write(mender_gateway_conf)
    # Work on a copy so the original image stays pristine.
    new_image = os.path.join(d, image)
    shutil.copy(image, new_image)
    # debugfs instruction script: delete the old files, write the new ones.
    # Fix: reuse instr_file instead of recomputing the same os.path.join.
    instr_file = os.path.join(d, "write.instr")
    with open(instr_file, "w") as f:
        f.write(
            """cd /etc/mender
rm mender.conf
rm mender-gateway.conf
write {local1} mender.conf
write {local2} mender-gateway.conf
""".format(
                local1=mender_conf_tmp, local2=mender_gateway_conf_tmp,
            )
        )
    # Apply the script in write mode (-w) against the image copy.
    subprocess.run(
        ["debugfs", "-w", "-f", instr_file, new_image],
        check=True,
        stdout=subprocess.PIPE,
    )
    # Read both files back as a sanity check that the writes landed
    # (check=True makes a missing file fail the call).
    subprocess.run(
        ["debugfs", "-R", "cat /etc/mender/mender.conf", new_image],
        check=True,
        stdout=subprocess.PIPE,
    )
    subprocess.run(
        ["debugfs", "-R", "cat /etc/mender/mender-gateway.conf", new_image],
        check=True,
        stdout=subprocess.PIPE,
    )
    return new_image
class BaseTestMenderGateway(MenderTesting):
    """Shared scenarios for deployments routed through a Mender gateway.

    The ``do_test_*`` methods hold the actual test logic; concrete
    subclasses bind them to open-source or enterprise pytest fixtures.
    In all scenarios ``env`` provides the devices, the auth handle and
    the virtual network, and images are rebuilt to carry the device's
    current /etc/mender/mender.conf so server settings survive the update.
    """
    def do_test_deployment_one_device(self, env, valid_image_with_mender_conf):
        """Deploy a valid image to the single device behind the gateway
        and verify the update completes."""
        mender_device = env.device
        devauth = DeviceAuthV2(env.auth)
        deploy = Deployments(env.auth, devauth)
        host_ip = env.get_virtual_network_host_ip()
        # Capture the device's live mender.conf so the new image keeps it.
        mender_conf = mender_device.run("cat /etc/mender/mender.conf")
        device_id = Helpers.ip_to_device_id_map(
            MenderDeviceGroup([mender_device.host_string]), devauth=devauth,
        )[mender_device.host_string]
        update_image(
            mender_device,
            host_ip,
            expected_mender_clients=1,
            install_image=valid_image_with_mender_conf(mender_conf),
            devauth=devauth,
            deploy=deploy,
            devices=[device_id],
        )
    def do_test_deployment_gateway_and_one_device(
        self,
        env,
        valid_image_with_mender_conf,
        image_with_mender_conf_and_mender_gateway_conf,
    ):
        """Update the gateway itself and, while that deployment is in
        flight, also update the device behind it; both must succeed."""
        mender_device = env.device
        mender_gateway = env.device_gateway
        devauth = DeviceAuthV2(env.auth)
        deploy = Deployments(env.auth, devauth)
        mender_device_mender_conf = mender_device.run("cat /etc/mender/mender.conf")
        mender_gateway_gateway_conf = mender_gateway.run(
            "cat /etc/mender/mender-gateway.conf"
        )
        mender_gateway_mender_conf = mender_gateway.run("cat /etc/mender/mender.conf")
        host_ip = env.get_virtual_network_host_ip()
        ip_to_device_id = Helpers.ip_to_device_id_map(
            MenderDeviceGroup([mender_device.host_string, mender_gateway.host_string]),
            devauth=devauth,
        )
        # The gateway image must embed both its mender.conf and its
        # mender-gateway.conf (see the fixture for the debugfs write).
        mender_gateway_image = image_with_mender_conf_and_mender_gateway_conf(
            "mender-gateway-image-full-cmdline-%s.ext4" % conftest.machine_name,
            mender_gateway_mender_conf,
            mender_gateway_gateway_conf,
        )
        def update_device():
            # Runs as deployment_triggered_callback: the device update is
            # performed while the gateway deployment is already in progress.
            device_id = ip_to_device_id[mender_device.host_string]
            update_image(
                mender_device,
                host_ip,
                expected_mender_clients=1,
                install_image=valid_image_with_mender_conf(mender_device_mender_conf),
                devauth=devauth,
                deploy=deploy,
                devices=[device_id],
            )
        gateway_id = ip_to_device_id[mender_gateway.host_string]
        # verify_status=False: final status is asserted explicitly below,
        # after the nested device update has also completed.
        deployment_id, _ = common_update_procedure(
            mender_gateway_image,
            devices=[gateway_id],
            devauth=devauth,
            deploy=deploy,
            deployment_triggered_callback=update_device,
            verify_status=False,
        )
        deploy.check_expected_statistics(deployment_id, "success", 1)
        deploy.check_expected_status("finished", deployment_id)
    def do_test_deployment_two_devices_update_both(
        self, env, valid_image_with_mender_conf
    ):
        """Single deployment targeting both devices behind the gateway;
        verify both install the new image."""
        device_group = env.device_group
        mender_device_1 = device_group[0]
        mender_device_2 = device_group[1]
        devauth = DeviceAuthV2(env.auth)
        deploy = Deployments(env.auth, devauth)
        host_ip = env.get_virtual_network_host_ip()
        mender_conf = mender_device_1.run("cat /etc/mender/mender.conf")
        ip_to_device_id = Helpers.ip_to_device_id_map(device_group, devauth=devauth)
        device_id_1 = ip_to_device_id[mender_device_1.host_string]
        device_id_2 = ip_to_device_id[mender_device_2.host_string]
        update_image(
            mender_device_1,
            host_ip,
            expected_mender_clients=2,
            install_image=valid_image_with_mender_conf(mender_conf),
            devauth=devauth,
            deploy=deploy,
            devices=[device_id_1, device_id_2],
        )
    def do_test_deployment_two_devices_update_one(
        self, env, valid_image_with_mender_conf
    ):
        """Two devices behind the gateway, deployment targets only the
        first one."""
        device_group = env.device_group
        mender_device_1 = device_group[0]
        devauth = DeviceAuthV2(env.auth)
        deploy = Deployments(env.auth, devauth)
        host_ip = env.get_virtual_network_host_ip()
        mender_conf = mender_device_1.run("cat /etc/mender/mender.conf")
        ip_to_device_id = Helpers.ip_to_device_id_map(device_group, devauth=devauth)
        device_id_1 = ip_to_device_id[mender_device_1.host_string]
        update_image(
            mender_device_1,
            host_ip,
            expected_mender_clients=1,
            install_image=valid_image_with_mender_conf(mender_conf),
            devauth=devauth,
            deploy=deploy,
            devices=[device_id_1],
        )
    def do_test_deployment_two_devices_parallel_updates(
        self, env, valid_image_with_mender_conf
    ):
        """Two independent deployments (one per device) running in
        parallel; both must reboot into the new image and finish."""
        device_group = env.device_group
        mender_device_1 = device_group[0]
        mender_device_2 = device_group[1]
        devauth = DeviceAuthV2(env.auth)
        deploy = Deployments(env.auth, devauth)
        host_ip = env.get_virtual_network_host_ip()
        mender_conf = mender_device_1.run("cat /etc/mender/mender.conf")
        valid_image = valid_image_with_mender_conf(mender_conf)
        ip_to_device_id = Helpers.ip_to_device_id_map(device_group, devauth=devauth)
        device_id_1 = ip_to_device_id[mender_device_1.host_string]
        device_id_2 = ip_to_device_id[mender_device_2.host_string]
        # The `with` targets below assign straight into this dict, so both
        # reboot detectors stay active for the whole parallel window.
        reboot = {mender_device_1: None, mender_device_2: None}
        with mender_device_1.get_reboot_detector(host_ip) as reboot[
            mender_device_1
        ], mender_device_2.get_reboot_detector(host_ip) as reboot[mender_device_2]:
            deployment_id_1, expected_image_id_1 = common_update_procedure(
                valid_image, devices=[device_id_1], devauth=devauth, deploy=deploy,
            )
            deployment_id_2, expected_image_id_2 = common_update_procedure(
                valid_image, devices=[device_id_2], devauth=devauth, deploy=deploy,
            )
            reboot[mender_device_1].verify_reboot_performed()
            reboot[mender_device_2].verify_reboot_performed()
            deploy.check_expected_statistics(deployment_id_1, "success", 1)
            # NOTE(review): 404 on logs is asserted after success —
            # presumably no deployment log is stored for successful updates.
            deploy.get_logs(device_id_1, deployment_id_1, expected_status=404)
            deploy.check_expected_statistics(deployment_id_2, "success", 1)
            deploy.get_logs(device_id_2, deployment_id_2, expected_status=404)
            assert mender_device_1.yocto_id_installed_on_machine() == expected_image_id_1
            assert mender_device_2.yocto_id_installed_on_machine() == expected_image_id_2
            deploy.check_expected_status("finished", deployment_id_1)
            deploy.check_expected_status("finished", deployment_id_2)
    def do_test_deployment_two_devices_parallel_updates_one_failure(
        self, env, valid_image_with_mender_conf
    ):
        """Parallel deployments where device 2 receives a broken image:
        device 1 succeeds, device 2 rolls back and reports failure."""
        pytest.skip("Disabled due to MEN-5567.")
        device_group = env.device_group
        mender_device_1 = device_group[0]
        mender_device_2 = device_group[1]
        devauth = DeviceAuthV2(env.auth)
        deploy = Deployments(env.auth, devauth)
        host_ip = env.get_virtual_network_host_ip()
        mender_conf = mender_device_1.run("cat /etc/mender/mender.conf")
        valid_image = valid_image_with_mender_conf(mender_conf)
        ip_to_device_id = Helpers.ip_to_device_id_map(device_group, devauth=devauth)
        device_id_1 = ip_to_device_id[mender_device_1.host_string]
        device_id_2 = ip_to_device_id[mender_device_2.host_string]
        reboot = {mender_device_1: None, mender_device_2: None}
        with mender_device_1.get_reboot_detector(host_ip) as reboot[
            mender_device_1
        ], mender_device_2.get_reboot_detector(host_ip) as reboot[mender_device_2]:
            deployment_id_1, expected_image_id_1 = common_update_procedure(
                valid_image, devices=[device_id_1], devauth=devauth, deploy=deploy,
            )
            deployment_id_2, expected_image_id_2 = common_update_procedure(
                "broken_update.ext4",
                devices=[device_id_2],
                devauth=devauth,
                deploy=deploy,
            )
            reboot[mender_device_1].verify_reboot_performed()
            # Two reboots expected for the failing device: into the broken
            # update, then back after the rollback (matches the log check
            # "Reboot to the new update failed" below).
            reboot[mender_device_2].verify_reboot_performed(number_of_reboots=2)
            assert mender_device_1.yocto_id_installed_on_machine() == expected_image_id_1
            assert mender_device_2.yocto_id_installed_on_machine() != expected_image_id_2
            deploy.check_expected_status("finished", deployment_id_1)
            deploy.check_expected_status("finished", deployment_id_2)
            deploy.check_expected_statistics(deployment_id_1, "success", 1)
            deploy.get_logs(device_id_1, deployment_id_1, expected_status=404)
            deploy.check_expected_statistics(deployment_id_2, "failure", 1)
            assert "Reboot to the new update failed" in deploy.get_logs(
                device_id_2, deployment_id_2
            )
    def do_test_deployment_two_devices_parallel_updates_one_aborted(
        self, env, valid_image_with_mender_conf
    ):
        """Parallel deployments where device 2's deployment is aborted
        mid-reboot: device 1 updates, device 2 keeps its old image."""
        device_group = env.device_group
        mender_device_1 = device_group[0]
        mender_device_2 = device_group[1]
        devauth = DeviceAuthV2(env.auth)
        deploy = Deployments(env.auth, devauth)
        host_ip = env.get_virtual_network_host_ip()
        mender_conf = mender_device_1.run("cat /etc/mender/mender.conf")
        valid_image = valid_image_with_mender_conf(mender_conf)
        ip_to_device_id = Helpers.ip_to_device_id_map(device_group, devauth=devauth)
        device_id_1 = ip_to_device_id[mender_device_1.host_string]
        device_id_2 = ip_to_device_id[mender_device_2.host_string]
        reboot = {mender_device_1: None, mender_device_2: None}
        with mender_device_1.get_reboot_detector(host_ip) as reboot[
            mender_device_1
        ], mender_device_2.get_reboot_detector(host_ip) as reboot[mender_device_2]:
            deployment_id_1, expected_image_id_1 = common_update_procedure(
                valid_image, devices=[device_id_1], devauth=devauth, deploy=deploy,
            )
            deployment_id_2, expected_image_id_2 = common_update_procedure(
                valid_image, devices=[device_id_2], devauth=devauth, deploy=deploy,
            )
            # Abort deployment 2 once the device has reached "rebooting".
            deploy.check_expected_statistics(deployment_id_2, "rebooting", 1)
            deploy.abort(deployment_id_2)
            reboot[mender_device_1].verify_reboot_performed()
            reboot[mender_device_2].verify_reboot_performed()
            deploy.check_expected_statistics(deployment_id_1, "success", 1)
            deploy.get_logs(device_id_1, deployment_id_1, expected_status=404)
            deploy.check_expected_statistics(deployment_id_2, "aborted", 1)
            deploy.get_logs(device_id_2, deployment_id_2, expected_status=404)
            assert mender_device_1.yocto_id_installed_on_machine() == expected_image_id_1
            assert mender_device_2.yocto_id_installed_on_machine() != expected_image_id_2
            deploy.check_expected_status("finished", deployment_id_1)
            deploy.check_expected_status("finished", deployment_id_2)
    def do_test_deployment_two_devices_parallel_updates_multiple_deployments(
        self, env, valid_image_with_mender_conf
    ):
        """Full rootfs update on device 1 in parallel with two successive
        script-artifact deployments on device 2 (which must not reboot)."""
        device_group = env.device_group
        mender_device_1 = device_group[0]
        mender_device_2 = device_group[1]
        devauth = DeviceAuthV2(env.auth)
        deploy = Deployments(env.auth, devauth)
        host_ip = env.get_virtual_network_host_ip()
        mender_conf = mender_device_1.run("cat /etc/mender/mender.conf")
        valid_image = valid_image_with_mender_conf(mender_conf)
        ip_to_device_id = Helpers.ip_to_device_id_map(device_group, devauth=devauth)
        device_id_1 = ip_to_device_id[mender_device_1.host_string]
        device_id_2 = ip_to_device_id[mender_device_2.host_string]
        reboot = {mender_device_1: None, mender_device_2: None}
        with mender_device_1.get_reboot_detector(host_ip) as reboot[
            mender_device_1
        ], mender_device_2.get_reboot_detector(host_ip) as reboot[mender_device_2]:
            deployment_id_1, expected_image_id_1 = common_update_procedure(
                valid_image, devices=[device_id_1], devauth=devauth, deploy=deploy,
            )
            # Two no-op ("exit 0") script artifacts deployed to device 2.
            with tempfile.NamedTemporaryFile() as tf:
                artifact_name = "%s-script-1" % device_id_2
                script_image = get_script_artifact(
                    b"exit 0", artifact_name, conftest.machine_name, tf.name,
                )
                deploy.upload_image(script_image)
                deployment_id_2 = deploy.trigger_deployment(
                    name="script 1", artifact_name=artifact_name, devices=[device_id_2],
                )
            with tempfile.NamedTemporaryFile() as tf:
                artifact_name = "%s-script-2" % device_id_2
                script_image = get_script_artifact(
                    b"exit 0", artifact_name, conftest.machine_name, tf.name,
                )
                deploy.upload_image(script_image)
                deployment_id_3 = deploy.trigger_deployment(
                    name="script 2", artifact_name=artifact_name, devices=[device_id_2],
                )
            reboot[mender_device_1].verify_reboot_performed()
            # Script deployments must not reboot device 2.
            reboot[mender_device_2].verify_reboot_not_performed(300)
            deploy.check_expected_statistics(deployment_id_1, "success", 1)
            deploy.get_logs(device_id_1, deployment_id_1, expected_status=404)
            deploy.check_expected_statistics(deployment_id_2, "success", 1)
            deploy.get_logs(device_id_2, deployment_id_2, expected_status=404)
            deploy.check_expected_statistics(deployment_id_3, "success", 1)
            deploy.get_logs(device_id_2, deployment_id_3, expected_status=404)
            assert mender_device_1.yocto_id_installed_on_machine() == expected_image_id_1
            deploy.check_expected_status("finished", deployment_id_1)
            deploy.check_expected_status("finished", deployment_id_2)
            deploy.check_expected_status("finished", deployment_id_3)
class TestMenderGatewayOpenSource(BaseTestMenderGateway):
    """Open-source bindings for the shared gateway scenarios.

    Each test method only injects the open-source pytest fixtures
    (``standard_setup_*_bootstrapped_with_gateway``) and delegates to the
    matching ``do_test_*`` helper on the base class; fixture parameter
    names are significant to pytest and must not be renamed.
    """
    @MenderTesting.fast
    def test_deployment_one_device(
        self,
        standard_setup_one_client_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_one_device(
            standard_setup_one_client_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
    @MenderTesting.fast
    def test_deployment_gateway_and_one_device(
        self,
        standard_setup_one_client_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
        image_with_mender_conf_and_mender_gateway_conf,
    ):
        self.do_test_deployment_gateway_and_one_device(
            standard_setup_one_client_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
            image_with_mender_conf_and_mender_gateway_conf,
        )
    @MenderTesting.fast
    def test_deployment_two_devices_update_both(
        self,
        standard_setup_two_clients_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_two_devices_update_both(
            standard_setup_two_clients_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
    @MenderTesting.fast
    def test_deployment_two_devices_update_one(
        self,
        standard_setup_two_clients_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_two_devices_update_one(
            standard_setup_two_clients_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
    @MenderTesting.fast
    def test_deployment_two_devices_parallel_updates(
        self,
        standard_setup_two_clients_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_two_devices_parallel_updates(
            standard_setup_two_clients_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
    @MenderTesting.fast
    def test_deployment_two_devices_parallel_updates_one_failure(
        self,
        standard_setup_two_clients_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_two_devices_parallel_updates_one_failure(
            standard_setup_two_clients_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
    @MenderTesting.fast
    def test_deployment_two_devices_parallel_updates_one_aborted(
        self,
        standard_setup_two_clients_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_two_devices_parallel_updates_one_aborted(
            standard_setup_two_clients_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
    @MenderTesting.fast
    def test_deployment_two_devices_parallel_updates_multiple_deployments(
        self,
        standard_setup_two_clients_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_two_devices_parallel_updates_multiple_deployments(
            standard_setup_two_clients_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
class TestMenderGatewayEnterprise(BaseTestMenderGateway):
    """Enterprise bindings for the shared gateway scenarios.

    Identical to the open-source variant except that the enterprise
    fixtures (``enterprise_*_bootstrapped_with_gateway``) are injected;
    fixture parameter names are significant to pytest and must not be
    renamed.
    """
    @MenderTesting.fast
    def test_deployment_one_device(
        self,
        enterprise_one_client_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_one_device(
            enterprise_one_client_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
    @MenderTesting.fast
    def test_deployment_gateway_and_one_device(
        self,
        enterprise_one_client_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
        image_with_mender_conf_and_mender_gateway_conf,
    ):
        self.do_test_deployment_gateway_and_one_device(
            enterprise_one_client_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
            image_with_mender_conf_and_mender_gateway_conf,
        )
    @MenderTesting.fast
    def test_deployment_two_devices_update_both(
        self,
        enterprise_two_clients_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_two_devices_update_both(
            enterprise_two_clients_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
    @MenderTesting.fast
    def test_deployment_two_devices_update_one(
        self,
        enterprise_two_clients_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_two_devices_update_one(
            enterprise_two_clients_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
    @MenderTesting.fast
    def test_deployment_two_devices_parallel_updates(
        self,
        enterprise_two_clients_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_two_devices_parallel_updates(
            enterprise_two_clients_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
    @MenderTesting.fast
    def test_deployment_two_devices_parallel_updates_one_failure(
        self,
        enterprise_two_clients_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_two_devices_parallel_updates_one_failure(
            enterprise_two_clients_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
    @MenderTesting.fast
    def test_deployment_two_devices_parallel_updates_one_aborted(
        self,
        enterprise_two_clients_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_two_devices_parallel_updates_one_aborted(
            enterprise_two_clients_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
    @MenderTesting.fast
    def test_deployment_two_devices_parallel_updates_multiple_deployments(
        self,
        enterprise_two_clients_bootstrapped_with_gateway,
        valid_image_with_mender_conf,
    ):
        self.do_test_deployment_two_devices_parallel_updates_multiple_deployments(
            enterprise_two_clients_bootstrapped_with_gateway,
            valid_image_with_mender_conf,
        )
| 39.084577
| 102
| 0.701926
| 2,927
| 23,568
| 5.136659
| 0.072771
| 0.063851
| 0.054872
| 0.069504
| 0.880146
| 0.864051
| 0.839308
| 0.819355
| 0.790289
| 0.753242
| 0
| 0.013369
| 0.231967
| 23,568
| 602
| 103
| 39.149502
| 0.817248
| 0.027071
| 0
| 0.708502
| 0
| 0
| 0.033229
| 0.014637
| 0
| 0
| 0
| 0
| 0.016194
| 1
| 0.05668
| false
| 0
| 0.026316
| 0
| 0.091093
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2a88c97cfb31acf965579d2a9e36dac6d3c7de4
| 2,744
|
py
|
Python
|
biobb_haddock/test/unitests/test_haddock/test_topology_docker.py
|
bioexcel/biobb_haddock
|
bbc5fdf2da7c15608e2957e26c4a4aec45170b95
|
[
"Apache-2.0"
] | null | null | null |
biobb_haddock/test/unitests/test_haddock/test_topology_docker.py
|
bioexcel/biobb_haddock
|
bbc5fdf2da7c15608e2957e26c4a4aec45170b95
|
[
"Apache-2.0"
] | null | null | null |
biobb_haddock/test/unitests/test_haddock/test_topology_docker.py
|
bioexcel/biobb_haddock
|
bbc5fdf2da7c15608e2957e26c4a4aec45170b95
|
[
"Apache-2.0"
] | null | null | null |
from biobb_common.tools import test_fixtures as fx
from biobb_haddock.haddock.haddock import haddock
# class TesthaddockMipDocker():
# def setUp(self):
# fx.test_setup(self, 'haddock_mip_docker')
#
# def tearDown(self):
# #pass
# fx.test_teardown(self)
#
# def test_haddock_mip_docker(self):
# haddock(properties=self.properties, **self.paths)
# assert fx.not_empty(self.paths['output_cube_path'])
# assert fx.not_empty(self.paths['output_grd_path'])
# assert fx.equal(self.paths['output_grd_path'], self.paths['ref_output_haddock_mip_grd_path'])
# assert fx.equal(self.paths['output_cube_path'], self.paths['ref_output_haddock_mip_cube_path'])
# class TesthaddockDockingDocker():
# def setUp(self):
# fx.test_setup(self, 'haddock_docking_docker')
#
# def tearDown(self):
# pass
# #fx.test_teardown(self)
#
# def test_haddock_docking_docker(self):
# haddock(properties=self.properties, **self.paths)
# assert fx.not_empty(self.paths['output_pdb_path'])
# assert fx.not_empty(self.paths['output_grd_path'])
# assert fx.not_empty(self.paths['output_rst_path'])
# # Can not compare PDB files formed excluvely by HETATM
# #assert fx.equal(self.paths['output_pdb_path'], self.paths['ref_output_haddock_docking_pdb_path'])
# # GRD differs between executions
# #assert fx.equal(self.paths['output_grd_path'], self.paths['ref_output_haddock_docking_grd_path'])
# # RST differs between executions
# #assert fx.equal(self.paths['output_rst_path'], self.paths['ref_output_haddock_docking_rst_path'])
class TesthaddockEnergyDocker():
    """Integration test: run the haddock energy step inside Docker.

    Method names follow the unittest-style convention that the test
    fixture helpers (``fx``) expect; do not rename.
    """
    def setUp(self):
        # Loads self.properties / self.paths for the
        # 'haddock_energy_docker' test configuration.
        fx.test_setup(self, 'haddock_energy_docker')
    def tearDown(self):
        # pass
        fx.test_teardown(self)
    def test_haddock_energy(self):
        """Execute haddock and check the per-atom output file is non-empty.

        The reference comparison below is commented out — presumably the
        output differs between executions (see the sibling commented-out
        tests above); only existence is asserted.
        """
        haddock(properties=self.properties, **self.paths)
        assert fx.not_empty(self.paths['output_byat_path'])
        # assert fx.equal(self.paths['output_byat_path'], self.paths['ref_output_byat_path'])
# class TesthaddockSolvationDocker():
# def setUp(self):
# fx.test_setup(self, 'haddock_solvation_docker')
#
# def tearDown(self):
# #pass
# fx.test_teardown(self)
#
# def test_haddock_mip_docker(self):
# haddock(properties=self.properties, **self.paths)
# assert fx.not_empty(self.paths['output_cube_path'])
# assert fx.not_empty(self.paths['output_grd_path'])
# assert fx.equal(self.paths['output_grd_path'], self.paths['ref_output_haddock_mip_grd_path'])
# assert fx.equal(self.paths['output_cube_path'], self.paths['ref_output_haddock_mip_cube_path'])
| 40.955224
| 108
| 0.685496
| 358
| 2,744
| 4.952514
| 0.136872
| 0.142132
| 0.135364
| 0.072194
| 0.822335
| 0.79639
| 0.780598
| 0.717992
| 0.628314
| 0.585448
| 0
| 0
| 0.18586
| 2,744
| 67
| 109
| 40.955224
| 0.793644
| 0.800656
| 0
| 0
| 0
| 0
| 0.075665
| 0.042945
| 0
| 0
| 0
| 0
| 0.1
| 1
| 0.3
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
a239e321ada233d2a86517ca175d684f94700f35
| 14,338
|
py
|
Python
|
foulefactoryapilib/controllers/projects_controller.py
|
foulefactory/FouleFactory-SDK-Python
|
6a6b0edd3a6abe2c1f20eb370ee2897eea1cffd8
|
[
"MIT"
] | null | null | null |
foulefactoryapilib/controllers/projects_controller.py
|
foulefactory/FouleFactory-SDK-Python
|
6a6b0edd3a6abe2c1f20eb370ee2897eea1cffd8
|
[
"MIT"
] | null | null | null |
foulefactoryapilib/controllers/projects_controller.py
|
foulefactory/FouleFactory-SDK-Python
|
6a6b0edd3a6abe2c1f20eb370ee2897eea1cffd8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
foulefactoryapilib.controllers.projects_controller
This file was automatically generated by APIMATIC BETA v2.0 on 09/16/2016
"""
from .base_controller import *
class ProjectsController(BaseController):

    """A Controller to access Endpoints in the foulefactoryapilib API.

    All endpoints use HTTP basic auth taken from ``Configuration``.
    The public method names, signatures and defaults are preserved from
    the APIMATIC-generated original; the shared request pipeline has been
    factored into the private helpers ``_run_request`` and ``_get_json``.
    """

    def __init__(self, http_client=None, http_call_back=None):
        """Constructor which allows a different HTTP client for this controller."""
        BaseController.__init__(self, http_client, http_call_back)

    def _run_request(self, request):
        """Execute a prepared request and return the deserialized JSON body.

        Invokes the optional HttpCallBack hooks before the request and
        after the response, and applies global HTTP-status error handling.

        Args:
            request: a request object prepared by ``self.http_client``.

        Returns:
            mixed: the JSON-deserialized response body.

        Raises:
            APIException: raised by ``validate_response`` on error statuses.
        """
        # Invoke the on-before-request HttpCallBack if specified.
        # (Fixed: original compared callbacks with `!= None`, PEP 8 E711.)
        if self.http_call_back is not None:
            self.http_call_back.on_before_request(request)

        # Invoke the API call to fetch the response.
        response = self.http_client.execute_as_string(request)

        # Wrap the request and the response in an HttpContext object.
        context = HttpContext(request, response)

        # Invoke the on-after-response HttpCallBack if specified.
        if self.http_call_back is not None:
            self.http_call_back.on_after_response(context)

        # Global error handling using HTTP status codes.
        self.validate_response(context)

        # Return appropriate type.
        return APIHelper.json_deserialize(response.raw_body)

    def _get_json(self, path, template_parameters=None, accept_language=None):
        """Build and execute an authenticated GET request against BASE_URI.

        Args:
            path (string): URI path, may contain ``{placeholder}`` segments.
            template_parameters (dict, optional): values substituted into
                the placeholders; skipped when None.
            accept_language (string, optional): value for the
                Accept-Language header; header omitted when None.

        Returns:
            mixed: Response from the API.

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API.
        """
        # The base uri for api requests, plus the endpoint path.
        _query_builder = Configuration.BASE_URI + path

        # Process optional template parameters.
        if template_parameters is not None:
            _query_builder = APIHelper.append_url_with_template_parameters(
                _query_builder, template_parameters)

        # Validate and preprocess url.
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare headers.
        _headers = {
            'user-agent': 'APIMATIC 2.0',
            'accept': 'application/json',
        }
        if accept_language is not None:
            _headers['Accept-Language'] = accept_language

        # Prepare and run the API call.
        _request = self.http_client.get(
            _query_url,
            headers=_headers,
            username=Configuration.basic_auth_user_name,
            password=Configuration.basic_auth_password)
        return self._run_request(_request)

    def create_projects_create_project(self,
                                       project,
                                       accept_language='fr'):
        """Does a POST request to /v1.1/projects.

        Create new project

        Args:
            project (ProjectWriterServiceModel): TODO: type description here.
                Example:
            accept_language (string, optional): TODO: type description here.
                Example: fr

        Returns:
            mixed: Response from the API. OK

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received in
                the request.

        """
        # Validate and preprocess url.
        _query_url = APIHelper.clean_url(Configuration.BASE_URI + '/v1.1/projects')

        # Prepare headers.
        _headers = {
            'user-agent': 'APIMATIC 2.0',
            'accept': 'application/json',
            'content-type': 'application/json; charset=utf-8',
            'Accept-Language': accept_language
        }

        # Prepare and run the API call.
        _request = self.http_client.post(
            _query_url,
            headers=_headers,
            parameters=APIHelper.json_serialize(project),
            username=Configuration.basic_auth_user_name,
            password=Configuration.basic_auth_password)
        return self._run_request(_request)

    def get_projects_get_user_projects(self):
        """Does a GET request to /v1.1/projects.

        Get All projects

        Returns:
            mixed: Response from the API. OK

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received in
                the request.

        """
        # Note: this endpoint sends no Accept-Language header (as in the
        # generated original).
        return self._get_json('/v1.1/projects')

    def get_projects_get_project_files(self,
                                       id,
                                       accept_language='fr'):
        """Does a GET request to /v1.1/projects/{id}/urlCsvFiles.

        Get csv files by project id

        Args:
            id (int): TODO: type description here. Example:
            accept_language (string, optional): TODO: type description here.
                Example: fr

        Returns:
            mixed: Response from the API. OK

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received in
                the request.

        """
        return self._get_json('/v1.1/projects/{id}/urlCsvFiles',
                              {'id': id}, accept_language)

    def get_projects_get_project_task_lines(self,
                                            id,
                                            accept_language='fr'):
        """Does a GET request to /v1.1/projects/{id}/taskLines.

        Get task lines by project id

        Args:
            id (int): TODO: type description here. Example:
            accept_language (string, optional): TODO: type description here.
                Example: fr

        Returns:
            mixed: Response from the API. OK

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received in
                the request.

        """
        return self._get_json('/v1.1/projects/{id}/taskLines',
                              {'id': id}, accept_language)

    def get_projects_get_project_tasks(self,
                                       id,
                                       accept_language='fr'):
        """Does a GET request to /v1.1/projects/{id}/tasks.

        Get tasks by project id

        Args:
            id (int): TODO: type description here. Example:
            accept_language (string, optional): TODO: type description here.
                Example: fr

        Returns:
            mixed: Response from the API. OK

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received in
                the request.

        """
        return self._get_json('/v1.1/projects/{id}/tasks',
                              {'id': id}, accept_language)

    def get_projects_get(self,
                         id,
                         accept_language='fr'):
        """Does a GET request to /v1.1/projects/{id}.

        Get project by id

        Args:
            id (int): TODO: type description here. Example:
            accept_language (string, optional): TODO: type description here.
                Example: fr

        Returns:
            mixed: Response from the API. OK

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received in
                the request.

        """
        return self._get_json('/v1.1/projects/{id}',
                              {'id': id}, accept_language)
| 34.301435
| 205
| 0.624494
| 1,624
| 14,338
| 5.283251
| 0.093596
| 0.035431
| 0.036364
| 0.044755
| 0.922261
| 0.911655
| 0.911655
| 0.911655
| 0.908392
| 0.908392
| 0
| 0.004858
| 0.310852
| 14,338
| 417
| 206
| 34.383693
| 0.863475
| 0.404798
| 0
| 0.820896
| 1
| 0
| 0.068258
| 0.010906
| 0
| 0
| 0
| 0.023981
| 0
| 1
| 0.052239
| false
| 0.044776
| 0.007463
| 0
| 0.11194
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a241a5e573ad07d18945390cf9811c9698b81054
| 13,772
|
py
|
Python
|
idgo_admin/managers.py
|
DataSud/DataSud-2017-2019
|
c73e67f22fa9bb38577c286271d02c2d9a708e40
|
[
"Apache-2.0"
] | 1
|
2020-12-02T09:44:28.000Z
|
2020-12-02T09:44:28.000Z
|
idgo_admin/managers.py
|
DataSud/DataSud-2017-2019
|
c73e67f22fa9bb38577c286271d02c2d9a708e40
|
[
"Apache-2.0"
] | null | null | null |
idgo_admin/managers.py
|
DataSud/DataSud-2017-2019
|
c73e67f22fa9bb38577c286271d02c2d9a708e40
|
[
"Apache-2.0"
] | 1
|
2020-12-08T16:51:34.000Z
|
2020-12-08T16:51:34.000Z
|
# Copyright (c) 2017-2019 Neogeo-Technologies.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.apps import apps
from django.contrib.gis.db import models
from django.utils import timezone
from idgo_admin.utils import clean_my_obj
from itertools import chain
# =========================================================
# Définition de Managers pour les jeux de données (Dataset)
# =========================================================
class DefaultDatasetManager(models.Manager):
    """Manager restricted to datasets that were NOT harvested from a
    remote CKAN or CSW instance."""

    def create(self, **kwargs):
        """Instantiate and insert a new dataset.

        The optional ``save_opts`` keyword is forwarded to ``save()``
        instead of the model constructor.
        """
        extra_save_kwargs = kwargs.pop('save_opts', {})
        instance = self.model(**kwargs)
        self._for_write = True
        instance.save(force_insert=True, using=self.db, **extra_save_kwargs)
        return instance

    def get_queryset(self, **kwargs):
        """Return every dataset except those linked to a remote CKAN or CSW source."""
        RemoteCkanDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteCkanDataset')
        RemoteCswDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteCswDataset')
        ckan_pks = RemoteCkanDataset.objects.all().values_list('dataset__pk', flat=True)
        csw_pks = RemoteCswDataset.objects.all().values_list('dataset__pk', flat=True)
        harvested_pks = [*ckan_pks, *csw_pks]
        return super().get_queryset(**kwargs).exclude(pk__in=harvested_pks)

    def all(self):
        """Alias for :meth:`get_queryset` so ``Dataset.default.all()`` also filters."""
        return self.get_queryset()

    def get(self, **kwargs):
        """Delegate to the stock Manager lookup."""
        return super().get(**kwargs)
class HarvestedCkanDatasetManager(models.Manager):
    """Manager for datasets harvested from a remote CKAN instance.

    The link between a local ``Dataset`` and its remote counterpart is
    stored in the ``RemoteCkanDataset`` model; the methods below resolve
    their queries through that link table.
    """

    def create(self, **kwargs):
        """Create a harvested dataset together with its remote-CKAN link.

        ``remote_instance``, ``remote_dataset`` and ``remote_organisation``
        are consumed here; every remaining keyword argument is passed to
        the ``Dataset`` constructor.
        """
        remote_instance = kwargs.pop('remote_instance', None)
        remote_dataset = kwargs.pop('remote_dataset', None)
        remote_organisation = kwargs.pop('remote_organisation', None)
        # First create the dataset without synchronising it to CKAN
        Dataset = apps.get_model(app_label='idgo_admin', model_name='Dataset')
        save_opts = {'current_user': None, 'synchronize': False}
        dataset = Dataset.default.create(save_opts=save_opts, **kwargs)
        # Then create the link with the remote CKAN instance
        RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteCkanDataset')
        RemoteDataset.objects.create(
            created_by=dataset.editor,
            dataset=dataset,
            remote_instance=remote_instance,
            remote_dataset=remote_dataset,
            remote_organisation=remote_organisation,
        )
        # Finally update the dataset and synchronise it with CKAN
        DataType = apps.get_model(app_label='idgo_admin', model_name='DataType')
        # NOTE(review): a queryset is assigned directly to `data_type`; if this
        # is a many-to-many field, Django >= 2.0 rejects direct assignment and
        # `.set()` would be required — confirm against the Dataset model.
        dataset.data_type = DataType.objects.filter(slug='donnees-moissonnees')
        dataset.save(current_user=None, synchronize=True)
        return dataset

    def filter(self, **kwargs):
        """Filter datasets by their remote-CKAN attributes when given.

        If any of the remote_* keywords is present the lookup goes through
        ``RemoteCkanDataset``; otherwise it falls back to the stock
        ``Manager.filter``.
        """
        remote_instance = kwargs.pop('remote_instance', None)
        remote_dataset = kwargs.pop('remote_dataset', None)
        remote_organisation = kwargs.pop('remote_organisation', None)
        remote_organisation__in = kwargs.pop('remote_organisation__in', None)
        # clean_my_obj drops the entries left at None
        kvp = clean_my_obj({
            'remote_instance': remote_instance,
            'remote_dataset': remote_dataset,
            'remote_organisation': remote_organisation,
            'remote_organisation__in': remote_organisation__in})
        if kvp:
            Dataset = apps.get_model(app_label='idgo_admin', model_name='Dataset')
            RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteCkanDataset')
            return Dataset.objects.filter(id__in=[
                entry.dataset.id for entry in RemoteDataset.objects.filter(**kvp)])
        return super().filter(**kwargs)

    def get(self, **kwargs):
        """Return a single dataset, resolving ``remote_dataset`` through the link table."""
        remote_dataset = kwargs.pop('remote_dataset', None)
        if remote_dataset:
            RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteCkanDataset')
            return RemoteDataset.objects.get(remote_dataset=remote_dataset).dataset
        return super().get(**kwargs)

    def get_queryset(self, **kwargs):
        """Return every dataset that has a remote-CKAN link."""
        Dataset = apps.get_model(app_label='idgo_admin', model_name='Dataset')
        RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteCkanDataset')
        return Dataset.objects.filter(
            id__in=[entry.dataset.id for entry in RemoteDataset.objects.all()])

    def update_or_create(self, **kwargs):
        """Create the harvested dataset, or refresh it if the link exists.

        Returns a ``(dataset, created)`` pair, mirroring Django's
        ``update_or_create`` convention.
        """
        remote_dataset = kwargs.get('remote_dataset', None)
        RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteCkanDataset')
        try:
            dataset = self.get(remote_dataset=remote_dataset)
        except RemoteDataset.DoesNotExist:
            dataset = self.create(**kwargs)
            created = True
        else:
            created = False
            harvested = RemoteDataset.objects.get(dataset=dataset)
            harvested.updated_on = timezone.now()
            harvested.remote_organisation = kwargs.pop('remote_organisation', None)
            harvested.save()
            # NOTE(review): kwargs still contains 'remote_dataset' (and possibly
            # 'remote_instance') here, so those names are set on the Dataset
            # instance too — confirm this is intended.
            for k, v in kwargs.items():
                setattr(dataset, k, v)
            dataset.save(current_user=None, synchronize=True)
        return dataset, created
class HarvestedCswDatasetManager(models.Manager):
    """Manager for datasets harvested from a remote CSW instance.

    Mirrors ``HarvestedCkanDatasetManager`` but works through the
    ``RemoteCswDataset`` link model and has no remote-organisation field.
    """

    def create(self, **kwargs):
        """Create a harvested dataset together with its remote-CSW link.

        ``remote_instance`` and ``remote_dataset`` are consumed here; every
        remaining keyword argument is passed to the ``Dataset`` constructor.
        """
        remote_instance = kwargs.pop('remote_instance', None)
        remote_dataset = kwargs.pop('remote_dataset', None)
        # First create the dataset without synchronising it to CSW
        Dataset = apps.get_model(app_label='idgo_admin', model_name='Dataset')
        save_opts = {'current_user': None, 'synchronize': False}
        dataset = Dataset.default.create(save_opts=save_opts, **kwargs)
        # Then create the link with the remote CSW instance
        RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteCswDataset')
        RemoteDataset.objects.create(
            created_by=dataset.editor,
            dataset=dataset,
            remote_instance=remote_instance,
            remote_dataset=remote_dataset,
        )
        # Finally update the dataset and synchronise it with CSW
        DataType = apps.get_model(app_label='idgo_admin', model_name='DataType')
        # NOTE(review): queryset assigned directly to `data_type`; if this is a
        # many-to-many field, Django >= 2.0 requires `.set()` — confirm.
        dataset.data_type = DataType.objects.filter(slug='donnees-moissonnees')
        dataset.save(current_user=None, synchronize=True)
        return dataset

    def filter(self, **kwargs):
        """Filter through ``RemoteCswDataset`` when remote_* keywords are given,
        otherwise fall back to the stock ``Manager.filter``."""
        remote_instance = kwargs.pop('remote_instance', None)
        remote_dataset = kwargs.pop('remote_dataset', None)
        # clean_my_obj drops the entries left at None
        kvp = clean_my_obj({
            'remote_instance': remote_instance,
            'remote_dataset': remote_dataset,
        })
        if kvp:
            Dataset = apps.get_model(app_label='idgo_admin', model_name='Dataset')
            RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteCswDataset')
            return Dataset.objects.filter(id__in=[
                entry.dataset.id for entry in RemoteDataset.objects.filter(**kvp)])
        return super().filter(**kwargs)

    def get(self, **kwargs):
        """Return a single dataset, resolving ``remote_dataset`` through the link table."""
        remote_dataset = kwargs.pop('remote_dataset', None)
        if remote_dataset:
            RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteCswDataset')
            return RemoteDataset.objects.get(remote_dataset=remote_dataset).dataset
        return super().get(**kwargs)

    def get_queryset(self, **kwargs):
        """Return every dataset that has a remote-CSW link."""
        Dataset = apps.get_model(app_label='idgo_admin', model_name='Dataset')
        RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteCswDataset')
        return Dataset.objects.filter(
            id__in=[entry.dataset.id for entry in RemoteDataset.objects.all()])

    def update_or_create(self, **kwargs):
        """Create or refresh a harvested dataset; returns ``(dataset, created)``."""
        remote_dataset = kwargs.get('remote_dataset', None)
        RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteCswDataset')
        try:
            dataset = self.get(remote_dataset=remote_dataset)
        except RemoteDataset.DoesNotExist:
            dataset = self.create(**kwargs)
            created = True
        else:
            created = False
            harvested = RemoteDataset.objects.get(dataset=dataset)
            harvested.updated_on = timezone.now()
            # harvested.remote_organisation = kwargs.pop('remote_organisation', None)
            harvested.save()
            # NOTE(review): kwargs still contains 'remote_dataset' (and possibly
            # 'remote_instance') here, so those names are set on the Dataset
            # instance too — confirm this is intended.
            for k, v in kwargs.items():
                setattr(dataset, k, v)
            dataset.save(current_user=None, synchronize=True)
        return dataset, created
class HarvestedDcatDatasetManager(models.Manager):
    """Manager for datasets harvested from a remote DCAT instance.

    Mirrors ``HarvestedCswDatasetManager`` but works through the
    ``RemoteDcatDataset`` link model.
    """

    def create(self, **kwargs):
        """Create a harvested dataset together with its remote-DCAT link.

        ``remote_instance`` and ``remote_dataset`` are consumed here; every
        remaining keyword argument is passed to the ``Dataset`` constructor.
        """
        remote_instance = kwargs.pop('remote_instance', None)
        remote_dataset = kwargs.pop('remote_dataset', None)
        # First create the dataset without synchronising it to DCAT
        Dataset = apps.get_model(app_label='idgo_admin', model_name='Dataset')
        save_opts = {'current_user': None, 'synchronize': False}
        dataset = Dataset.default.create(save_opts=save_opts, **kwargs)
        # Then create the link with the remote DCAT instance
        RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteDcatDataset')
        RemoteDataset.objects.create(
            created_by=dataset.editor,
            dataset=dataset,
            remote_instance=remote_instance,
            remote_dataset=remote_dataset,
        )
        # Finally update the dataset and synchronise it with DCAT
        DataType = apps.get_model(app_label='idgo_admin', model_name='DataType')
        # NOTE(review): queryset assigned directly to `data_type`; if this is a
        # many-to-many field, Django >= 2.0 requires `.set()` — confirm.
        dataset.data_type = DataType.objects.filter(slug='donnees-moissonnees')
        dataset.save(current_user=None, synchronize=True)
        return dataset

    def filter(self, **kwargs):
        """Filter through ``RemoteDcatDataset`` when remote_* keywords are given,
        otherwise fall back to the stock ``Manager.filter``."""
        remote_instance = kwargs.pop('remote_instance', None)
        remote_dataset = kwargs.pop('remote_dataset', None)
        # clean_my_obj drops the entries left at None
        kvp = clean_my_obj({
            'remote_instance': remote_instance,
            'remote_dataset': remote_dataset,
        })
        if kvp:
            Dataset = apps.get_model(app_label='idgo_admin', model_name='Dataset')
            RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteDcatDataset')
            return Dataset.objects.filter(id__in=[
                entry.dataset.id for entry in RemoteDataset.objects.filter(**kvp)])
        return super().filter(**kwargs)

    def get(self, **kwargs):
        """Return a single dataset, resolving ``remote_dataset`` through the link table."""
        remote_dataset = kwargs.pop('remote_dataset', None)
        if remote_dataset:
            RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteDcatDataset')
            return RemoteDataset.objects.get(remote_dataset=remote_dataset).dataset
        return super().get(**kwargs)

    def get_queryset(self, **kwargs):
        """Return every dataset that has a remote-DCAT link."""
        Dataset = apps.get_model(app_label='idgo_admin', model_name='Dataset')
        RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteDcatDataset')
        return Dataset.objects.filter(
            id__in=[entry.dataset.id for entry in RemoteDataset.objects.all()])

    def update_or_create(self, **kwargs):
        """Create or refresh a harvested dataset; returns ``(dataset, created)``."""
        remote_dataset = kwargs.get('remote_dataset', None)
        RemoteDataset = apps.get_model(app_label='idgo_admin', model_name='RemoteDcatDataset')
        try:
            dataset = self.get(remote_dataset=remote_dataset)
        except RemoteDataset.DoesNotExist:
            dataset = self.create(**kwargs)
            created = True
        else:
            created = False
            harvested = RemoteDataset.objects.get(dataset=dataset)
            harvested.updated_on = timezone.now()
            # harvested.remote_organisation = kwargs.pop('remote_organisation', None)
            harvested.save()
            # NOTE(review): kwargs still contains 'remote_dataset' (and possibly
            # 'remote_instance') here, so those names are set on the Dataset
            # instance too — confirm this is intended.
            for k, v in kwargs.items():
                setattr(dataset, k, v)
            dataset.save(current_user=None, synchronize=True)
        return dataset, created
# =====================================================
# Définition de Managers pour les ressources (Resource)
# =====================================================
class DefaultResourceManager(models.Manager):
    """Default manager for resources.

    ``create`` understands an extra ``save_opts`` keyword whose content is
    forwarded to ``Model.save()`` rather than to the model constructor.
    """

    def create(self, **kwargs):
        """Build a new resource and insert it into the database."""
        extra_save_kwargs = kwargs.pop('save_opts', {})
        resource = self.model(**kwargs)
        self._for_write = True
        resource.save(force_insert=True, using=self.db, **extra_save_kwargs)
        return resource

    def get(self, **kwargs):
        """Delegate to the stock Manager lookup."""
        return super().get(**kwargs)
# ====================================================
# Définition de Managers pour les couches SIG (Layers)
# ====================================================
class RasterLayerManager(models.Manager):
    """Manager creating layers whose ``type`` field is forced to ``'raster'``."""

    def create(self, **kwargs):
        """Insert a new raster layer; ``save_opts`` is forwarded to ``save()``."""
        extra_save_kwargs = kwargs.pop('save_opts', {})
        kwargs['type'] = 'raster'
        layer = self.model(**kwargs)
        self._for_write = True
        layer.save(force_insert=True, using=self.db, **extra_save_kwargs)
        return layer

    def get(self, **kwargs):
        """Delegate to the stock Manager lookup."""
        return super().get(**kwargs)
class VectorLayerManager(models.Manager):
    """Manager creating layers whose ``type`` field is forced to ``'vector'``."""

    def create(self, **kwargs):
        """Insert a new vector layer; ``save_opts`` is forwarded to ``save()``."""
        extra_save_kwargs = kwargs.pop('save_opts', {})
        kwargs['type'] = 'vector'
        layer = self.model(**kwargs)
        self._for_write = True
        layer.save(force_insert=True, using=self.db, **extra_save_kwargs)
        return layer

    def get(self, **kwargs):
        """Delegate to the stock Manager lookup."""
        return super().get(**kwargs)
| 39.014164
| 98
| 0.64515
| 1,577
| 13,772
| 5.438174
| 0.123653
| 0.077309
| 0.040578
| 0.050723
| 0.867421
| 0.855877
| 0.855877
| 0.855877
| 0.847248
| 0.841884
| 0
| 0.001129
| 0.228435
| 13,772
| 352
| 99
| 39.125
| 0.805948
| 0.130192
| 0
| 0.876068
| 0
| 0
| 0.111437
| 0.003851
| 0
| 0
| 0
| 0
| 0
| 1
| 0.106838
| false
| 0
| 0.021368
| 0.021368
| 0.290598
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a24fcf8142810c753683155114540fb1b54fa030
| 51,677
|
py
|
Python
|
death-star-bench/socialNetwork/gen-py/social_network/ComposePostService.py
|
usmanager/microservices
|
56d2f639617d7e2122b602067e18240d91529284
|
[
"MIT"
] | null | null | null |
death-star-bench/socialNetwork/gen-py/social_network/ComposePostService.py
|
usmanager/microservices
|
56d2f639617d7e2122b602067e18240d91529284
|
[
"MIT"
] | null | null | null |
death-star-bench/socialNetwork/gen-py/social_network/ComposePostService.py
|
usmanager/microservices
|
56d2f639617d7e2122b602067e18240d91529284
|
[
"MIT"
] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.12.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
# Registry of every generated Thrift struct class in this module; presumably
# passed to fix_spec() once all thrift_spec tuples are assigned — the call
# site is past the end of this chunk, so confirm.
all_structs = []
class Iface(object):
    """Abstract ComposePostService interface.

    Subclasses (the generated ``Client`` and user-provided handlers) supply
    the real behavior; every method here is a do-nothing placeholder.
    """

    def UploadText(self, req_id, text, carrier):
        """Parameters: req_id, text, carrier."""
        pass

    def UploadMedia(self, req_id, media, carrier):
        """Parameters: req_id, media, carrier."""
        pass

    def UploadUniqueId(self, req_id, post_id, post_type, carrier):
        """Parameters: req_id, post_id, post_type, carrier."""
        pass

    def UploadCreator(self, req_id, creator, carrier):
        """Parameters: req_id, creator, carrier."""
        pass

    def UploadUrls(self, req_id, urls, carrier):
        """Parameters: req_id, urls, carrier."""
        pass

    def UploadUserMentions(self, req_id, user_mentions, carrier):
        """Parameters: req_id, user_mentions, carrier."""
        pass
class Client(Iface):
    """Synchronous Thrift client for the ComposePostService.

    Each service method writes a CALL message on the output protocol and
    then blocks reading the matching reply on the input protocol.
    """

    def __init__(self, iprot, oprot=None):
        # A single protocol may serve both directions; a distinct output
        # protocol is used only when one is supplied.
        self._iprot = self._oprot = iprot
        if oprot is not None:
            self._oprot = oprot
        self._seqid = 0

    def UploadText(self, req_id, text, carrier):
        """
        Parameters:
         - req_id
         - text
         - carrier
        """
        self.send_UploadText(req_id, text, carrier)
        self.recv_UploadText()

    def send_UploadText(self, req_id, text, carrier):
        # Serialize the call: message header, argument struct, then flush.
        self._oprot.writeMessageBegin('UploadText', TMessageType.CALL, self._seqid)
        args = UploadText_args()
        args.req_id = req_id
        args.text = text
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_UploadText(self):
        # Read the reply: protocol-level failures surface as
        # TApplicationException, service-level ones via result.se.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = UploadText_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.se is not None:
            raise result.se
        return

    def UploadMedia(self, req_id, media, carrier):
        """
        Parameters:
         - req_id
         - media
         - carrier
        """
        self.send_UploadMedia(req_id, media, carrier)
        self.recv_UploadMedia()

    def send_UploadMedia(self, req_id, media, carrier):
        # Same wire pattern as send_UploadText.
        self._oprot.writeMessageBegin('UploadMedia', TMessageType.CALL, self._seqid)
        args = UploadMedia_args()
        args.req_id = req_id
        args.media = media
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_UploadMedia(self):
        # Same reply-handling pattern as recv_UploadText.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = UploadMedia_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.se is not None:
            raise result.se
        return

    def UploadUniqueId(self, req_id, post_id, post_type, carrier):
        """
        Parameters:
         - req_id
         - post_id
         - post_type
         - carrier
        """
        self.send_UploadUniqueId(req_id, post_id, post_type, carrier)
        self.recv_UploadUniqueId()

    def send_UploadUniqueId(self, req_id, post_id, post_type, carrier):
        # Same wire pattern as send_UploadText.
        self._oprot.writeMessageBegin('UploadUniqueId', TMessageType.CALL, self._seqid)
        args = UploadUniqueId_args()
        args.req_id = req_id
        args.post_id = post_id
        args.post_type = post_type
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_UploadUniqueId(self):
        # Same reply-handling pattern as recv_UploadText.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = UploadUniqueId_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.se is not None:
            raise result.se
        return

    def UploadCreator(self, req_id, creator, carrier):
        """
        Parameters:
         - req_id
         - creator
         - carrier
        """
        self.send_UploadCreator(req_id, creator, carrier)
        self.recv_UploadCreator()

    def send_UploadCreator(self, req_id, creator, carrier):
        # Same wire pattern as send_UploadText.
        self._oprot.writeMessageBegin('UploadCreator', TMessageType.CALL, self._seqid)
        args = UploadCreator_args()
        args.req_id = req_id
        args.creator = creator
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_UploadCreator(self):
        # Same reply-handling pattern as recv_UploadText.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = UploadCreator_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.se is not None:
            raise result.se
        return

    def UploadUrls(self, req_id, urls, carrier):
        """
        Parameters:
         - req_id
         - urls
         - carrier
        """
        self.send_UploadUrls(req_id, urls, carrier)
        self.recv_UploadUrls()

    def send_UploadUrls(self, req_id, urls, carrier):
        # Same wire pattern as send_UploadText.
        self._oprot.writeMessageBegin('UploadUrls', TMessageType.CALL, self._seqid)
        args = UploadUrls_args()
        args.req_id = req_id
        args.urls = urls
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_UploadUrls(self):
        # Same reply-handling pattern as recv_UploadText.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = UploadUrls_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.se is not None:
            raise result.se
        return

    def UploadUserMentions(self, req_id, user_mentions, carrier):
        """
        Parameters:
         - req_id
         - user_mentions
         - carrier
        """
        self.send_UploadUserMentions(req_id, user_mentions, carrier)
        self.recv_UploadUserMentions()

    def send_UploadUserMentions(self, req_id, user_mentions, carrier):
        # Same wire pattern as send_UploadText.
        self._oprot.writeMessageBegin('UploadUserMentions', TMessageType.CALL, self._seqid)
        args = UploadUserMentions_args()
        args.req_id = req_id
        args.user_mentions = user_mentions
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_UploadUserMentions(self):
        # Same reply-handling pattern as recv_UploadText.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = UploadUserMentions_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.se is not None:
            raise result.se
        return
class Processor(Iface, TProcessor):
    """Server-side dispatcher mapping incoming Thrift messages to handler calls."""

    def __init__(self, handler):
        self._handler = handler
        # Method-name -> unbound processing function dispatch table.
        self._processMap = {}
        self._processMap["UploadText"] = Processor.process_UploadText
        self._processMap["UploadMedia"] = Processor.process_UploadMedia
        self._processMap["UploadUniqueId"] = Processor.process_UploadUniqueId
        self._processMap["UploadCreator"] = Processor.process_UploadCreator
        self._processMap["UploadUrls"] = Processor.process_UploadUrls
        self._processMap["UploadUserMentions"] = Processor.process_UploadUserMentions

    def process(self, iprot, oprot):
        # NOTE: generated code — the local `type` shadows the builtin but is unused.
        (name, type, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            # Unknown method: drain the payload and answer with an EXCEPTION
            # message. NOTE(review): this branch returns None while the known
            # path returns True — generated behavior; confirm callers ignore
            # the return value.
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
            return True

    def process_UploadText(self, seqid, iprot, oprot):
        # Decode args, invoke the handler, then encode either the reply or
        # an exception back onto the wire.
        args = UploadText_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = UploadText_result()
        try:
            self._handler.UploadText(args.req_id, args.text, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            # Transport problems must propagate so the server can drop the connection.
            raise
        except ServiceException as se:
            # Declared service exception: returned as part of the result struct.
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            # Anything else is hidden from the client behind INTERNAL_ERROR.
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("UploadText", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_UploadMedia(self, seqid, iprot, oprot):
        # Same processing pattern as process_UploadText.
        args = UploadMedia_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = UploadMedia_result()
        try:
            self._handler.UploadMedia(args.req_id, args.media, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("UploadMedia", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_UploadUniqueId(self, seqid, iprot, oprot):
        # Same processing pattern as process_UploadText.
        args = UploadUniqueId_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = UploadUniqueId_result()
        try:
            self._handler.UploadUniqueId(args.req_id, args.post_id, args.post_type, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("UploadUniqueId", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_UploadCreator(self, seqid, iprot, oprot):
        # Same processing pattern as process_UploadText.
        args = UploadCreator_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = UploadCreator_result()
        try:
            self._handler.UploadCreator(args.req_id, args.creator, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("UploadCreator", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_UploadUrls(self, seqid, iprot, oprot):
        # Same processing pattern as process_UploadText.
        args = UploadUrls_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = UploadUrls_result()
        try:
            self._handler.UploadUrls(args.req_id, args.urls, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("UploadUrls", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_UploadUserMentions(self, seqid, iprot, oprot):
        # Same processing pattern as process_UploadText.
        args = UploadUserMentions_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = UploadUserMentions_result()
        try:
            self._handler.UploadUserMentions(args.req_id, args.user_mentions, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("UploadUserMentions", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class UploadText_args(object):
    """Argument struct for ComposePostService.UploadText.

    Attributes:
     - req_id
     - text
     - carrier
    """

    def __init__(self, req_id=None, text=None, carrier=None,):
        self.req_id = req_id
        self.text = text
        self.carrier = carrier

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path when the accelerated C decoder is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:  # req_id: i64
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:  # text: string
                if ftype == TType.STRING:
                    self.text = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:  # carrier: map<string, string>
                if ftype == TType.MAP:
                    self.carrier = {}
                    (_ktype94, _vtype95, _size93) = iprot.readMapBegin()
                    for _i97 in range(_size93):
                        _key98 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val99 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key98] = _val99
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer peers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the given output protocol.

        Fields left at None are omitted from the wire.
        """
        # Fast path when the accelerated C encoder is available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UploadText_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.text is not None:
            oprot.writeFieldBegin('text', TType.STRING, 2)
            oprot.writeString(self.text.encode('utf-8') if sys.version_info[0] == 2 else self.text)
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter100, viter101 in self.carrier.items():
                oprot.writeString(kiter100.encode('utf-8') if sys.version_info[0] == 2 else kiter100)
                oprot.writeString(viter101.encode('utf-8') if sys.version_info[0] == 2 else viter101)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated struct performs no validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(UploadText_args)
UploadText_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ), # 1
    (2, TType.STRING, 'text', 'UTF8', None, ), # 2
    (3, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3
)
class UploadText_result(object):
    """Result struct for ComposePostService.UploadText.

    Attributes:
     - se: ServiceException raised by the handler, if any
    """

    def __init__(self, se=None,):
        self.se = se

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path when the accelerated C decoder is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:  # se: ServiceException
                if ftype == TType.STRUCT:
                    self.se = ServiceException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer peers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the given output protocol."""
        # Fast path when the accelerated C encoder is available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UploadText_result')
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated struct performs no validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(UploadText_result)
UploadText_result.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ), # 1
)
class UploadMedia_args(object):
    """Argument struct for ComposePostService.UploadMedia.

    Attributes:
     - req_id
     - media
     - carrier
    """

    def __init__(self, req_id=None, media=None, carrier=None,):
        self.req_id = req_id
        self.media = media
        self.carrier = carrier

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path when the accelerated C decoder is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:  # req_id: i64
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:  # media: list<Media>
                if ftype == TType.LIST:
                    self.media = []
                    (_etype105, _size102) = iprot.readListBegin()
                    for _i106 in range(_size102):
                        _elem107 = Media()
                        _elem107.read(iprot)
                        self.media.append(_elem107)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:  # carrier: map<string, string>
                if ftype == TType.MAP:
                    self.carrier = {}
                    (_ktype109, _vtype110, _size108) = iprot.readMapBegin()
                    for _i112 in range(_size108):
                        _key113 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val114 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key113] = _val114
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer peers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the given output protocol.

        Fields left at None are omitted from the wire.
        """
        # Fast path when the accelerated C encoder is available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UploadMedia_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.media is not None:
            oprot.writeFieldBegin('media', TType.LIST, 2)
            oprot.writeListBegin(TType.STRUCT, len(self.media))
            for iter115 in self.media:
                iter115.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter116, viter117 in self.carrier.items():
                oprot.writeString(kiter116.encode('utf-8') if sys.version_info[0] == 2 else kiter116)
                oprot.writeString(viter117.encode('utf-8') if sys.version_info[0] == 2 else viter117)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated struct performs no validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(UploadMedia_args)
UploadMedia_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ), # 1
    (2, TType.LIST, 'media', (TType.STRUCT, [Media, None], False), None, ), # 2
    (3, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3
)
class UploadMedia_result(object):
    """Thrift result struct for the UploadMedia RPC.

    Attributes:
     - se: ServiceException raised by the server, if any.
    """
    def __init__(self, se=None,):
        self.se = se
    def read(self, iprot):
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.se = ServiceException()
                    self.se.read(iprot)
                else:
                    # Unexpected wire type for field 1: skip it.
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UploadMedia_result')
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No constraints declared.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
# Register the result struct and its spec: (field id, type, name, type args, default).
all_structs.append(UploadMedia_result)
UploadMedia_result.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ),  # 1
)
class UploadUniqueId_args(object):
    """Thrift argument struct for the UploadUniqueId RPC.

    Attributes:
     - req_id: request id (i64)
     - post_id: post id (i64)
     - post_type: post type code (i32)
     - carrier: string->string context map (presumably tracing headers — TODO confirm)
    """
    def __init__(self, req_id=None, post_id=None, post_type=None, carrier=None,):
        self.req_id = req_id
        self.post_id = post_id
        self.post_type = post_type
        self.carrier = carrier
    def read(self, iprot):
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I64:
                    self.post_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.post_type = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.MAP:
                    # String map; values are decoded from UTF-8 on py2.
                    self.carrier = {}
                    (_ktype119, _vtype120, _size118) = iprot.readMapBegin()
                    for _i122 in range(_size118):
                        _key123 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val124 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key123] = _val124
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UploadUniqueId_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.post_id is not None:
            oprot.writeFieldBegin('post_id', TType.I64, 2)
            oprot.writeI64(self.post_id)
            oprot.writeFieldEnd()
        if self.post_type is not None:
            oprot.writeFieldBegin('post_type', TType.I32, 3)
            oprot.writeI32(self.post_type)
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 4)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter125, viter126 in self.carrier.items():
                oprot.writeString(kiter125.encode('utf-8') if sys.version_info[0] == 2 else kiter125)
                oprot.writeString(viter126.encode('utf-8') if sys.version_info[0] == 2 else viter126)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No constraints declared.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
# Register the struct and its wire-format spec: (field id, type, name, type args, default).
all_structs.append(UploadUniqueId_args)
UploadUniqueId_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ),  # 1
    (2, TType.I64, 'post_id', None, None, ),  # 2
    (3, TType.I32, 'post_type', None, None, ),  # 3
    (4, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 4
)
class UploadUniqueId_result(object):
    """Thrift result struct for the UploadUniqueId RPC.

    Attributes:
     - se: ServiceException raised by the server, if any.
    """

    def __init__(self, se=None,):
        self.se = se

    def read(self, iprot):
        """Deserialize this struct from the *iprot* protocol."""
        # Prefer the C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_fname, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.se = ServiceException()
                self.se.read(iprot)
            else:
                # Unknown field id or unexpected wire type: skip it.
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the *oprot* protocol."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UploadUniqueId_result')
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No field constraints are declared; always passes."""
        return None

    def __repr__(self):
        rendered = ('%s=%r' % (key, value) for key, value in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(rendered))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the result struct and its spec: (field id, type, name, type args, default).
all_structs.append(UploadUniqueId_result)
UploadUniqueId_result.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ),  # 1
)
class UploadCreator_args(object):
    """Thrift argument struct for the UploadCreator RPC.

    Attributes:
     - req_id: request id (i64)
     - creator: Creator struct
     - carrier: string->string context map (presumably tracing headers — TODO confirm)
    """
    def __init__(self, req_id=None, creator=None, carrier=None,):
        self.req_id = req_id
        self.creator = creator
        self.carrier = carrier
    def read(self, iprot):
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.creator = Creator()
                    self.creator.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    # String map; values are decoded from UTF-8 on py2.
                    self.carrier = {}
                    (_ktype128, _vtype129, _size127) = iprot.readMapBegin()
                    for _i131 in range(_size127):
                        _key132 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val133 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key132] = _val133
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UploadCreator_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.creator is not None:
            oprot.writeFieldBegin('creator', TType.STRUCT, 2)
            self.creator.write(oprot)
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter134, viter135 in self.carrier.items():
                oprot.writeString(kiter134.encode('utf-8') if sys.version_info[0] == 2 else kiter134)
                oprot.writeString(viter135.encode('utf-8') if sys.version_info[0] == 2 else viter135)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No constraints declared.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
# Register the struct and its wire-format spec: (field id, type, name, type args, default).
all_structs.append(UploadCreator_args)
UploadCreator_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ),  # 1
    (2, TType.STRUCT, 'creator', [Creator, None], None, ),  # 2
    (3, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 3
)
class UploadCreator_result(object):
    """Thrift result struct for the UploadCreator RPC.

    Attributes:
     - se: ServiceException raised by the server, if any.
    """
    def __init__(self, se=None,):
        self.se = se
    def read(self, iprot):
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.se = ServiceException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UploadCreator_result')
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No constraints declared.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
# Register the result struct and its spec: (field id, type, name, type args, default).
all_structs.append(UploadCreator_result)
UploadCreator_result.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ),  # 1
)
class UploadUrls_args(object):
    """Thrift argument struct for the UploadUrls RPC.

    Attributes:
     - req_id: request id (i64)
     - urls: list of Url structs
     - carrier: string->string context map (presumably tracing headers — TODO confirm)
    """
    def __init__(self, req_id=None, urls=None, carrier=None,):
        self.req_id = req_id
        self.urls = urls
        self.carrier = carrier
    def read(self, iprot):
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.LIST:
                    # Each list element is a Url struct, read in sequence.
                    self.urls = []
                    (_etype139, _size136) = iprot.readListBegin()
                    for _i140 in range(_size136):
                        _elem141 = Url()
                        _elem141.read(iprot)
                        self.urls.append(_elem141)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    # String map; values are decoded from UTF-8 on py2.
                    self.carrier = {}
                    (_ktype143, _vtype144, _size142) = iprot.readMapBegin()
                    for _i146 in range(_size142):
                        _key147 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val148 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key147] = _val148
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UploadUrls_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.urls is not None:
            oprot.writeFieldBegin('urls', TType.LIST, 2)
            oprot.writeListBegin(TType.STRUCT, len(self.urls))
            for iter149 in self.urls:
                iter149.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter150, viter151 in self.carrier.items():
                oprot.writeString(kiter150.encode('utf-8') if sys.version_info[0] == 2 else kiter150)
                oprot.writeString(viter151.encode('utf-8') if sys.version_info[0] == 2 else viter151)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No constraints declared.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
# Register the struct and its wire-format spec: (field id, type, name, type args, default).
all_structs.append(UploadUrls_args)
UploadUrls_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ),  # 1
    (2, TType.LIST, 'urls', (TType.STRUCT, [Url, None], False), None, ),  # 2
    (3, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 3
)
class UploadUrls_result(object):
    """Thrift result struct for the UploadUrls RPC.

    Attributes:
     - se: ServiceException raised by the server, if any.
    """
    def __init__(self, se=None,):
        self.se = se
    def read(self, iprot):
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.se = ServiceException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UploadUrls_result')
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No constraints declared.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
# Register the result struct and its spec: (field id, type, name, type args, default).
all_structs.append(UploadUrls_result)
UploadUrls_result.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ),  # 1
)
class UploadUserMentions_args(object):
    """Thrift argument struct for the UploadUserMentions RPC.

    Attributes:
     - req_id: request id (i64)
     - user_mentions: list of UserMention structs
     - carrier: string->string context map (presumably tracing headers — TODO confirm)
    """
    def __init__(self, req_id=None, user_mentions=None, carrier=None,):
        self.req_id = req_id
        self.user_mentions = user_mentions
        self.carrier = carrier
    def read(self, iprot):
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.LIST:
                    # Each list element is a UserMention struct, read in sequence.
                    self.user_mentions = []
                    (_etype155, _size152) = iprot.readListBegin()
                    for _i156 in range(_size152):
                        _elem157 = UserMention()
                        _elem157.read(iprot)
                        self.user_mentions.append(_elem157)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    # String map; values are decoded from UTF-8 on py2.
                    self.carrier = {}
                    (_ktype159, _vtype160, _size158) = iprot.readMapBegin()
                    for _i162 in range(_size158):
                        _key163 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val164 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key163] = _val164
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UploadUserMentions_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.user_mentions is not None:
            oprot.writeFieldBegin('user_mentions', TType.LIST, 2)
            oprot.writeListBegin(TType.STRUCT, len(self.user_mentions))
            for iter165 in self.user_mentions:
                iter165.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter166, viter167 in self.carrier.items():
                oprot.writeString(kiter166.encode('utf-8') if sys.version_info[0] == 2 else kiter166)
                oprot.writeString(viter167.encode('utf-8') if sys.version_info[0] == 2 else viter167)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No constraints declared.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
# Register the struct and its wire-format spec: (field id, type, name, type args, default).
all_structs.append(UploadUserMentions_args)
UploadUserMentions_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ),  # 1
    (2, TType.LIST, 'user_mentions', (TType.STRUCT, [UserMention, None], False), None, ),  # 2
    (3, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 3
)
class UploadUserMentions_result(object):
    """Thrift result struct for the UploadUserMentions RPC.

    Attributes:
     - se: ServiceException raised by the server, if any.
    """
    def __init__(self, se=None,):
        self.se = se
    def read(self, iprot):
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.se = ServiceException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UploadUserMentions_result')
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No constraints declared.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
# Register the result struct and its spec: (field id, type, name, type args, default).
all_structs.append(UploadUserMentions_result)
UploadUserMentions_result.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ),  # 1
)
# Resolve forward references inside every registered thrift_spec, then
# drop the registry list — it is only needed at import time.
fix_spec(all_structs)
del all_structs
| 34.68255
| 134
| 0.573795
| 5,452
| 51,677
| 5.214417
| 0.046772
| 0.018995
| 0.025326
| 0.023427
| 0.826515
| 0.796616
| 0.777973
| 0.754898
| 0.753562
| 0.739597
| 0
| 0.017979
| 0.321923
| 51,677
| 1,489
| 135
| 34.705843
| 0.793328
| 0.022563
| 0
| 0.763339
| 1
| 0
| 0.031363
| 0.001392
| 0
| 0
| 0
| 0
| 0
| 1
| 0.100688
| false
| 0.005164
| 0.006885
| 0.030981
| 0.189329
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a279ef90e3d6ce8e8643fbf5f2638ecbd362bbd2
| 4,942
|
py
|
Python
|
awx/main/migrations/0021_v330_declare_new_rbac_roles.py
|
alexander-bauer/awx
|
d1319b739406dad988f97c41cb92093f180ba822
|
[
"Apache-2.0"
] | 1
|
2018-02-25T17:56:18.000Z
|
2018-02-25T17:56:18.000Z
|
awx/main/migrations/0021_v330_declare_new_rbac_roles.py
|
alexander-bauer/awx
|
d1319b739406dad988f97c41cb92093f180ba822
|
[
"Apache-2.0"
] | null | null | null |
awx/main/migrations/0021_v330_declare_new_rbac_roles.py
|
alexander-bauer/awx
|
d1319b739406dad988f97c41cb92093f180ba822
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-02-01 16:32
from __future__ import unicode_literals
import awx.main.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated (Django 1.11) migration: adds per-resource *_admin_role
    # fields to Organization and rewires existing admin/execute role parents.
    # NOTE(review): migrations are historical records — operations must not
    # be edited after they have been applied anywhere.
    # NOTE(review): null=b'True' is a py2-era bytes literal; it is truthy, so
    # it behaves like null=True — presumably intentional generator output.

    dependencies = [
        ('main', '0020_v330_instancegroup_policies'),
    ]

    operations = [
        migrations.AddField(
            model_name='organization',
            name='execute_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
        ),
        migrations.AddField(
            model_name='organization',
            name='credential_admin_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
        ),
        migrations.AddField(
            model_name='organization',
            name='inventory_admin_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
        ),
        migrations.AddField(
            model_name='organization',
            name='project_admin_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
        ),
        migrations.AddField(
            model_name='organization',
            name='workflow_admin_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
        ),
        migrations.AddField(
            model_name='organization',
            name='notification_admin_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
        ),
        # The remaining operations re-parent existing roles onto the new
        # organization-level admin roles declared above.
        migrations.AlterField(
            model_name='credential',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'singleton:system_administrator', b'organization.credential_admin_role'], related_name='+', to='main.Role'),
        ),
        migrations.AlterField(
            model_name='inventory',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'organization.inventory_admin_role', related_name='+', to='main.Role'),
        ),
        migrations.AlterField(
            model_name='project',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'organization.project_admin_role', b'singleton:system_administrator'], related_name='+', to='main.Role'),
        ),
        migrations.AlterField(
            model_name='workflowjobtemplate',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'singleton:system_administrator', b'organization.workflow_admin_role'], related_name='+', to='main.Role'),
        ),
        migrations.AlterField(
            model_name='workflowjobtemplate',
            name='execute_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'organization.execute_role'], related_name='+', to='main.Role'),
        ),
        migrations.AlterField(
            model_name='jobtemplate',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'project.organization.project_admin_role', b'inventory.organization.inventory_admin_role'], related_name='+', to='main.Role'),
        ),
        migrations.AlterField(
            model_name='jobtemplate',
            name='execute_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'project.organization.execute_role', b'inventory.organization.execute_role'], related_name='+', to='main.Role'),
        ),
        migrations.AlterField(
            model_name='organization',
            name='member_role',
            field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'project_admin_role', b'inventory_admin_role', b'workflow_admin_role', b'notification_admin_role', b'execute_role'], related_name='+', to='main.Role'),
        ),
    ]
| 55.52809
| 291
| 0.669769
| 580
| 4,942
| 5.501724
| 0.113793
| 0.081793
| 0.061109
| 0.103416
| 0.851144
| 0.819179
| 0.806957
| 0.796929
| 0.796929
| 0.796929
| 0
| 0.005993
| 0.189599
| 4,942
| 88
| 292
| 56.159091
| 0.790762
| 0.01376
| 0
| 0.654321
| 1
| 0
| 0.241634
| 0.101417
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.061728
| 0
| 0.098765
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a2a49283ca6d54caad8882007dc667209547bf70
| 26
|
py
|
Python
|
pythran/tests/user_defined_import/global_init.py
|
davidbrochart/pythran
|
24b6c8650fe99791a4091cbdc2c24686e86aa67c
|
[
"BSD-3-Clause"
] | 1,647
|
2015-01-13T01:45:38.000Z
|
2022-03-28T01:23:41.000Z
|
pythran/tests/user_defined_import/global_init.py
|
davidbrochart/pythran
|
24b6c8650fe99791a4091cbdc2c24686e86aa67c
|
[
"BSD-3-Clause"
] | 1,116
|
2015-01-01T09:52:05.000Z
|
2022-03-18T21:06:40.000Z
|
pythran/tests/user_defined_import/global_init.py
|
davidbrochart/pythran
|
24b6c8650fe99791a4091cbdc2c24686e86aa67c
|
[
"BSD-3-Clause"
] | 180
|
2015-02-12T02:47:28.000Z
|
2022-03-14T10:28:18.000Z
|
def aa():
    """Return the fixed float constant 3.14."""
    value = 3.14
    return value
| 8.666667
| 15
| 0.538462
| 5
| 26
| 2.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0.307692
| 26
| 2
| 16
| 13
| 0.611111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
0c34681116d5cace69d27781af77c94b765b6dc3
| 179
|
py
|
Python
|
src/causal2020/graphs/sob/__init__.py
|
hassanobeid1994/tr_b_causal_2020
|
1ffaeb7dcefccf5e1f24c459e9a2f140b2a052a5
|
[
"MIT"
] | null | null | null |
src/causal2020/graphs/sob/__init__.py
|
hassanobeid1994/tr_b_causal_2020
|
1ffaeb7dcefccf5e1f24c459e9a2f140b2a052a5
|
[
"MIT"
] | 89
|
2020-02-10T02:52:11.000Z
|
2020-06-23T03:50:27.000Z
|
src/causal2020/graphs/sob/__init__.py
|
hassan-obeid/tr_b_causal_2020
|
1ffaeb7dcefccf5e1f24c459e9a2f140b2a052a5
|
[
"MIT"
] | null | null | null |
import causal2020.graphs.sob.da_independent
import causal2020.graphs.sob.da_interacting
import causal2020.graphs.sob.sr2_interacting
import causal2020.graphs.sob.sr3p_interacting
| 35.8
| 45
| 0.888268
| 24
| 179
| 6.458333
| 0.375
| 0.412903
| 0.567742
| 0.645161
| 0.812903
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 0.044693
| 179
| 4
| 46
| 44.75
| 0.80117
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
0c46648d884147a0cedf9b16aedcd7b2c9893412
| 21,482
|
py
|
Python
|
atom/nucleus/python/nucleus_api/api/webhook_api.py
|
ShekharPaatni/SDK
|
6534ffdb63af87c02c431df9add05a90370183cb
|
[
"Apache-2.0"
] | 11
|
2019-04-16T02:11:17.000Z
|
2021-12-16T22:51:40.000Z
|
atom/nucleus/python/nucleus_api/api/webhook_api.py
|
ShekharPaatni/SDK
|
6534ffdb63af87c02c431df9add05a90370183cb
|
[
"Apache-2.0"
] | 81
|
2019-11-19T23:24:28.000Z
|
2022-03-28T11:35:47.000Z
|
atom/nucleus/python/nucleus_api/api/webhook_api.py
|
ShekharPaatni/SDK
|
6534ffdb63af87c02c431df9add05a90370183cb
|
[
"Apache-2.0"
] | 11
|
2020-07-08T02:29:56.000Z
|
2022-03-28T10:05:33.000Z
|
# coding: utf-8
"""
Hydrogen Nucleus API
The Hydrogen Nucleus API # noqa: E501
OpenAPI spec version: 1.9.4
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from nucleus_api.api_client import ApiClient
class WebhookApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    # Use the caller-supplied client, falling back to a default ApiClient.
    self.api_client = ApiClient() if api_client is None else api_client
def create_webhook_using_post(self, webhook_request, **kwargs):  # noqa: E501
    """Create a webhook  # noqa: E501

    One active webhook service is allowed at all times.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_webhook_using_post(webhook_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Webhook webhook_request: webhookRequest (required)
    :return: Webhook
        If the method is called asynchronously,
        returns the request thread.
    """
    # Both the sync and async branches of the generated code return the
    # _with_http_info result unchanged (data or request thread), so a
    # single call covers both cases.
    kwargs['_return_http_data_only'] = True
    return self.create_webhook_using_post_with_http_info(webhook_request, **kwargs)  # noqa: E501
def create_webhook_using_post_with_http_info(self, webhook_request, **kwargs):  # noqa: E501
    """Create a webhook  # noqa: E501

    One active webhook service is allowed at all times.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_webhook_using_post_with_http_info(webhook_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Webhook webhook_request: webhookRequest (required)
    :return: Webhook
        If the method is called asynchronously,
        returns the request thread.
    """
    # Allow-list of accepted keyword arguments for this endpoint.
    all_params = ['webhook_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument not in the allow-list above.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_webhook_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'webhook_request' is set
    if self.api_client.client_side_validation and ('webhook_request' not in params or
                                                   params['webhook_request'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `webhook_request` when calling `create_webhook_using_post`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The webhook payload is sent as the request body.
    body_params = None
    if 'webhook_request' in params:
        body_params = params['webhook_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/webhook', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Webhook',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_webhook_using_delete(self, webhook_id, **kwargs):  # noqa: E501
    """Delete a webhook  # noqa: E501

    Permanently delete a webhook for your firm. The webhook_id must be
    provided. This method makes a synchronous HTTP request by default.
    To make an asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_webhook_using_delete(webhook_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str webhook_id: UUID webhook_id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper always get the payload only,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.delete_webhook_using_delete_with_http_info(webhook_id, **kwargs)  # noqa: E501
    return self.delete_webhook_using_delete_with_http_info(webhook_id, **kwargs)  # noqa: E501
def delete_webhook_using_delete_with_http_info(self, webhook_id, **kwargs):  # noqa: E501
    """Delete a webhook  # noqa: E501

    Permanently delete a webhook for your firm. The webhook_id must be provided. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_webhook_using_delete_with_http_info(webhook_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str webhook_id: UUID webhook_id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['webhook_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() captures the named arguments (`self`, `webhook_id`) and
    # `kwargs` *by variable name* — these local names must not be renamed.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_webhook_using_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'webhook_id' is set
    if self.api_client.client_side_validation and ('webhook_id' not in params or
                                                   params['webhook_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `webhook_id` when calling `delete_webhook_using_delete`")  # noqa: E501

    collection_formats = {}

    # webhook_id is interpolated into the URL path, not sent as a query param.
    path_params = {}
    if 'webhook_id' in params:
        path_params['webhook_id'] = params['webhook_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # response_type=None: a successful DELETE deserializes to nothing.
    return self.api_client.call_api(
        '/nucleus/v1/webhook/{webhook_id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_webhook_all_using_get(self, **kwargs):  # noqa: E501
    """List all webhooks  # noqa: E501

    Get information for all webhooks defined for your firm. This method
    makes a synchronous HTTP request by default. To make an asynchronous
    HTTP request, please pass async_req=True
    >>> thread = api.get_webhook_all_using_get(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageWebhook
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: return only the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.get_webhook_all_using_get_with_http_info(**kwargs)  # noqa: E501
    return self.get_webhook_all_using_get_with_http_info(**kwargs)  # noqa: E501
def get_webhook_all_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """List all webhooks  # noqa: E501

    Get information for all webhooks defined for your firm. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_webhook_all_using_get_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param bool ascending: ascending
    :param str filter: filter
    :param str order_by: order_by
    :param int page: page
    :param int size: size
    :return: PageWebhook
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['ascending', 'filter', 'order_by', 'page', 'size']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() captures `self` and `kwargs` by variable name, so
    # the local names in this prologue are load-bearing.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_webhook_all_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # All five optional paging/filtering arguments travel as query params.
    query_params = []
    if 'ascending' in params:
        query_params.append(('ascending', params['ascending']))  # noqa: E501
    if 'filter' in params:
        query_params.append(('filter', params['filter']))  # noqa: E501
    if 'order_by' in params:
        query_params.append(('order_by', params['order_by']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'size' in params:
        query_params.append(('size', params['size']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/webhook', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageWebhook',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_webhook_using_get(self, webhook_id, **kwargs):  # noqa: E501
    """Retrieve a webhook  # noqa: E501

    Retrieve the information for a specific webhook. The webhook_id must
    be provided. This method makes a synchronous HTTP request by default.
    To make an asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_webhook_using_get(webhook_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str webhook_id: UUID webhook_id (required)
    :return: Webhook
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: return only the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.get_webhook_using_get_with_http_info(webhook_id, **kwargs)  # noqa: E501
    return self.get_webhook_using_get_with_http_info(webhook_id, **kwargs)  # noqa: E501
def get_webhook_using_get_with_http_info(self, webhook_id, **kwargs):  # noqa: E501
    """Retrieve a webhook  # noqa: E501

    Retrieve the information for a specific webhook. The webhook_id must be provided. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_webhook_using_get_with_http_info(webhook_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str webhook_id: UUID webhook_id (required)
    :return: Webhook
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['webhook_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() captures the named arguments by variable name, so
    # the local names in this prologue are load-bearing.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_webhook_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'webhook_id' is set
    if self.api_client.client_side_validation and ('webhook_id' not in params or
                                                   params['webhook_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `webhook_id` when calling `get_webhook_using_get`")  # noqa: E501

    collection_formats = {}

    # webhook_id is interpolated into the URL path.
    path_params = {}
    if 'webhook_id' in params:
        path_params['webhook_id'] = params['webhook_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/webhook/{webhook_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Webhook',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_webhook_using_put(self, webhook, webhook_id, **kwargs):  # noqa: E501
    """Update a webhook  # noqa: E501

    Update a webhook for your firm. The webhook_id must be provided.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_webhook_using_put(webhook, webhook_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param object webhook: webhook (required)
    :param str webhook_id: UUID webhook_id (required)
    :return: Webhook
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: return only the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.update_webhook_using_put_with_http_info(webhook, webhook_id, **kwargs)  # noqa: E501
    return self.update_webhook_using_put_with_http_info(webhook, webhook_id, **kwargs)  # noqa: E501
def update_webhook_using_put_with_http_info(self, webhook, webhook_id, **kwargs):  # noqa: E501
    """Update a webhook  # noqa: E501

    Update a webhook for your firm. The webhook_id must be provided # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_webhook_using_put_with_http_info(webhook, webhook_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param object webhook: webhook (required)
    :param str webhook_id: UUID webhook_id (required)
    :return: Webhook
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['webhook', 'webhook_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() captures the named arguments by variable name, so
    # the local names in this prologue are load-bearing.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_webhook_using_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'webhook' is set
    if self.api_client.client_side_validation and ('webhook' not in params or
                                                   params['webhook'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `webhook` when calling `update_webhook_using_put`")  # noqa: E501
    # verify the required parameter 'webhook_id' is set
    if self.api_client.client_side_validation and ('webhook_id' not in params or
                                                   params['webhook_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `webhook_id` when calling `update_webhook_using_put`")  # noqa: E501

    collection_formats = {}

    # webhook_id goes in the URL path; the webhook object goes in the body.
    path_params = {}
    if 'webhook_id' in params:
        path_params['webhook_id'] = params['webhook_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'webhook' in params:
        body_params = params['webhook']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/nucleus/v1/webhook/{webhook_id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Webhook',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 39.929368
| 135
| 0.611675
| 2,504
| 21,482
| 4.974441
| 0.072284
| 0.053308
| 0.022479
| 0.028902
| 0.932001
| 0.914338
| 0.896757
| 0.882707
| 0.870022
| 0.863439
| 0
| 0.017833
| 0.300438
| 21,482
| 537
| 136
| 40.003724
| 0.811019
| 0.327949
| 0
| 0.745583
| 1
| 0
| 0.177953
| 0.051091
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038869
| false
| 0
| 0.014134
| 0
| 0.109541
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a7b7ec6b6f30213f61cb7415cf1f734ded365d15
| 8,757
|
py
|
Python
|
usaspending_api/search/tests/integration/hierarchical_filters/test_tas_filter_heirarchical_cases_subaward.py
|
g4brielvs/usaspending-api
|
bae7da2c204937ec1cdf75c052405b13145728d5
|
[
"CC0-1.0"
] | 217
|
2016-11-03T17:09:53.000Z
|
2022-03-10T04:17:54.000Z
|
usaspending_api/search/tests/integration/hierarchical_filters/test_tas_filter_heirarchical_cases_subaward.py
|
g4brielvs/usaspending-api
|
bae7da2c204937ec1cdf75c052405b13145728d5
|
[
"CC0-1.0"
] | 622
|
2016-09-02T19:18:23.000Z
|
2022-03-29T17:11:01.000Z
|
usaspending_api/search/tests/integration/hierarchical_filters/test_tas_filter_heirarchical_cases_subaward.py
|
g4brielvs/usaspending-api
|
bae7da2c204937ec1cdf75c052405b13145728d5
|
[
"CC0-1.0"
] | 93
|
2016-09-07T20:28:57.000Z
|
2022-02-25T00:25:27.000Z
|
import pytest
from usaspending_api.search.tests.integration.hierarchical_filters.tas_fixtures import (
BASIC_TAS,
ATA_TAS,
SISTER_TAS,
TAS_DICTIONARIES,
TAS_STRINGS,
)
from usaspending_api.search.tests.integration.hierarchical_filters.tas_search_test_helpers import (
_setup_es,
query_by_tas_subaward,
)
@pytest.mark.django_db
def test_agency_level_require_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Requiring the agency level of the subaward's TAS returns that subaward."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"require": [_agency_path(BASIC_TAS)]})
    assert response.json()["results"] == [_subaward1()]
@pytest.mark.django_db
def test_fa_level_require_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Requiring the federal-account level of the subaward's TAS returns that subaward."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"require": [_fa_path(BASIC_TAS)]})
    assert response.json()["results"] == [_subaward1()]
@pytest.mark.django_db
def test_tas_level_require_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Requiring the full TAS path of the subaward's TAS returns that subaward."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"require": [_tas_path(BASIC_TAS)]})
    assert response.json()["results"] == [_subaward1()]
@pytest.mark.django_db
def test_agency_level_exclude_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Excluding an unrelated agency still returns the subaward."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"exclude": [_agency_path(ATA_TAS)]})
    assert response.json()["results"] == [_subaward1()]
@pytest.mark.django_db
def test_fa_level_exclude_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Excluding an unrelated federal account still returns the subaward."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"exclude": [_fa_path(ATA_TAS)]})
    assert response.json()["results"] == [_subaward1()]
@pytest.mark.django_db
def test_tas_level_exclude_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Excluding an unrelated full TAS path still returns the subaward."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"exclude": [_tas_path(ATA_TAS)]})
    assert response.json()["results"] == [_subaward1()]
@pytest.mark.django_db
def test_agency_level_require_non_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Requiring an agency the subaward does not belong to returns nothing."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"require": [_agency_path(ATA_TAS)]})
    assert response.json()["results"] == []
@pytest.mark.django_db
def test_fa_level_require_non_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Requiring a federal account the subaward does not belong to returns nothing."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"require": [_fa_path(ATA_TAS)]})
    assert response.json()["results"] == []
@pytest.mark.django_db
def test_tas_level_require_non_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Requiring a full TAS path the subaward does not have returns nothing."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"require": [_tas_path(ATA_TAS)]})
    assert response.json()["results"] == []
@pytest.mark.django_db
def test_agency_level_exclude_non_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Excluding the subaward's own agency filters it out."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"exclude": [_agency_path(BASIC_TAS)]})
    assert response.json()["results"] == []
@pytest.mark.django_db
def test_fa_level_exclude_non_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Excluding the subaward's own federal account filters it out."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"exclude": [_fa_path(BASIC_TAS)]})
    assert response.json()["results"] == []
@pytest.mark.django_db
def test_tas_level_exclude_non_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Excluding the subaward's own full TAS path filters it out."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"exclude": [_tas_path(BASIC_TAS)]})
    assert response.json()["results"] == []
@pytest.mark.django_db
def test_double_require(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Two overlapping `require` paths for the same TAS still match the subaward."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"require": [_fa_path(BASIC_TAS), _tas_path(BASIC_TAS)]})
    assert response.json()["results"] == [_subaward1()]
@pytest.mark.django_db
def test_double_exclude(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Two overlapping `exclude` paths for the subaward's TAS filter it out."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"exclude": [_fa_path(BASIC_TAS), _tas_path(BASIC_TAS)]})
    assert response.json()["results"] == []
@pytest.mark.django_db
def test_exclude_overrides_require(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """When the same path is both required and excluded, exclude wins."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"require": [_tas_path(BASIC_TAS)], "exclude": [_tas_path(BASIC_TAS)]})
    assert response.json()["results"] == []
@pytest.mark.django_db
def test_exclude_eclipsing_require(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """An exclude deeper in the hierarchy than the require eclipses it."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"require": [_agency_path(BASIC_TAS)], "exclude": [_fa_path(BASIC_TAS)]})
    assert response.json()["results"] == []
@pytest.mark.django_db
def test_require_eclipsing_exclude(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """A require deeper in the hierarchy than the exclude eclipses it."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"require": [_fa_path(BASIC_TAS)], "exclude": [_agency_path(BASIC_TAS)]})
    assert response.json()["results"] == [_subaward1()]
@pytest.mark.django_db
def test_double_eclipsing_filters(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """The deepest filter wins: a TAS-level require beats an FA-level exclude."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    filters = {"require": [_agency_path(BASIC_TAS), _tas_path(BASIC_TAS)], "exclude": [_fa_path(BASIC_TAS)]}
    response = query_by_tas_subaward(client, filters)
    assert response.json()["results"] == [_subaward1()]
@pytest.mark.django_db
def test_double_eclipsing_filters2(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """The deepest filter wins: a TAS-level exclude beats an FA-level require."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    filters = {"require": [_fa_path(BASIC_TAS)], "exclude": [_agency_path(BASIC_TAS), _tas_path(BASIC_TAS)]}
    response = query_by_tas_subaward(client, filters)
    assert response.json()["results"] == []
@pytest.mark.django_db
def test_sibling_eclipsing_filters(client, monkeypatch, elasticsearch_award_index, multiple_subawards_with_tas):
    """Criss-crossed require/exclude across two TAS hierarchies keeps only subaward 2."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    filters = {
        "require": [_agency_path(BASIC_TAS), _tas_path(ATA_TAS)],
        "exclude": [_agency_path(ATA_TAS), _tas_path(BASIC_TAS)],
    }
    response = query_by_tas_subaward(client, filters)
    assert response.json()["results"] == [_subaward2()]
@pytest.mark.django_db
def test_sibling_filters_one_match(client, monkeypatch, elasticsearch_award_index, multiple_subawards_with_sibling_tas):
    """Requiring one sibling TAS matches only the subaward with that TAS."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    response = query_by_tas_subaward(client, {"require": [_tas_path(SISTER_TAS[1])]})
    assert response.json()["results"] == [_subaward2()]
@pytest.mark.django_db
def test_sibling_filters_two_matchs(
    client, monkeypatch, elasticsearch_award_index, multiple_subawards_with_sibling_tas
):
    """Requiring two sibling TAS paths matches both subawards.

    BUG FIX: the original assertion compared the return values of
    ``list.sort(...)``, which is an in-place sort that always returns
    ``None`` — so the assertion was ``None == None`` and could never fail.
    Compare ``sorted(...)`` copies instead so the test actually checks
    the results.
    """
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    resp = query_by_tas_subaward(client, {"require": [_tas_path(SISTER_TAS[1]), _tas_path(SISTER_TAS[0])]})
    assert sorted(resp.json()["results"], key=lambda elem: elem["internal_id"]) == sorted(
        [_subaward1(), _subaward2()], key=lambda elem: elem["internal_id"]
    )
def _subaward1():
return {
"Sub-Award ID": "11111",
"internal_id": "11111",
"prime_award_generated_internal_id": "AWARD_1",
"prime_award_internal_id": 1,
}
def _subaward2():
return {
"internal_id": "11111",
"prime_award_internal_id": 2,
"Sub-Award ID": "11111",
"prime_award_generated_internal_id": "AWARD_2",
}
def _agency_path(index):
    """Hierarchical filter path down to the agency level only."""
    path = [_agency(index)]
    return path
def _fa_path(index):
    """Hierarchical filter path down to the federal-account level."""
    path = [_agency(index), _fa(index)]
    return path
def _tas_path(index):
    """Hierarchical filter path down to the full TAS level."""
    path = [_agency(index), _fa(index), _tas(index)]
    return path
def _agency(index):
    """Agency identifier (aid) for the TAS fixture at `index`."""
    entry = TAS_DICTIONARIES[index]
    return entry["aid"]
def _fa(index):
    """Federal-account identifier ("aid-main") for the TAS fixture at `index`."""
    entry = TAS_DICTIONARIES[index]
    return f"{entry['aid']}-{entry['main']}"
def _tas(index):
    """Full TAS string for the fixture at `index`."""
    return TAS_STRINGS[index]
def _sort_by_id(dictionary):
dictionary["internal_id"]
| 35.028
| 120
| 0.75197
| 1,118
| 8,757
| 5.412343
| 0.067979
| 0.123616
| 0.218146
| 0.254503
| 0.910924
| 0.897868
| 0.897042
| 0.869939
| 0.846967
| 0.817386
| 0
| 0.005606
| 0.124015
| 8,757
| 249
| 121
| 35.168675
| 0.783209
| 0
| 0
| 0.45122
| 0
| 0
| 0.073541
| 0.020327
| 0
| 0
| 0
| 0
| 0.134146
| 1
| 0.189024
| false
| 0
| 0.018293
| 0.04878
| 0.256098
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a7c454851bf6c653632b6767369a07498a18bbbb
| 11,480
|
py
|
Python
|
tests/components/yale_smart_alarm/test_config_flow.py
|
PiotrMachowski/core
|
b9d7d0cae2ccd2d88e90e49cc09e154a27ed809b
|
[
"Apache-2.0"
] | 3
|
2022-01-27T17:00:51.000Z
|
2022-03-09T03:49:03.000Z
|
tests/components/yale_smart_alarm/test_config_flow.py
|
PiotrMachowski/core
|
b9d7d0cae2ccd2d88e90e49cc09e154a27ed809b
|
[
"Apache-2.0"
] | 24
|
2021-11-11T03:58:57.000Z
|
2022-03-31T06:24:13.000Z
|
tests/components/yale_smart_alarm/test_config_flow.py
|
PiotrMachowski/core
|
b9d7d0cae2ccd2d88e90e49cc09e154a27ed809b
|
[
"Apache-2.0"
] | 1
|
2019-02-23T11:27:33.000Z
|
2019-02-23T11:27:33.000Z
|
"""Test the Yale Smart Living config flow."""
from __future__ import annotations
from unittest.mock import patch
import pytest
from yalesmartalarmclient.exceptions import AuthenticationError, UnknownError
from homeassistant import config_entries
from homeassistant.components.yale_smart_alarm.const import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import (
RESULT_TYPE_ABORT,
RESULT_TYPE_CREATE_ENTRY,
RESULT_TYPE_FORM,
)
from tests.common import MockConfigEntry
async def test_form(hass: HomeAssistant) -> None:
    """Test we get the form."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == RESULT_TYPE_FORM
    assert result["errors"] == {}

    # Patch out the Yale client (no network I/O) and the integration setup,
    # then submit the user step with credentials.
    with patch(
        "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient",
    ), patch(
        "homeassistant.components.yale_smart_alarm.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "username": "test-username",
                "password": "test-password",
                "name": "Yale Smart Alarm",
                "area_id": "1",
            },
        )
        await hass.async_block_till_done()

    # A config entry is created from the submitted data and setup runs once.
    assert result2["type"] == RESULT_TYPE_CREATE_ENTRY
    assert result2["title"] == "test-username"
    assert result2["data"] == {
        "username": "test-username",
        "password": "test-password",
        "name": "Yale Smart Alarm",
        "area_id": "1",
    }
    assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
    "sideeffect,p_error",
    [
        (AuthenticationError, "invalid_auth"),
        (ConnectionError, "cannot_connect"),
        (TimeoutError, "cannot_connect"),
        (UnknownError, "cannot_connect"),
    ],
)
async def test_form_invalid_auth(
    hass: HomeAssistant, sideeffect: Exception, p_error: str
) -> None:
    """Test we handle invalid auth."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # First attempt: the client constructor raises, so the form is re-shown
    # with the mapped error under the "base" key.
    with patch(
        "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient",
        side_effect=sideeffect,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "username": "test-username",
                "password": "test-password",
                "name": "Yale Smart Alarm",
                "area_id": "1",
            },
        )
        await hass.async_block_till_done()

    assert result2["type"] == RESULT_TYPE_FORM
    assert result2["errors"] == {"base": p_error}

    # Second attempt with a working client: the flow recovers and creates
    # the entry.
    with patch(
        "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient",
    ), patch(
        "homeassistant.components.yale_smart_alarm.async_setup_entry",
        return_value=True,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "username": "test-username",
                "password": "test-password",
                "name": "Yale Smart Alarm",
                "area_id": "1",
            },
        )
        await hass.async_block_till_done()

    assert result2["type"] == RESULT_TYPE_CREATE_ENTRY
    assert result2["title"] == "test-username"
    assert result2["data"] == {
        "username": "test-username",
        "password": "test-password",
        "name": "Yale Smart Alarm",
        "area_id": "1",
    }
@pytest.mark.parametrize(
    "p_input,p_output",
    [
        # Full yaml config imports as-is.
        (
            {
                "username": "test-username",
                "password": "test-password",
                "name": "Yale Smart Alarm",
                "area_id": "1",
            },
            {
                "username": "test-username",
                "password": "test-password",
                "name": "Yale Smart Alarm",
                "area_id": "1",
            },
        ),
        # Minimal yaml config: name and area_id are filled in with defaults.
        (
            {
                "username": "test-username",
                "password": "test-password",
            },
            {
                "username": "test-username",
                "password": "test-password",
                "name": "Yale Smart Alarm",
                "area_id": "1",
            },
        ),
    ],
)
async def test_import_flow_success(
    hass, p_input: dict[str, str], p_output: dict[str, str]
):
    """Test a successful import of yaml."""
    with patch(
        "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient",
    ), patch(
        "homeassistant.components.yale_smart_alarm.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data=p_input,
        )
        await hass.async_block_till_done()

    assert result2["type"] == "create_entry"
    assert result2["title"] == "test-username"
    assert result2["data"] == p_output
    assert len(mock_setup_entry.mock_calls) == 1
async def test_reauth_flow(hass: HomeAssistant) -> None:
    """Test a reauthentication flow."""
    # Pre-existing entry whose password will be replaced by the reauth.
    entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id="test-username",
        data={
            "username": "test-username",
            "password": "test-password",
            "name": "Yale Smart Alarm",
            "area_id": "1",
        },
    )
    entry.add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={
            "source": config_entries.SOURCE_REAUTH,
            "unique_id": entry.unique_id,
            "entry_id": entry.entry_id,
        },
        data=entry.data,
    )
    assert result["step_id"] == "reauth_confirm"
    assert result["type"] == RESULT_TYPE_FORM
    assert result["errors"] == {}

    with patch(
        "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient",
    ) as mock_yale, patch(
        "homeassistant.components.yale_smart_alarm.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "username": "test-username",
                "password": "new-test-password",
            },
        )
        await hass.async_block_till_done()

    # Reauth aborts the flow, updates the stored password in place, and
    # validates the credentials exactly once before re-running setup.
    assert result2["type"] == RESULT_TYPE_ABORT
    assert result2["reason"] == "reauth_successful"
    assert entry.data == {
        "username": "test-username",
        "password": "new-test-password",
        "name": "Yale Smart Alarm",
        "area_id": "1",
    }
    assert len(mock_yale.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
    "sideeffect,p_error",
    [
        (AuthenticationError, "invalid_auth"),
        (ConnectionError, "cannot_connect"),
        (TimeoutError, "cannot_connect"),
        (UnknownError, "cannot_connect"),
    ],
)
async def test_reauth_flow_error(
    hass: HomeAssistant, sideeffect: Exception, p_error: str
) -> None:
    """Test a reauthentication flow."""
    entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id="test-username",
        data={
            "username": "test-username",
            "password": "test-password",
            "name": "Yale Smart Alarm",
            "area_id": "1",
        },
    )
    entry.add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={
            "source": config_entries.SOURCE_REAUTH,
            "unique_id": entry.unique_id,
            "entry_id": entry.entry_id,
        },
        data=entry.data,
    )

    # First attempt fails: the client raises, the confirm form is re-shown
    # with the mapped error.
    with patch(
        "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient",
        side_effect=sideeffect,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "username": "test-username",
                "password": "wrong-password",
            },
        )
        await hass.async_block_till_done()

    assert result2["step_id"] == "reauth_confirm"
    assert result2["type"] == RESULT_TYPE_FORM
    assert result2["errors"] == {"base": p_error}

    # Second attempt succeeds and the stored password is updated.
    with patch(
        "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient",
        return_value="",
    ), patch(
        "homeassistant.components.yale_smart_alarm.async_setup_entry",
        return_value=True,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "username": "test-username",
                "password": "new-test-password",
            },
        )
        await hass.async_block_till_done()

    assert result2["type"] == RESULT_TYPE_ABORT
    assert result2["reason"] == "reauth_successful"
    assert entry.data == {
        "username": "test-username",
        "password": "new-test-password",
        "name": "Yale Smart Alarm",
        "area_id": "1",
    }
async def test_options_flow(hass: HomeAssistant) -> None:
    """Test options config flow."""
    entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id="test-username",
        data={},
    )
    entry.add_to_hass(hass)

    with patch(
        "homeassistant.components.yale_smart_alarm.async_setup_entry",
        return_value=True,
    ):
        assert await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()

    result = await hass.config_entries.options.async_init(entry.entry_id)

    assert result["type"] == RESULT_TYPE_FORM
    assert result["step_id"] == "init"

    # A 6-digit code with lock_code_digits=6 is accepted and stored.
    result = await hass.config_entries.options.async_configure(
        result["flow_id"],
        user_input={"code": "123456", "lock_code_digits": 6},
    )

    assert result["type"] == RESULT_TYPE_CREATE_ENTRY
    assert result["data"] == {"code": "123456", "lock_code_digits": 6}
async def test_options_flow_format_mismatch(hass: HomeAssistant) -> None:
    """Test options config flow with a code format mismatch error."""
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id="test-username",
        data={},
    )
    config_entry.add_to_hass(hass)

    # Set up the entry with the real integration setup stubbed out.
    with patch(
        "homeassistant.components.yale_smart_alarm.async_setup_entry",
        return_value=True,
    ):
        assert await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()

    flow_result = await hass.config_entries.options.async_init(config_entry.entry_id)
    assert flow_result["step_id"] == "init"
    assert flow_result["type"] == RESULT_TYPE_FORM
    assert flow_result["errors"] == {}

    # A 3-digit code against a 6-digit requirement re-shows the form with an error.
    flow_result = await hass.config_entries.options.async_configure(
        flow_result["flow_id"],
        user_input={"code": "123", "lock_code_digits": 6},
    )
    assert flow_result["step_id"] == "init"
    assert flow_result["type"] == RESULT_TYPE_FORM
    assert flow_result["errors"] == {"base": "code_format_mismatch"}

    # A matching code completes the flow.
    flow_result = await hass.config_entries.options.async_configure(
        flow_result["flow_id"],
        user_input={"code": "123456", "lock_code_digits": 6},
    )
    assert flow_result["type"] == RESULT_TYPE_CREATE_ENTRY
    assert flow_result["data"] == {"code": "123456", "lock_code_digits": 6}
| 30.531915
| 85
| 0.597648
| 1,198
| 11,480
| 5.473289
| 0.094324
| 0.038432
| 0.057648
| 0.060393
| 0.887754
| 0.863657
| 0.863657
| 0.851456
| 0.846424
| 0.815769
| 0
| 0.008786
| 0.27622
| 11,480
| 375
| 86
| 30.613333
| 0.780359
| 0.003397
| 0
| 0.721003
| 0
| 0
| 0.246486
| 0.083356
| 0
| 0
| 0
| 0
| 0.134796
| 1
| 0
| false
| 0.050157
| 0.034483
| 0
| 0.034483
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
ce41236c61f1e17bb4978d16d4262bb0cd82c38a
| 6,642
|
py
|
Python
|
tests/test_static.py
|
dobisel/yhttp
|
4396c03905d71b801a92dead3504cc3ef7d98d79
|
[
"MIT"
] | 10
|
2020-01-30T16:23:28.000Z
|
2021-12-12T23:24:37.000Z
|
tests/test_static.py
|
dobisel/yhttp
|
4396c03905d71b801a92dead3504cc3ef7d98d79
|
[
"MIT"
] | 1
|
2021-07-12T21:07:06.000Z
|
2021-08-08T10:42:27.000Z
|
tests/test_static.py
|
dobisel/yhttp
|
4396c03905d71b801a92dead3504cc3ef7d98d79
|
[
"MIT"
] | 1
|
2020-01-26T13:28:35.000Z
|
2020-01-26T13:28:35.000Z
|
from os import path
from bddrest import status, response, when
def test_staticfile(app, Given, tmpdir):
    """A single registered static file is served with the right body and headers."""
    target = path.join(tmpdir, 'index.txt')
    with open(target, 'w') as outfile:
        outfile.write('foo')

    app.staticfile(r'/a\.txt', target)

    with Given('/a.txt'):
        assert status == 200
        assert response == 'foo'
        assert response.headers['content-length'] == '3'
        assert response.headers['content-type'] == 'text/plain'
def test_staticdirectory(app, Given, mockupfs):
    """Without a default document, directory URLs are forbidden but files serve."""
    root = mockupfs(**{
        'bar': {
            'index.txt': 'bar',
        },
        'index.txt': 'foo',
    })
    app.staticdirectory('/', root)

    with Given(''):
        # Bare directory requests are rejected.
        assert status == 403

        when('/')
        assert status == 403

        when('/index.txt')
        assert status == 200
        assert response == 'foo'
        assert response.headers['content-length'] == '3'
        assert response.headers['content-type'] == 'text/plain'

        when('/bar')
        assert status == 403

        when('/bar/index.txt')
        assert status == 200
        assert response == 'bar'
        assert response.headers['content-length'] == '3'
        assert response.headers['content-type'] == 'text/plain'

        # Missing files and directories yield 404 regardless of depth.
        for missing in ('/invalidfile', '/invalid/file', '/invalid/file.html'):
            when(missing)
            assert status == 404
def test_staticdirectory_default_true(app, Given, mockupfs):
    """With ``default=True``, directory URLs serve their ``index.html``.

    Removed stale commented-out setup code that referenced a ``tmpdir``
    fixture this test no longer receives (it uses ``mockupfs`` instead).
    """
    temproot = mockupfs(**{
        'bar': {
            'index.html': 'bar',
        },
        'index.txt': 'foo',
        'index.html': 'foo bar',
    })
    app.staticdirectory('/', temproot, default=True)

    with Given(''):
        # Root directory falls back to index.html ('foo bar', 7 bytes).
        assert status == 200
        assert response.headers['content-type'] == 'text/html'
        assert response.headers['content-length'] == '7'
        assert response == 'foo bar'

        when('/')
        assert status == 200
        assert response.headers['content-type'] == 'text/html'
        assert response.headers['content-length'] == '7'
        assert response == 'foo bar'

        # Explicit file paths are still served directly.
        when('/index.txt')
        assert status == 200
        assert response.headers['content-type'] == 'text/plain'
        assert response.headers['content-length'] == '3'
        assert response == 'foo'

        # Subdirectories (with or without trailing slash) use their own index.html.
        when('/bar')
        assert status == 200
        assert response.headers['content-type'] == 'text/html'
        assert response.headers['content-length'] == '3'
        assert response == 'bar'

        when('/bar/')
        assert status == 200
        assert response.headers['content-type'] == 'text/html'
        assert response.headers['content-length'] == '3'
        assert response == 'bar'

        # Missing resources still 404.
        when('/invalidfile')
        assert status == 404

        when('/invalid/file')
        assert status == 404

        when('/invalid/file.html')
        assert status == 404
def test_staticdirectory_default_filename(app, Given, tmpdir):
    """A named default document is served for directory URLs."""
    for filename, content in (('index.txt', 'foo'), ('index.html', 'foo bar')):
        with open(path.join(tmpdir, filename), 'w') as outfile:
            outfile.write(content)

    app.staticdirectory('/', tmpdir, default='index.txt')

    with Given(''):
        # Directory requests serve the configured default, index.txt.
        assert status == 200
        assert response == 'foo'
        assert response.headers['content-length'] == '3'
        assert response.headers['content-type'] == 'text/plain'

        when('/')
        assert status == 200
        assert response == 'foo'
        assert response.headers['content-length'] == '3'
        assert response.headers['content-type'] == 'text/plain'

        # Explicit file paths bypass the default document.
        when('/index.txt')
        assert status == 200
        assert response == 'foo'
        assert response.headers['content-length'] == '3'
        assert response.headers['content-type'] == 'text/plain'

        when('/index.html')
        assert status == 200
        assert response == 'foo bar'
        assert response.headers['content-length'] == '7'
        assert response.headers['content-type'] == 'text/html'

        # Missing resources still 404.
        for missing in ('/invalidfile', '/invalid/file', '/invalid/file.html'):
            when(missing)
            assert status == 404
def test_staticdirectory_fallback_true(app, Given, tmpdir):
    """With ``fallback=True``, unknown paths serve the default document."""
    with open(path.join(tmpdir, 'index.html'), 'w') as outfile:
        outfile.write('foo bar')

    app.staticdirectory('/', tmpdir, default=True, fallback=True)

    with Given(''):
        assert status == 200
        assert response == 'foo bar'
        assert response.headers['content-length'] == '7'
        assert response.headers['content-type'] == 'text/html'

        # A nonexistent path falls back to the default document instead of 404.
        when('/notexists.html')
        assert status == 200
        assert response == 'foo bar'
        assert response.headers['content-length'] == '7'
        assert response.headers['content-type'] == 'text/html'
def test_staticdirectory_fallback_file(app, Given, tmpdir):
    """A named fallback document is served for unknown paths."""
    for filename, content in (('index.html', 'foo bar'), ('fallback.html', 'baz')):
        with open(path.join(tmpdir, filename), 'w') as outfile:
            outfile.write(content)

    app.staticdirectory('/', tmpdir, default=True, fallback='fallback.html')

    with Given(''):
        # The default document serves the root.
        assert status == 200
        assert response == 'foo bar'
        assert response.headers['content-length'] == '7'
        assert response.headers['content-type'] == 'text/html'

        # Unknown paths serve the named fallback ('baz', 3 bytes).
        when('/notexists.html')
        assert status == 200
        assert response == 'baz'
        assert response.headers['content-length'] == '3'
        assert response.headers['content-type'] == 'text/html'
def test_staticdirectory_fallback_notexistancefile(app, Given, tmpdir):
    """A fallback that points at a missing file still yields 404."""
    with open(path.join(tmpdir, 'index.html'), 'w') as outfile:
        outfile.write('foo bar')

    app.staticdirectory('/', tmpdir, default=True, fallback='notexists.html')

    with Given(''):
        # The default document is unaffected.
        assert status == 200
        assert response == 'foo bar'
        assert response.headers['content-length'] == '7'
        assert response.headers['content-type'] == 'text/html'

        # The fallback file itself does not exist, so this cannot be served.
        when('/notexists.html')
        assert status == 404
| 29.651786
| 77
| 0.586721
| 713
| 6,642
| 5.441795
| 0.079944
| 0.184021
| 0.184021
| 0.245361
| 0.860567
| 0.85799
| 0.843557
| 0.815206
| 0.815206
| 0.815206
| 0
| 0.021819
| 0.261668
| 6,642
| 223
| 78
| 29.784753
| 0.769372
| 0.033424
| 0
| 0.795031
| 0
| 0
| 0.184781
| 0
| 0
| 0
| 0
| 0
| 0.503106
| 1
| 0.043478
| false
| 0
| 0.012422
| 0
| 0.055901
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
021d4d4d98376883c5564272ec41b05026b35f56
| 145
|
py
|
Python
|
SBaaS_physiology/stage01_physiology_data_execute.py
|
dmccloskey/SBaaS_physiology
|
2e4e960642d1ba4a0beabddb344dfe9acead8c10
|
[
"MIT"
] | null | null | null |
SBaaS_physiology/stage01_physiology_data_execute.py
|
dmccloskey/SBaaS_physiology
|
2e4e960642d1ba4a0beabddb344dfe9acead8c10
|
[
"MIT"
] | null | null | null |
SBaaS_physiology/stage01_physiology_data_execute.py
|
dmccloskey/SBaaS_physiology
|
2e4e960642d1ba4a0beabddb344dfe9acead8c10
|
[
"MIT"
] | null | null | null |
from .stage01_physiology_data_io import stage01_physiology_data_io
class stage01_physiology_data_execute(stage01_physiology_data_io):
    """Execution stage for stage01 physiology data.

    Currently adds no behavior of its own: it is a named extension point
    over ``stage01_physiology_data_io``. (Removed the non-idiomatic
    trailing semicolon on ``pass``; the docstring now serves as the body.)
    """
| 29
| 66
| 0.882759
| 20
| 145
| 5.8
| 0.45
| 0.586207
| 0.724138
| 0.594828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06015
| 0.082759
| 145
| 5
| 67
| 29
| 0.81203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 9
|
0230faf745b11198f063d259964971bdfcffc025
| 17,088
|
py
|
Python
|
sdk/python/pulumi_azure/batch/pool.py
|
kenny-wealth/pulumi-azure
|
e57e3a81f95bf622e7429c53f0bff93e33372aa1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/batch/pool.py
|
kenny-wealth/pulumi-azure
|
e57e3a81f95bf622e7429c53f0bff93e33372aa1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/batch/pool.py
|
kenny-wealth/pulumi-azure
|
e57e3a81f95bf622e7429c53f0bff93e33372aa1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class Pool(pulumi.CustomResource):
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen) — see the file
    # header; do not hand-edit logic here, regenerate instead.
    account_name: pulumi.Output[str]
    """
    Specifies the name of the Batch account in which the pool will be created. Changing this forces a new resource to be created.
    """
    auto_scale: pulumi.Output[dict]
    """
    A `auto_scale` block that describes the scale settings when using auto scale.

      * `evaluationInterval` (`str`)
      * `formula` (`str`)
    """
    certificates: pulumi.Output[list]
    """
    One or more `certificate` blocks that describe the certificates to be installed on each compute node in the pool.

      * `id` (`str`) - The Batch pool ID.
      * `storeLocation` (`str`)
      * `storeName` (`str`)
      * `visibilities` (`list`)
    """
    container_configuration: pulumi.Output[dict]
    """
    The container configuration used in the pool's VMs.

      * `containerRegistries` (`list`)
      * `password` (`str`)
      * `registryServer` (`str`)
      * `userName` (`str`)
      * `type` (`str`)
    """
    display_name: pulumi.Output[str]
    """
    Specifies the display name of the Batch pool.
    """
    fixed_scale: pulumi.Output[dict]
    """
    A `fixed_scale` block that describes the scale settings when using fixed scale.

      * `resizeTimeout` (`str`)
      * `targetDedicatedNodes` (`float`)
      * `targetLowPriorityNodes` (`float`)
    """
    max_tasks_per_node: pulumi.Output[float]
    """
    Specifies the maximum number of tasks that can run concurrently on a single compute node in the pool. Defaults to `1`. Changing this forces a new resource to be created.
    """
    name: pulumi.Output[str]
    """
    Specifies the name of the Batch pool. Changing this forces a new resource to be created.
    """
    node_agent_sku_id: pulumi.Output[str]
    """
    Specifies the Sku of the node agents that will be created in the Batch pool.
    """
    resource_group_name: pulumi.Output[str]
    """
    The name of the resource group in which to create the Batch pool. Changing this forces a new resource to be created.
    """
    start_task: pulumi.Output[dict]
    """
    A `start_task` block that describes the start task settings for the Batch pool.

      * `commandLine` (`str`)
      * `environment` (`dict`)
      * `maxTaskRetryCount` (`float`)
      * `resourceFiles` (`list`)
      * `autoStorageContainerName` (`str`)
      * `blobPrefix` (`str`)
      * `fileMode` (`str`)
      * `filePath` (`str`)
      * `httpUrl` (`str`)
      * `storageContainerUrl` (`str`)
      * `userIdentity` (`dict`)
      * `autoUser` (`dict`)
      * `elevationLevel` (`str`)
      * `scope` (`str`)
      * `userName` (`str`)
      * `waitForSuccess` (`bool`)
    """
    # No description in the provider schema for this property — TODO confirm
    # semantics against the upstream azurerm batch_pool docs.
    stop_pending_resize_operation: pulumi.Output[bool]
    storage_image_reference: pulumi.Output[dict]
    """
    A `storage_image_reference` for the virtual machines that will compose the Batch pool.

      * `id` (`str`) - The Batch pool ID.
      * `offer` (`str`)
      * `publisher` (`str`)
      * `sku` (`str`)
      * `version` (`str`)
    """
    vm_size: pulumi.Output[str]
    """
    Specifies the size of the VM created in the Batch pool.
    """
    def __init__(__self__, resource_name, opts=None, account_name=None, auto_scale=None, certificates=None, container_configuration=None, display_name=None, fixed_scale=None, max_tasks_per_node=None, name=None, node_agent_sku_id=None, resource_group_name=None, start_task=None, stop_pending_resize_operation=None, storage_image_reference=None, vm_size=None, __props__=None, __name__=None, __opts__=None):
        """
        Manages an Azure Batch pool.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] account_name: Specifies the name of the Batch account in which the pool will be created. Changing this forces a new resource to be created.
        :param pulumi.Input[dict] auto_scale: A `auto_scale` block that describes the scale settings when using auto scale.
        :param pulumi.Input[list] certificates: One or more `certificate` blocks that describe the certificates to be installed on each compute node in the pool.
        :param pulumi.Input[dict] container_configuration: The container configuration used in the pool's VMs.
        :param pulumi.Input[str] display_name: Specifies the display name of the Batch pool.
        :param pulumi.Input[dict] fixed_scale: A `fixed_scale` block that describes the scale settings when using fixed scale.
        :param pulumi.Input[float] max_tasks_per_node: Specifies the maximum number of tasks that can run concurrently on a single compute node in the pool. Defaults to `1`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the Batch pool. Changing this forces a new resource to be created.
        :param pulumi.Input[str] node_agent_sku_id: Specifies the Sku of the node agents that will be created in the Batch pool.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Batch pool. Changing this forces a new resource to be created.
        :param pulumi.Input[dict] start_task: A `start_task` block that describes the start task settings for the Batch pool.
        :param pulumi.Input[dict] storage_image_reference: A `storage_image_reference` for the virtual machines that will compose the Batch pool.
        :param pulumi.Input[str] vm_size: Specifies the size of the VM created in the Batch pool.

        The **auto_scale** object supports the following:

          * `evaluationInterval` (`pulumi.Input[str]`)
          * `formula` (`pulumi.Input[str]`)

        The **certificates** object supports the following:

          * `id` (`pulumi.Input[str]`) - The Batch pool ID.
          * `storeLocation` (`pulumi.Input[str]`)
          * `storeName` (`pulumi.Input[str]`)
          * `visibilities` (`pulumi.Input[list]`)

        The **container_configuration** object supports the following:

          * `containerRegistries` (`pulumi.Input[list]`)
          * `password` (`pulumi.Input[str]`)
          * `registryServer` (`pulumi.Input[str]`)
          * `userName` (`pulumi.Input[str]`)
          * `type` (`pulumi.Input[str]`)

        The **fixed_scale** object supports the following:

          * `resizeTimeout` (`pulumi.Input[str]`)
          * `targetDedicatedNodes` (`pulumi.Input[float]`)
          * `targetLowPriorityNodes` (`pulumi.Input[float]`)

        The **start_task** object supports the following:

          * `commandLine` (`pulumi.Input[str]`)
          * `environment` (`pulumi.Input[dict]`)
          * `maxTaskRetryCount` (`pulumi.Input[float]`)
          * `resourceFiles` (`pulumi.Input[list]`)
          * `autoStorageContainerName` (`pulumi.Input[str]`)
          * `blobPrefix` (`pulumi.Input[str]`)
          * `fileMode` (`pulumi.Input[str]`)
          * `filePath` (`pulumi.Input[str]`)
          * `httpUrl` (`pulumi.Input[str]`)
          * `storageContainerUrl` (`pulumi.Input[str]`)
          * `userIdentity` (`pulumi.Input[dict]`)
          * `autoUser` (`pulumi.Input[dict]`)
          * `elevationLevel` (`pulumi.Input[str]`)
          * `scope` (`pulumi.Input[str]`)
          * `userName` (`pulumi.Input[str]`)
          * `waitForSuccess` (`pulumi.Input[bool]`)

        The **storage_image_reference** object supports the following:

          * `id` (`pulumi.Input[str]`) - The Batch pool ID.
          * `offer` (`pulumi.Input[str]`)
          * `publisher` (`pulumi.Input[str]`)
          * `sku` (`pulumi.Input[str]`)
          * `version` (`pulumi.Input[str]`)

        > This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/batch_pool.html.markdown.
        """
        # Legacy keyword aliases kept for backward compatibility; both emit
        # DeprecationWarning and forward to the modern parameter.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = utilities.get_version()
        # opts.id set means "adopt an existing resource": properties must then
        # come in via __props__ (see Pool.get), not as constructor arguments.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            # Required properties are validated eagerly so a misconfiguration
            # fails before the resource is registered with the engine.
            if account_name is None:
                raise TypeError("Missing required property 'account_name'")
            __props__['account_name'] = account_name
            __props__['auto_scale'] = auto_scale
            __props__['certificates'] = certificates
            __props__['container_configuration'] = container_configuration
            __props__['display_name'] = display_name
            __props__['fixed_scale'] = fixed_scale
            __props__['max_tasks_per_node'] = max_tasks_per_node
            __props__['name'] = name
            if node_agent_sku_id is None:
                raise TypeError("Missing required property 'node_agent_sku_id'")
            __props__['node_agent_sku_id'] = node_agent_sku_id
            if resource_group_name is None:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            __props__['start_task'] = start_task
            __props__['stop_pending_resize_operation'] = stop_pending_resize_operation
            if storage_image_reference is None:
                raise TypeError("Missing required property 'storage_image_reference'")
            __props__['storage_image_reference'] = storage_image_reference
            if vm_size is None:
                raise TypeError("Missing required property 'vm_size'")
            __props__['vm_size'] = vm_size
        super(Pool, __self__).__init__(
            'azure:batch/pool:Pool',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name, id, opts=None, account_name=None, auto_scale=None, certificates=None, container_configuration=None, display_name=None, fixed_scale=None, max_tasks_per_node=None, name=None, node_agent_sku_id=None, resource_group_name=None, start_task=None, stop_pending_resize_operation=None, storage_image_reference=None, vm_size=None):
        """
        Get an existing Pool resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param str id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] account_name: Specifies the name of the Batch account in which the pool will be created. Changing this forces a new resource to be created.
        :param pulumi.Input[dict] auto_scale: A `auto_scale` block that describes the scale settings when using auto scale.
        :param pulumi.Input[list] certificates: One or more `certificate` blocks that describe the certificates to be installed on each compute node in the pool.
        :param pulumi.Input[dict] container_configuration: The container configuration used in the pool's VMs.
        :param pulumi.Input[str] display_name: Specifies the display name of the Batch pool.
        :param pulumi.Input[dict] fixed_scale: A `fixed_scale` block that describes the scale settings when using fixed scale.
        :param pulumi.Input[float] max_tasks_per_node: Specifies the maximum number of tasks that can run concurrently on a single compute node in the pool. Defaults to `1`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the Batch pool. Changing this forces a new resource to be created.
        :param pulumi.Input[str] node_agent_sku_id: Specifies the Sku of the node agents that will be created in the Batch pool.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Batch pool. Changing this forces a new resource to be created.
        :param pulumi.Input[dict] start_task: A `start_task` block that describes the start task settings for the Batch pool.
        :param pulumi.Input[dict] storage_image_reference: A `storage_image_reference` for the virtual machines that will compose the Batch pool.
        :param pulumi.Input[str] vm_size: Specifies the size of the VM created in the Batch pool.

        The **auto_scale** object supports the following:

          * `evaluationInterval` (`pulumi.Input[str]`)
          * `formula` (`pulumi.Input[str]`)

        The **certificates** object supports the following:

          * `id` (`pulumi.Input[str]`) - The Batch pool ID.
          * `storeLocation` (`pulumi.Input[str]`)
          * `storeName` (`pulumi.Input[str]`)
          * `visibilities` (`pulumi.Input[list]`)

        The **container_configuration** object supports the following:

          * `containerRegistries` (`pulumi.Input[list]`)
          * `password` (`pulumi.Input[str]`)
          * `registryServer` (`pulumi.Input[str]`)
          * `userName` (`pulumi.Input[str]`)
          * `type` (`pulumi.Input[str]`)

        The **fixed_scale** object supports the following:

          * `resizeTimeout` (`pulumi.Input[str]`)
          * `targetDedicatedNodes` (`pulumi.Input[float]`)
          * `targetLowPriorityNodes` (`pulumi.Input[float]`)

        The **start_task** object supports the following:

          * `commandLine` (`pulumi.Input[str]`)
          * `environment` (`pulumi.Input[dict]`)
          * `maxTaskRetryCount` (`pulumi.Input[float]`)
          * `resourceFiles` (`pulumi.Input[list]`)
          * `autoStorageContainerName` (`pulumi.Input[str]`)
          * `blobPrefix` (`pulumi.Input[str]`)
          * `fileMode` (`pulumi.Input[str]`)
          * `filePath` (`pulumi.Input[str]`)
          * `httpUrl` (`pulumi.Input[str]`)
          * `storageContainerUrl` (`pulumi.Input[str]`)
          * `userIdentity` (`pulumi.Input[dict]`)
          * `autoUser` (`pulumi.Input[dict]`)
          * `elevationLevel` (`pulumi.Input[str]`)
          * `scope` (`pulumi.Input[str]`)
          * `userName` (`pulumi.Input[str]`)
          * `waitForSuccess` (`pulumi.Input[bool]`)

        The **storage_image_reference** object supports the following:

          * `id` (`pulumi.Input[str]`) - The Batch pool ID.
          * `offer` (`pulumi.Input[str]`)
          * `publisher` (`pulumi.Input[str]`)
          * `sku` (`pulumi.Input[str]`)
          * `version` (`pulumi.Input[str]`)

        > This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/batch_pool.html.markdown.
        """
        # Binding the provider id into opts makes the constructor take the
        # "adopt existing resource" path instead of creating a new one.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = dict()
        __props__["account_name"] = account_name
        __props__["auto_scale"] = auto_scale
        __props__["certificates"] = certificates
        __props__["container_configuration"] = container_configuration
        __props__["display_name"] = display_name
        __props__["fixed_scale"] = fixed_scale
        __props__["max_tasks_per_node"] = max_tasks_per_node
        __props__["name"] = name
        __props__["node_agent_sku_id"] = node_agent_sku_id
        __props__["resource_group_name"] = resource_group_name
        __props__["start_task"] = start_task
        __props__["stop_pending_resize_operation"] = stop_pending_resize_operation
        __props__["storage_image_reference"] = storage_image_reference
        __props__["vm_size"] = vm_size
        return Pool(resource_name, opts=opts, __props__=__props__)
    def translate_output_property(self, prop):
        """Map a provider (camelCase) output property name to Python snake_case."""
        return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        """Map a Python snake_case input property name to the provider's camelCase."""
        return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 48.135211
| 404
| 0.637055
| 2,003
| 17,088
| 5.217673
| 0.112332
| 0.101043
| 0.083054
| 0.021816
| 0.802985
| 0.780499
| 0.770453
| 0.745479
| 0.726438
| 0.716104
| 0
| 0.000315
| 0.256028
| 17,088
| 354
| 405
| 48.271186
| 0.821757
| 0.469686
| 0
| 0.022222
| 1
| 0
| 0.175005
| 0.041916
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044444
| false
| 0.011111
| 0.066667
| 0.022222
| 0.311111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
023f2bf6eb7d2a1684f316bd6a1e218de1ec7cc2
| 26,231
|
py
|
Python
|
tests/rule_based_profiler/bobby_user_workflow_fixture.py
|
rishabh-bhargava/great_expectations
|
e3ce2d094536a2bc738f92e5686005390d694105
|
[
"Apache-2.0"
] | null | null | null |
tests/rule_based_profiler/bobby_user_workflow_fixture.py
|
rishabh-bhargava/great_expectations
|
e3ce2d094536a2bc738f92e5686005390d694105
|
[
"Apache-2.0"
] | null | null | null |
tests/rule_based_profiler/bobby_user_workflow_fixture.py
|
rishabh-bhargava/great_expectations
|
e3ce2d094536a2bc738f92e5686005390d694105
|
[
"Apache-2.0"
] | null | null | null |
from typing import List
import pytest
from freezegun import freeze_time
from ruamel.yaml import YAML
from great_expectations.core import ExpectationConfiguration, ExpectationSuite
# TODO: Move these fixtures to integration tests
from great_expectations.data_context.util import file_relative_path
@pytest.fixture
@freeze_time("09/26/2019 13:42:41")
def bobby_columnar_table_multi_batch():
"""
# TODO: <Alex>ALEX -- Add DocString</Alex>
"""
verbose_profiler_config_file_path: str = file_relative_path(
__file__, "bobby_user_workflow_verbose_profiler_config.yml"
)
verbose_profiler_config: str
with open(verbose_profiler_config_file_path) as f:
verbose_profiler_config = f.read()
my_row_count_range_rule_expectation_configurations_oneshot_sampling_method: List[
ExpectationConfiguration
] = [
ExpectationConfiguration(
**{
"kwargs": {"min_value": 6179, "max_value": 9821, "mostly": 1.0},
"expectation_type": "expect_table_row_count_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "table.row_count",
"domain_kwargs": {},
},
"num_batches": 2,
},
},
},
),
]
my_column_ranges_rule_expectation_configurations_oneshot_sampling_method: List[
ExpectationConfiguration
] = [
ExpectationConfiguration(
**{
"kwargs": {
"column": "VendorID",
"min_value": 1,
"max_value": 1,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "VendorID",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "VendorID",
"min_value": 4,
"max_value": 4,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "VendorID",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "passenger_count",
"min_value": -1,
"max_value": 2,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "passenger_count",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "passenger_count",
"min_value": 6,
"max_value": 6,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "passenger_count",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "trip_distance",
"min_value": 0.0,
"max_value": 0.0,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "trip_distance",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "trip_distance",
"min_value": 10.52,
"max_value": 84.95,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "trip_distance",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "RatecodeID",
"min_value": 1,
"max_value": 1,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "RatecodeID",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "RatecodeID",
"min_value": 4,
"max_value": 7,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "RatecodeID",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "PULocationID",
"min_value": 1,
"max_value": 1,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "PULocationID",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "PULocationID",
"min_value": 265,
"max_value": 265,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "PULocationID",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "DOLocationID",
"min_value": 1,
"max_value": 1,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "DOLocationID",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "DOLocationID",
"min_value": 265,
"max_value": 265,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "DOLocationID",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "payment_type",
"min_value": 1,
"max_value": 1,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "payment_type",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "payment_type",
"min_value": 4,
"max_value": 4,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "payment_type",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "fare_amount",
"min_value": -92.96,
"max_value": 0.0,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "fare_amount",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "fare_amount",
"min_value": 0.0,
"max_value": 6689.35,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "fare_amount",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "extra",
"min_value": -83.9,
"max_value": 0.0,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "extra",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "extra",
"min_value": 1.2,
"max_value": 10.3,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "extra",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "mta_tax",
"min_value": -0.5,
"max_value": -0.5,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "mta_tax",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "mta_tax",
"min_value": 0.0,
"max_value": 86.41,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "mta_tax",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "tip_amount",
"min_value": 0.0,
"max_value": 0.0,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "tip_amount",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "tip_amount",
"min_value": 9.3,
"max_value": 112.4,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "tip_amount",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "tolls_amount",
"min_value": 0.0,
"max_value": 0.0,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "tolls_amount",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "tolls_amount",
"min_value": 0.0,
"max_value": 1129.07,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "tolls_amount",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "improvement_surcharge",
"min_value": -0.3,
"max_value": -0.3,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "improvement_surcharge",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "improvement_surcharge",
"min_value": 0.3,
"max_value": 0.3,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "improvement_surcharge",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "total_amount",
"min_value": -90.46,
"max_value": 0.0,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "total_amount",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "total_amount",
"min_value": 0.0,
"max_value": 6264.59,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "total_amount",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "congestion_surcharge",
"min_value": -5.8,
"max_value": 3.3,
"mostly": 1.0,
},
"expectation_type": "expect_column_min_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.min",
"domain_kwargs": {
"column": "congestion_surcharge",
},
},
"num_batches": 2,
},
},
}
),
ExpectationConfiguration(
**{
"kwargs": {
"column": "congestion_surcharge",
"min_value": -3.3,
"max_value": 5.8,
"mostly": 1.0,
},
"expectation_type": "expect_column_max_to_be_between",
"meta": {
"profiler_details": {
"metric_configuration": {
"metric_name": "column.max",
"domain_kwargs": {
"column": "congestion_surcharge",
},
},
"num_batches": 2,
},
},
},
),
]
expectation_configurations: List[ExpectationConfiguration] = []
expectation_configurations.extend(
my_row_count_range_rule_expectation_configurations_oneshot_sampling_method
)
expectation_configurations.extend(
my_column_ranges_rule_expectation_configurations_oneshot_sampling_method
)
expectation_suite_name_oneshot_sampling_method: str = (
"bobby_columnar_table_multi_batch_oneshot_sampling_method"
)
expected_expectation_suite_oneshot_sampling_method: ExpectationSuite = (
ExpectationSuite(
expectation_suite_name=expectation_suite_name_oneshot_sampling_method
)
)
expectation_configuration: ExpectationConfiguration
for expectation_configuration in expectation_configurations:
expected_expectation_suite_oneshot_sampling_method.add_expectation(
expectation_configuration
)
yaml = YAML()
profiler_config: dict = yaml.load(verbose_profiler_config)
expected_expectation_suite_oneshot_sampling_method.add_citation(
comment="Suite created by Rule-Based Profiler with the following config",
profiler_config=profiler_config,
)
return {
"profiler_config": verbose_profiler_config,
"test_configuration_oneshot_sampling_method": {
"expectation_suite_name": expectation_suite_name_oneshot_sampling_method,
"expected_expectation_suite": expected_expectation_suite_oneshot_sampling_method,
},
}
| 35.021362
| 93
| 0.342267
| 1,483
| 26,231
| 5.661497
| 0.101146
| 0.085755
| 0.029538
| 0.070152
| 0.866127
| 0.850405
| 0.821105
| 0.804669
| 0.798833
| 0.762863
| 0
| 0.021132
| 0.558004
| 26,231
| 748
| 94
| 35.068182
| 0.703036
| 0.003431
| 0
| 0.636115
| 0
| 0
| 0.245014
| 0.047579
| 0
| 0
| 0
| 0.002674
| 0
| 1
| 0.001368
| false
| 0.005472
| 0.008208
| 0
| 0.010944
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a15fa0e92ed9fa30772c7edc80924f4a5f281ef
| 4,799
|
py
|
Python
|
resources/plans.py
|
axonepro/sdk-ooti
|
146ba758f571352d02daa56349e8b3affd8ca5a9
|
[
"Unlicense"
] | 1
|
2021-03-13T16:04:54.000Z
|
2021-03-13T16:04:54.000Z
|
resources/plans.py
|
axonepro/sdk-ooti
|
146ba758f571352d02daa56349e8b3affd8ca5a9
|
[
"Unlicense"
] | 7
|
2021-07-21T12:42:39.000Z
|
2022-01-06T10:34:04.000Z
|
resources/plans.py
|
axonepro/sdk-ooti
|
146ba758f571352d02daa56349e8b3affd8ca5a9
|
[
"Unlicense"
] | 2
|
2021-06-22T08:10:48.000Z
|
2021-09-01T09:16:41.000Z
|
import requests
import json
from .helper import Helper
class Plans(Helper):
    """Client wrapper around the OOTI ``v1/plans`` REST endpoints.

    Each method assembles the route for one endpoint, hands the HTTP call to
    ``Helper.process_request`` and funnels the raw response through
    ``Helper.process_response`` before returning it.
    """

    def __init__(self, base_url, org_pk, teams_pk, access_token, _csrf_token, headers, pagination):
        super().__init__(base_url, org_pk, teams_pk, access_token, _csrf_token, headers, pagination)

    def get_plans_list_action(self, project_pk):
        """Get data needed to perform actions.

        Keyword arguments:
        project_pk -- the pk of the project
        """
        endpoint = 'v1/plans/list-action/{0}/'.format(project_pk)
        raw_response = self.process_request(requests, 'GET', self.base_url, endpoint, self.headers, None, None)
        return self.process_response(raw_response)

    def create_plans_list_action(self, project_pk):
        """Create a plans list action.

        Keyword arguments:
        project_pk -- the pk of the project
        """
        endpoint = 'v1/plans/list-action/{0}/'.format(project_pk)
        raw_response = self.process_request(requests, 'POST', self.base_url, endpoint, self.headers, None, None)
        return self.process_response(raw_response)

    def get_plans_list(self, project_pk, page=1):
        """Get the paginated list of plans for a project.

        Keyword arguments:
        project_pk -- the pk of the project
        page -- page number to fetch (page size comes from ``self.pagination``)
        """
        endpoint = 'v1/plans/list/{0}/?page_size={1}&page={2}'.format(project_pk, self.pagination, page)
        raw_response = self.process_request(requests, 'GET', self.base_url, endpoint, self.headers, None, None)
        # Second argument requests a paginated response from the helper.
        return self.process_response(raw_response, True)

    def create_plan(self, project_pk, data):
        """Create a plan.

        Keyword arguments:
        project_pk -- the pk of the project
        data -- data create :
            {
                "zone": 0,
                "name_fr": "string",
                "name_en": "string",
                "plan_format": "string",
                "scale": "string",
                "level": "string",
                "lot": 0,
                "is_default": true,
                "progress": 0,
                "sub_zone_code": "string",
                "plan_code": "string",
                "project": 0,
                "area": 0,
                "code": "string", ("pln", "det", "elv", "sec")
                "custom_field_1": "string",
                "custom_field_2": "string",
                "custom_field_3": "string",
                "org": 0
            }
        """
        endpoint = 'v1/plans/list/{0}/'.format(project_pk)
        raw_response = self.process_request(requests, 'POST', self.base_url, endpoint, self.headers, None, json.dumps(data))
        return self.process_response(raw_response)

    def get_plan_details(self, plan_pk):
        """Get the details of one plan, with its plan phases expanded.

        Keyword arguments:
        plan_pk -- the pk of the plan
        """
        endpoint = 'v1/plans/{0}/?plan_phases=true'.format(plan_pk)
        raw_response = self.process_request(requests, 'GET', self.base_url, endpoint, self.headers, None, None)
        return self.process_response(raw_response)

    def update_plan(self, plan_pk, data):
        """Partially update a plan (HTTP PATCH).

        Keyword arguments:
        plan_pk -- the pk of the plan
        data -- fields to update; same shape as ``create_plan``'s payload,
            without the "org" key
        """
        endpoint = 'v1/plans/{0}/'.format(plan_pk)
        raw_response = self.process_request(requests, 'PATCH', self.base_url, endpoint, self.headers, None, json.dumps(data))
        return self.process_response(raw_response)

    def delete_plan(self, plan_pk):
        """Delete a plan.

        Keyword arguments:
        plan_pk -- the pk of the plan
        """
        endpoint = 'v1/plans/{0}/'.format(plan_pk)
        raw_response = self.process_request(requests, 'DELETE', self.base_url, endpoint, self.headers, None, None)
        return self.process_response(raw_response)

    def get_plans_planphases_list(self, project_pk, page=1):
        """Get the paginated list of plans with their plan phases expanded.

        Keyword arguments:
        project_pk -- the pk of the project
        page -- page number to fetch (page size comes from ``self.pagination``)
        """
        endpoint = 'v1/plans/list/{0}/?page_size={1}&page={2}&plan_phases=true'.format(project_pk, self.pagination, page)
        raw_response = self.process_request(requests, 'GET', self.base_url, endpoint, self.headers, None, None)
        return self.process_response(raw_response, True)
| 32.869863
| 118
| 0.555532
| 549
| 4,799
| 4.648452
| 0.143898
| 0.068966
| 0.038793
| 0.028213
| 0.862853
| 0.856583
| 0.83464
| 0.833464
| 0.806034
| 0.789577
| 0
| 0.011923
| 0.3184
| 4,799
| 146
| 119
| 32.869863
| 0.768267
| 0.344238
| 0
| 0.421053
| 0
| 0
| 0.098794
| 0.069623
| 0
| 0
| 0
| 0
| 0
| 1
| 0.236842
| false
| 0
| 0.078947
| 0
| 0.552632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
5a31b8fd3699d97f4ab8f2af62858d7d21c3e9a9
| 323,382
|
py
|
Python
|
tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py
|
renovate-bot/python-contact-center-insights
|
d133f4028d862cc39d10ba4b0879df256a3505c1
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py
|
renovate-bot/python-contact-center-insights
|
d133f4028d862cc39d10ba4b0879df256a3505c1
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py
|
renovate-bot/python-contact-center-insights
|
d133f4028d862cc39d10ba4b0879df256a3505c1
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.contact_center_insights_v1.services.contact_center_insights import (
ContactCenterInsightsAsyncClient,
)
from google.cloud.contact_center_insights_v1.services.contact_center_insights import (
ContactCenterInsightsClient,
)
from google.cloud.contact_center_insights_v1.services.contact_center_insights import (
pagers,
)
from google.cloud.contact_center_insights_v1.services.contact_center_insights import (
transports,
)
from google.cloud.contact_center_insights_v1.types import contact_center_insights
from google.cloud.contact_center_insights_v1.types import resources
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import duration_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
import google.auth
def client_cert_source_callback():
    """Return a dummy (cert, key) bytes pair for mTLS tests."""
    dummy_cert = b"cert bytes"
    dummy_key = b"key bytes"
    return dummy_cert, dummy_key
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return the client's DEFAULT_ENDPOINT, swapped for a fake public
    hostname when it points at localhost."""
    endpoint = client.DEFAULT_ENDPOINT
    if "localhost" in endpoint:
        return "foo.googleapis.com"
    return endpoint
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps googleapis hosts to their mtls twins,
    passes mtls hosts and non-googleapis hosts through, and keeps None."""
    assert ContactCenterInsightsClient._get_default_mtls_endpoint(None) is None

    # (input, expected) pairs covering prod, sandbox, already-mtls, and
    # non-googleapis endpoints.
    cases = [
        ("example.googleapis.com", "example.mtls.googleapis.com"),
        ("example.mtls.googleapis.com", "example.mtls.googleapis.com"),
        ("example.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        ("example.mtls.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        ("api.example.com", "api.example.com"),
    ]
    for given, expected in cases:
        assert (
            ContactCenterInsightsClient._get_default_mtls_endpoint(given) == expected
        )
@pytest.mark.parametrize(
    "client_class", [ContactCenterInsightsClient, ContactCenterInsightsAsyncClient,]
)
def test_contact_center_insights_client_from_service_account_info(client_class):
    """A client built from service-account info adopts the factory's
    credentials and targets the default API host."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        # The patched factory hands back our anonymous credentials.
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == "contactcenterinsights.googleapis.com:443"
@pytest.mark.parametrize(
    "transport_class,transport_name",
    [
        (transports.ContactCenterInsightsGrpcTransport, "grpc"),
        (transports.ContactCenterInsightsGrpcAsyncIOTransport, "grpc_asyncio"),
    ],
)
def test_contact_center_insights_client_service_account_always_use_jwt(
    transport_class, transport_name
):
    """with_always_use_jwt_access is invoked on the credentials exactly when
    the transport is created with always_use_jwt_access=True."""
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        # With the flag off, the credentials must be left untouched.
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()
@pytest.mark.parametrize(
    "client_class", [ContactCenterInsightsClient, ContactCenterInsightsAsyncClient,]
)
def test_contact_center_insights_client_from_service_account_file(client_class):
    """Both file-based constructors (from_service_account_file and its
    from_service_account_json alias) adopt the factory's credentials."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        # The *_json alias must behave identically to *_file.
        client = client_class.from_service_account_json("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == "contactcenterinsights.googleapis.com:443"
def test_contact_center_insights_client_get_transport_class():
    """get_transport_class resolves to the gRPC transport both by default
    and when asked for by its "grpc" name."""
    default_transport = ContactCenterInsightsClient.get_transport_class()
    known_transports = [
        transports.ContactCenterInsightsGrpcTransport,
    ]
    assert default_transport in known_transports

    named_transport = ContactCenterInsightsClient.get_transport_class("grpc")
    assert named_transport == transports.ContactCenterInsightsGrpcTransport
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            ContactCenterInsightsClient,
            transports.ContactCenterInsightsGrpcTransport,
            "grpc",
        ),
        (
            ContactCenterInsightsAsyncClient,
            transports.ContactCenterInsightsGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
@mock.patch.object(
    ContactCenterInsightsClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(ContactCenterInsightsClient),
)
@mock.patch.object(
    ContactCenterInsightsAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(ContactCenterInsightsAsyncClient),
)
def test_contact_center_insights_client_client_options(
    client_class, transport_class, transport_name
):
    """Client construction honors ClientOptions (api_endpoint,
    quota_project_id) and the GOOGLE_API_USE_MTLS_ENDPOINT environment
    variable, raising on unsupported env values."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(ContactCenterInsightsClient, "get_transport_class") as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(ContactCenterInsightsClient, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class()

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class()

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (
            ContactCenterInsightsClient,
            transports.ContactCenterInsightsGrpcTransport,
            "grpc",
            "true",
        ),
        (
            ContactCenterInsightsAsyncClient,
            transports.ContactCenterInsightsGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (
            ContactCenterInsightsClient,
            transports.ContactCenterInsightsGrpcTransport,
            "grpc",
            "false",
        ),
        (
            ContactCenterInsightsAsyncClient,
            transports.ContactCenterInsightsGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    ContactCenterInsightsClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(ContactCenterInsightsClient),
)
@mock.patch.object(
    ContactCenterInsightsAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(ContactCenterInsightsAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_contact_center_insights_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """With GOOGLE_API_USE_MTLS_ENDPOINT=auto, the client switches to the
    mTLS endpoint and cert source exactly when
    GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a client cert is
    available (explicitly via options, or implicitly via ADC)."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name, client_options=options)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            ContactCenterInsightsClient,
            transports.ContactCenterInsightsGrpcTransport,
            "grpc",
        ),
        (
            ContactCenterInsightsAsyncClient,
            transports.ContactCenterInsightsGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_contact_center_insights_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Scopes passed through ClientOptions reach the transport constructor."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            ContactCenterInsightsClient,
            transports.ContactCenterInsightsGrpcTransport,
            "grpc",
        ),
        (
            ContactCenterInsightsAsyncClient,
            transports.ContactCenterInsightsGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_contact_center_insights_client_client_options_credentials_file(
    client_class, transport_class, transport_name
):
    """A credentials file named in ClientOptions reaches the transport
    constructor as credentials_file."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_contact_center_insights_client_client_options_from_dict():
    """ClientOptions supplied as a plain dict is accepted and forwarded to
    the gRPC transport constructor."""
    with mock.patch(
        "google.cloud.contact_center_insights_v1.services.contact_center_insights.transports.ContactCenterInsightsGrpcTransport.__init__"
    ) as grpc_transport:
        grpc_transport.return_value = None
        client = ContactCenterInsightsClient(
            client_options={"api_endpoint": "squid.clam.whelk"}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_create_conversation(
    transport: str = "grpc",
    request_type=contact_center_insights.CreateConversationRequest,
):
    """create_conversation sends the request to the stub once and returns the
    Conversation the stub produced."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_conversation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): call_metadata and expire_time are set here but not
        # asserted on below.
        call.return_value = resources.Conversation(
            name="name_value",
            language_code="language_code_value",
            agent_id="agent_id_value",
            medium=resources.Conversation.Medium.PHONE_CALL,
            turn_count=1105,
            call_metadata=resources.Conversation.CallMetadata(customer_channel=1706),
            expire_time=timestamp_pb2.Timestamp(seconds=751),
        )
        response = client.create_conversation(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.CreateConversationRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Conversation)
    assert response.name == "name_value"
    assert response.language_code == "language_code_value"
    assert response.agent_id == "agent_id_value"
    assert response.medium == resources.Conversation.Medium.PHONE_CALL
    assert response.turn_count == 1105
def test_create_conversation_from_dict():
    """Re-run the sync create test with the request supplied as a plain dict."""
    test_create_conversation(request_type=dict)
def test_create_conversation_empty_call():
    """Calling create_conversation with no arguments sends a default
    CreateConversationRequest."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_conversation), "__call__"
    ) as call:
        client.create_conversation()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.CreateConversationRequest()
@pytest.mark.asyncio
async def test_create_conversation_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.CreateConversationRequest,
):
    """Async variant: create_conversation awaits the stub call and returns
    the Conversation the fake call produced."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_conversation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Conversation(
                name="name_value",
                language_code="language_code_value",
                agent_id="agent_id_value",
                medium=resources.Conversation.Medium.PHONE_CALL,
                turn_count=1105,
            )
        )
        response = await client.create_conversation(request)

        # Establish that the underlying gRPC stub method was called.
        # NOTE(review): only truthiness of the call count is checked here,
        # unlike the sync test's exact == 1 — confirm whether that is
        # intentional.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.CreateConversationRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Conversation)
    assert response.name == "name_value"
    assert response.language_code == "language_code_value"
    assert response.agent_id == "agent_id_value"
    assert response.medium == resources.Conversation.Medium.PHONE_CALL
    assert response.turn_count == 1105
@pytest.mark.asyncio
async def test_create_conversation_async_from_dict():
    """Smoke-test the async create path when the request is a plain dict."""
    await test_create_conversation_async(request_type=dict)
def test_create_conversation_field_headers():
    """Routing fields in the request must surface as x-goog-request-params."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the URI-routed field a non-empty value.
    request = contact_center_insights.CreateConversationRequest()
    request.parent = "parent/value"

    with mock.patch.object(
        type(client.transport.create_conversation), "__call__"
    ) as rpc:
        rpc.return_value = resources.Conversation()
        client.create_conversation(request)

        # Exactly one stub invocation, with our request passed through as-is.
        assert len(rpc.mock_calls) == 1
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header must appear in the outgoing metadata.
    assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_create_conversation_field_headers_async():
    """Async variant: routing fields must surface as x-goog-request-params."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the URI-routed field a non-empty value.
    request = contact_center_insights.CreateConversationRequest()
    request.parent = "parent/value"

    with mock.patch.object(
        type(client.transport.create_conversation), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Conversation()
        )
        await client.create_conversation(request)

        # The stub received our request unchanged.
        assert len(rpc.mock_calls)
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header must appear in the outgoing metadata.
    assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
def test_create_conversation_flattened():
    """Flattened keyword arguments must be copied into the request proto."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
        type(client.transport.create_conversation), "__call__"
    ) as rpc:
        rpc.return_value = resources.Conversation()

        # Pass a truthy value for each flattened field as a keyword argument.
        client.create_conversation(
            parent="parent_value",
            conversation=resources.Conversation(
                call_metadata=resources.Conversation.CallMetadata(customer_channel=1706)
            ),
            conversation_id="conversation_id_value",
        )

        # The single captured request must carry every flattened value.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        sent = call_args[0]
        assert sent.parent == "parent_value"
        assert sent.conversation == resources.Conversation(
            call_metadata=resources.Conversation.CallMetadata(customer_channel=1706)
        )
        assert sent.conversation_id == "conversation_id_value"
def test_create_conversation_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        client.create_conversation(
            contact_center_insights.CreateConversationRequest(),
            parent="parent_value",
            conversation=resources.Conversation(
                call_metadata=resources.Conversation.CallMetadata(customer_channel=1706)
            ),
            conversation_id="conversation_id_value",
        )
@pytest.mark.asyncio
async def test_create_conversation_flattened_async():
    """Async variant: flattened kwargs must be copied into the request proto."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_conversation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  (A redundant
        # bare-Conversation assignment that was immediately overwritten has
        # been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Conversation()
        )

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_conversation(
            parent="parent_value",
            conversation=resources.Conversation(
                call_metadata=resources.Conversation.CallMetadata(customer_channel=1706)
            ),
            conversation_id="conversation_id_value",
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].conversation
        mock_val = resources.Conversation(
            call_metadata=resources.Conversation.CallMetadata(customer_channel=1706)
        )
        assert arg == mock_val
        arg = args[0].conversation_id
        mock_val = "conversation_id_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_create_conversation_flattened_error_async():
    """Async variant: request object plus flattened kwargs must raise."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        await client.create_conversation(
            contact_center_insights.CreateConversationRequest(),
            parent="parent_value",
            conversation=resources.Conversation(
                call_metadata=resources.Conversation.CallMetadata(customer_channel=1706)
            ),
            conversation_id="conversation_id_value",
        )
def test_update_conversation(
    transport: str = "grpc",
    request_type=contact_center_insights.UpdateConversationRequest,
):
    """Exercise update_conversation against a mocked gRPC stub."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Every field is optional in proto3, so an empty request is valid.
    request = request_type()

    with mock.patch.object(
        type(client.transport.update_conversation), "__call__"
    ) as rpc:
        # Canned Conversation returned by the stub.
        rpc.return_value = resources.Conversation(
            name="name_value",
            language_code="language_code_value",
            agent_id="agent_id_value",
            medium=resources.Conversation.Medium.PHONE_CALL,
            turn_count=1105,
            call_metadata=resources.Conversation.CallMetadata(customer_channel=1706),
            expire_time=timestamp_pb2.Timestamp(seconds=751),
        )
        response = client.update_conversation(request)

        # Exactly one stub invocation with a default request proto.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.UpdateConversationRequest()

    # The caller sees the stubbed Conversation fields.
    assert isinstance(response, resources.Conversation)
    assert response.name == "name_value"
    assert response.language_code == "language_code_value"
    assert response.agent_id == "agent_id_value"
    assert response.medium == resources.Conversation.Medium.PHONE_CALL
    assert response.turn_count == 1105
def test_update_conversation_from_dict():
    """Smoke-test update_conversation when the request is a plain dict."""
    test_update_conversation(request_type=dict)
def test_update_conversation_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields
    still sends a default request proto to the stub."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(
        type(client.transport.update_conversation), "__call__"
    ) as rpc:
        client.update_conversation()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.UpdateConversationRequest()
@pytest.mark.asyncio
async def test_update_conversation_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.UpdateConversationRequest,
):
    """Exercise the async update_conversation path against a mocked stub."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Every field is optional in proto3, so an empty request is valid.
    request = request_type()

    with mock.patch.object(
        type(client.transport.update_conversation), "__call__"
    ) as rpc:
        # Canned response delivered through a fake unary-unary call.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Conversation(
                name="name_value",
                language_code="language_code_value",
                agent_id="agent_id_value",
                medium=resources.Conversation.Medium.PHONE_CALL,
                turn_count=1105,
            )
        )
        response = await client.update_conversation(request)

        # The stub must have been invoked with a default request proto.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.UpdateConversationRequest()

    # The caller sees the stubbed Conversation fields.
    assert isinstance(response, resources.Conversation)
    assert response.name == "name_value"
    assert response.language_code == "language_code_value"
    assert response.agent_id == "agent_id_value"
    assert response.medium == resources.Conversation.Medium.PHONE_CALL
    assert response.turn_count == 1105
@pytest.mark.asyncio
async def test_update_conversation_async_from_dict():
    """Smoke-test the async update path when the request is a plain dict."""
    await test_update_conversation_async(request_type=dict)
def test_update_conversation_field_headers():
    """Routing fields in the request must surface as x-goog-request-params."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the URI-routed field a non-empty value.
    request = contact_center_insights.UpdateConversationRequest()
    request.conversation.name = "conversation.name/value"

    with mock.patch.object(
        type(client.transport.update_conversation), "__call__"
    ) as rpc:
        rpc.return_value = resources.Conversation()
        client.update_conversation(request)

        # Exactly one stub invocation, with our request passed through as-is.
        assert len(rpc.mock_calls) == 1
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header must appear in the outgoing metadata.
    expected_header = (
        "x-goog-request-params",
        "conversation.name=conversation.name/value",
    )
    assert expected_header in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_update_conversation_field_headers_async():
    """Async variant: routing fields must surface as x-goog-request-params."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the URI-routed field a non-empty value.
    request = contact_center_insights.UpdateConversationRequest()
    request.conversation.name = "conversation.name/value"

    with mock.patch.object(
        type(client.transport.update_conversation), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Conversation()
        )
        await client.update_conversation(request)

        # The stub received our request unchanged.
        assert len(rpc.mock_calls)
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header must appear in the outgoing metadata.
    expected_header = (
        "x-goog-request-params",
        "conversation.name=conversation.name/value",
    )
    assert expected_header in call_kwargs["metadata"]
def test_update_conversation_flattened():
    """Flattened keyword arguments must be copied into the request proto."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
        type(client.transport.update_conversation), "__call__"
    ) as rpc:
        rpc.return_value = resources.Conversation()

        # Pass a truthy value for each flattened field as a keyword argument.
        client.update_conversation(
            conversation=resources.Conversation(
                call_metadata=resources.Conversation.CallMetadata(customer_channel=1706)
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # The single captured request must carry every flattened value.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        sent = call_args[0]
        assert sent.conversation == resources.Conversation(
            call_metadata=resources.Conversation.CallMetadata(customer_channel=1706)
        )
        assert sent.update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
def test_update_conversation_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        client.update_conversation(
            contact_center_insights.UpdateConversationRequest(),
            conversation=resources.Conversation(
                call_metadata=resources.Conversation.CallMetadata(customer_channel=1706)
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
@pytest.mark.asyncio
async def test_update_conversation_flattened_async():
    """Async variant: flattened kwargs must be copied into the request proto."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_conversation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  (A redundant
        # bare-Conversation assignment that was immediately overwritten has
        # been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Conversation()
        )

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_conversation(
            conversation=resources.Conversation(
                call_metadata=resources.Conversation.CallMetadata(customer_channel=1706)
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].conversation
        mock_val = resources.Conversation(
            call_metadata=resources.Conversation.CallMetadata(customer_channel=1706)
        )
        assert arg == mock_val
        arg = args[0].update_mask
        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
        assert arg == mock_val
@pytest.mark.asyncio
async def test_update_conversation_flattened_error_async():
    """Async variant: request object plus flattened kwargs must raise."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        await client.update_conversation(
            contact_center_insights.UpdateConversationRequest(),
            conversation=resources.Conversation(
                call_metadata=resources.Conversation.CallMetadata(customer_channel=1706)
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
def test_get_conversation(
    transport: str = "grpc", request_type=contact_center_insights.GetConversationRequest
):
    """Exercise get_conversation against a mocked gRPC stub."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Every field is optional in proto3, so an empty request is valid.
    request = request_type()

    with mock.patch.object(type(client.transport.get_conversation), "__call__") as rpc:
        # Canned Conversation returned by the stub.
        rpc.return_value = resources.Conversation(
            name="name_value",
            language_code="language_code_value",
            agent_id="agent_id_value",
            medium=resources.Conversation.Medium.PHONE_CALL,
            turn_count=1105,
            call_metadata=resources.Conversation.CallMetadata(customer_channel=1706),
            expire_time=timestamp_pb2.Timestamp(seconds=751),
        )
        response = client.get_conversation(request)

        # Exactly one stub invocation with a default request proto.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.GetConversationRequest()

    # The caller sees the stubbed Conversation fields.
    assert isinstance(response, resources.Conversation)
    assert response.name == "name_value"
    assert response.language_code == "language_code_value"
    assert response.agent_id == "agent_id_value"
    assert response.medium == resources.Conversation.Medium.PHONE_CALL
    assert response.turn_count == 1105
def test_get_conversation_from_dict():
    """Smoke-test get_conversation when the request is a plain dict."""
    test_get_conversation(request_type=dict)
def test_get_conversation_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields
    still sends a default request proto to the stub."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(type(client.transport.get_conversation), "__call__") as rpc:
        client.get_conversation()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.GetConversationRequest()
@pytest.mark.asyncio
async def test_get_conversation_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.GetConversationRequest,
):
    """Exercise the async get_conversation path against a mocked stub."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Every field is optional in proto3, so an empty request is valid.
    request = request_type()

    with mock.patch.object(type(client.transport.get_conversation), "__call__") as rpc:
        # Canned response delivered through a fake unary-unary call.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Conversation(
                name="name_value",
                language_code="language_code_value",
                agent_id="agent_id_value",
                medium=resources.Conversation.Medium.PHONE_CALL,
                turn_count=1105,
            )
        )
        response = await client.get_conversation(request)

        # The stub must have been invoked with a default request proto.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.GetConversationRequest()

    # The caller sees the stubbed Conversation fields.
    assert isinstance(response, resources.Conversation)
    assert response.name == "name_value"
    assert response.language_code == "language_code_value"
    assert response.agent_id == "agent_id_value"
    assert response.medium == resources.Conversation.Medium.PHONE_CALL
    assert response.turn_count == 1105
@pytest.mark.asyncio
async def test_get_conversation_async_from_dict():
    """Smoke-test the async get path when the request is a plain dict."""
    await test_get_conversation_async(request_type=dict)
def test_get_conversation_field_headers():
    """Routing fields in the request must surface as x-goog-request-params."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the URI-routed field a non-empty value.
    request = contact_center_insights.GetConversationRequest()
    request.name = "name/value"

    with mock.patch.object(type(client.transport.get_conversation), "__call__") as rpc:
        rpc.return_value = resources.Conversation()
        client.get_conversation(request)

        # Exactly one stub invocation, with our request passed through as-is.
        assert len(rpc.mock_calls) == 1
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header must appear in the outgoing metadata.
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_get_conversation_field_headers_async():
    """Async variant: routing fields must surface as x-goog-request-params."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the URI-routed field a non-empty value.
    request = contact_center_insights.GetConversationRequest()
    request.name = "name/value"

    with mock.patch.object(type(client.transport.get_conversation), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Conversation()
        )
        await client.get_conversation(request)

        # The stub received our request unchanged.
        assert len(rpc.mock_calls)
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header must appear in the outgoing metadata.
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
def test_get_conversation_flattened():
    """Flattened keyword arguments must be copied into the request proto."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.get_conversation), "__call__") as rpc:
        rpc.return_value = resources.Conversation()

        # Pass a truthy value for the flattened field as a keyword argument.
        client.get_conversation(name="name_value",)

        # The single captured request must carry the flattened value.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].name == "name_value"
def test_get_conversation_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        client.get_conversation(
            contact_center_insights.GetConversationRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_get_conversation_flattened_async():
    """Async variant: flattened kwargs must be copied into the request proto."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_conversation), "__call__") as call:
        # Designate an appropriate return value for the call.  (A redundant
        # bare-Conversation assignment that was immediately overwritten has
        # been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Conversation()
        )

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_conversation(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_get_conversation_flattened_error_async():
    """Async variant: request object plus flattened kwargs must raise."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        await client.get_conversation(
            contact_center_insights.GetConversationRequest(), name="name_value",
        )
def test_list_conversations(
    transport: str = "grpc",
    request_type=contact_center_insights.ListConversationsRequest,
):
    """The list RPC wraps the stubbed response in a sync pager."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Every field is optional in proto3, so an empty request is valid.
    request = request_type()

    with mock.patch.object(
        type(client.transport.list_conversations), "__call__"
    ) as rpc:
        rpc.return_value = contact_center_insights.ListConversationsResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_conversations(request)

        # Exactly one stub invocation with a default request proto.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.ListConversationsRequest()

    # The client surfaces a pager, not the raw response.
    assert isinstance(response, pagers.ListConversationsPager)
    assert response.next_page_token == "next_page_token_value"
def test_list_conversations_from_dict():
    """Smoke-test list_conversations when the request is a plain dict."""
    test_list_conversations(request_type=dict)
def test_list_conversations_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields
    still sends a default request proto to the stub."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(
        type(client.transport.list_conversations), "__call__"
    ) as rpc:
        client.list_conversations()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.ListConversationsRequest()
@pytest.mark.asyncio
async def test_list_conversations_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.ListConversationsRequest,
):
    """The async list RPC wraps the stubbed response in an async pager."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Every field is optional in proto3, so an empty request is valid.
    request = request_type()

    with mock.patch.object(
        type(client.transport.list_conversations), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListConversationsResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_conversations(request)

        # The stub must have been invoked with a default request proto.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.ListConversationsRequest()

    # The client surfaces an async pager, not the raw response.
    assert isinstance(response, pagers.ListConversationsAsyncPager)
    assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_conversations_async_from_dict():
    """Smoke-test the async list path when the request is a plain dict."""
    await test_list_conversations_async(request_type=dict)
def test_list_conversations_field_headers():
    """Routing fields in the request must surface as x-goog-request-params."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the URI-routed field a non-empty value.
    request = contact_center_insights.ListConversationsRequest()
    request.parent = "parent/value"

    with mock.patch.object(
        type(client.transport.list_conversations), "__call__"
    ) as rpc:
        rpc.return_value = contact_center_insights.ListConversationsResponse()
        client.list_conversations(request)

        # Exactly one stub invocation, with our request passed through as-is.
        assert len(rpc.mock_calls) == 1
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header must appear in the outgoing metadata.
    assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_list_conversations_field_headers_async():
    """Async variant: routing fields must surface as x-goog-request-params."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the URI-routed field a non-empty value.
    request = contact_center_insights.ListConversationsRequest()
    request.parent = "parent/value"

    with mock.patch.object(
        type(client.transport.list_conversations), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListConversationsResponse()
        )
        await client.list_conversations(request)

        # The stub received our request unchanged.
        assert len(rpc.mock_calls)
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header must appear in the outgoing metadata.
    assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
def test_list_conversations_flattened():
    """Flattened keyword arguments must be copied into the request proto."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
        type(client.transport.list_conversations), "__call__"
    ) as rpc:
        rpc.return_value = contact_center_insights.ListConversationsResponse()

        # Pass a truthy value for the flattened field as a keyword argument.
        client.list_conversations(parent="parent_value",)

        # The single captured request must carry the flattened value.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].parent == "parent_value"
def test_list_conversations_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        client.list_conversations(
            contact_center_insights.ListConversationsRequest(), parent="parent_value",
        )
@pytest.mark.asyncio
async def test_list_conversations_flattened_async():
    """Async variant: flattened kwargs must be copied into the request proto."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_conversations), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  (A redundant
        # plain-response assignment that was immediately overwritten has been
        # removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListConversationsResponse()
        )

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_conversations(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_list_conversations_flattened_error_async():
    """Async variant: request object plus flattened kwargs must raise."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        await client.list_conversations(
            contact_center_insights.ListConversationsRequest(), parent="parent_value",
        )
def test_list_conversations_pager():
    """The sync pager walks every page and carries the routing metadata."""
    client = ContactCenterInsightsClient(
        # Instantiate the credentials; the generated code passed the class
        # object, which only worked by accident.
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_conversations), "__call__"
    ) as call:
        # Four pages, then a RuntimeError sentinel that would fire if the
        # pager over-fetched.
        call.side_effect = (
            contact_center_insights.ListConversationsResponse(
                conversations=[
                    resources.Conversation(),
                    resources.Conversation(),
                    resources.Conversation(),
                ],
                next_page_token="abc",
            ),
            contact_center_insights.ListConversationsResponse(
                conversations=[], next_page_token="def",
            ),
            contact_center_insights.ListConversationsResponse(
                conversations=[resources.Conversation(),], next_page_token="ghi",
            ),
            contact_center_insights.ListConversationsResponse(
                conversations=[resources.Conversation(), resources.Conversation(),],
            ),
            RuntimeError,
        )

        # Expected routing metadata for an unset parent field.
        expected_metadata = (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_conversations(request={})

        assert pager._metadata == expected_metadata

        # Iterating the pager must yield all six conversations across pages.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, resources.Conversation) for i in results)
def test_list_conversations_pages():
    """list_conversations exposes per-page iteration with the expected tokens.

    Fixes: instantiate AnonymousCredentials (the bare class was being passed),
    matching every other test in this file.
    """
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_conversations), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            contact_center_insights.ListConversationsResponse(
                conversations=[
                    resources.Conversation(),
                    resources.Conversation(),
                    resources.Conversation(),
                ],
                next_page_token="abc",
            ),
            contact_center_insights.ListConversationsResponse(
                conversations=[], next_page_token="def",
            ),
            contact_center_insights.ListConversationsResponse(
                conversations=[resources.Conversation(),], next_page_token="ghi",
            ),
            contact_center_insights.ListConversationsResponse(
                conversations=[resources.Conversation(), resources.Conversation(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_conversations(request={}).pages)
        # The final page has no next_page_token, hence the trailing "".
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_conversations_async_pager():
    """Async list_conversations returns an async pager over all items.

    Fixes: instantiate AnonymousCredentials (the bare class was being passed),
    matching every other test in this file.
    """
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_conversations),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            contact_center_insights.ListConversationsResponse(
                conversations=[
                    resources.Conversation(),
                    resources.Conversation(),
                    resources.Conversation(),
                ],
                next_page_token="abc",
            ),
            contact_center_insights.ListConversationsResponse(
                conversations=[], next_page_token="def",
            ),
            contact_center_insights.ListConversationsResponse(
                conversations=[resources.Conversation(),], next_page_token="ghi",
            ),
            contact_center_insights.ListConversationsResponse(
                conversations=[resources.Conversation(), resources.Conversation(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_conversations(request={},)
        assert async_pager.next_page_token == "abc"
        # Async iteration flattens the 3 + 0 + 1 + 2 conversations above.
        responses = [response async for response in async_pager]
        assert len(responses) == 6
        assert all(isinstance(i, resources.Conversation) for i in responses)
@pytest.mark.asyncio
async def test_list_conversations_async_pages():
    """Async list_conversations exposes per-page iteration with expected tokens.

    Fixes: instantiate AnonymousCredentials (the bare class was being passed),
    matching every other test in this file.
    """
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_conversations),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            contact_center_insights.ListConversationsResponse(
                conversations=[
                    resources.Conversation(),
                    resources.Conversation(),
                    resources.Conversation(),
                ],
                next_page_token="abc",
            ),
            contact_center_insights.ListConversationsResponse(
                conversations=[], next_page_token="def",
            ),
            contact_center_insights.ListConversationsResponse(
                conversations=[resources.Conversation(),], next_page_token="ghi",
            ),
            contact_center_insights.ListConversationsResponse(
                conversations=[resources.Conversation(), resources.Conversation(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_conversations(request={})).pages:
            pages.append(page_)
        # The final page has no next_page_token, hence the trailing "".
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
def test_delete_conversation(
    transport: str = "grpc",
    request_type=contact_center_insights.DeleteConversationRequest,
):
    """delete_conversation over gRPC sends the default request and returns None."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_conversation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        response = client.delete_conversation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.DeleteConversationRequest()
    # Establish that the response is the type that we expect.
    assert response is None
def test_delete_conversation_from_dict():
    """delete_conversation also accepts the request as a plain dict."""
    test_delete_conversation(request_type=dict)
def test_delete_conversation_empty_call():
    """delete_conversation with no arguments sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_conversation), "__call__"
    ) as call:
        client.delete_conversation()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.DeleteConversationRequest()
@pytest.mark.asyncio
async def test_delete_conversation_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.DeleteConversationRequest,
):
    """Async delete_conversation sends the default request and returns None."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_conversation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_conversation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.DeleteConversationRequest()
    # Establish that the response is the type that we expect.
    assert response is None
@pytest.mark.asyncio
async def test_delete_conversation_async_from_dict():
    """Async delete_conversation also accepts the request as a plain dict."""
    await test_delete_conversation_async(request_type=dict)
def test_delete_conversation_field_headers():
    """delete_conversation sends the routing field as an x-goog-request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.DeleteConversationRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_conversation), "__call__"
    ) as call:
        call.return_value = None
        client.delete_conversation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_conversation_field_headers_async():
    """Async delete_conversation sends the routing field as a request-params header."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.DeleteConversationRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_conversation), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_conversation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_conversation_flattened():
    """delete_conversation maps flattened keyword arguments onto the request."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_conversation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.delete_conversation(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
def test_delete_conversation_flattened_error():
    """delete_conversation rejects a request object combined with flattened fields."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete_conversation(
            contact_center_insights.DeleteConversationRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_delete_conversation_flattened_async():
    """Async delete_conversation maps flattened keyword arguments onto the request.

    Fixes: removed a dead ``call.return_value = None`` assignment that was
    immediately overwritten by the FakeUnaryUnaryCall below.
    """
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_conversation), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_conversation(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_conversation_flattened_error_async():
    """Async delete_conversation rejects a request object plus flattened fields."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.delete_conversation(
            contact_center_insights.DeleteConversationRequest(), name="name_value",
        )
def test_create_analysis(
    transport: str = "grpc", request_type=contact_center_insights.CreateAnalysisRequest
):
    """create_analysis over gRPC returns a long-running-operation future."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_analysis), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.create_analysis(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.CreateAnalysisRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_create_analysis_from_dict():
    """create_analysis also accepts the request as a plain dict."""
    test_create_analysis(request_type=dict)
def test_create_analysis_empty_call():
    """create_analysis with no arguments sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_analysis), "__call__") as call:
        client.create_analysis()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.CreateAnalysisRequest()
@pytest.mark.asyncio
async def test_create_analysis_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.CreateAnalysisRequest,
):
    """Async create_analysis returns a long-running-operation future."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_analysis), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.create_analysis(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.CreateAnalysisRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_create_analysis_async_from_dict():
    """Async create_analysis also accepts the request as a plain dict."""
    await test_create_analysis_async(request_type=dict)
def test_create_analysis_field_headers():
    """create_analysis sends the routing field as an x-goog-request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.CreateAnalysisRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_analysis), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.create_analysis(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_analysis_field_headers_async():
    """Async create_analysis sends the routing field as a request-params header."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.CreateAnalysisRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_analysis), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.create_analysis(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_analysis_flattened():
    """create_analysis maps flattened keyword arguments onto the request."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_analysis), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_analysis(
            parent="parent_value", analysis=resources.Analysis(name="name_value"),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].analysis
        mock_val = resources.Analysis(name="name_value")
        assert arg == mock_val
def test_create_analysis_flattened_error():
    """create_analysis rejects a request object combined with flattened fields."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_analysis(
            contact_center_insights.CreateAnalysisRequest(),
            parent="parent_value",
            analysis=resources.Analysis(name="name_value"),
        )
@pytest.mark.asyncio
async def test_create_analysis_flattened_async():
    """Async create_analysis maps flattened keyword arguments onto the request.

    Fixes: removed a dead synchronous ``call.return_value`` assignment that
    was immediately overwritten by the FakeUnaryUnaryCall below.
    """
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_analysis), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_analysis(
            parent="parent_value", analysis=resources.Analysis(name="name_value"),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].analysis
        mock_val = resources.Analysis(name="name_value")
        assert arg == mock_val
@pytest.mark.asyncio
async def test_create_analysis_flattened_error_async():
    """Async create_analysis rejects a request object plus flattened fields."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.create_analysis(
            contact_center_insights.CreateAnalysisRequest(),
            parent="parent_value",
            analysis=resources.Analysis(name="name_value"),
        )
def test_get_analysis(
    transport: str = "grpc", request_type=contact_center_insights.GetAnalysisRequest
):
    """get_analysis over gRPC returns an Analysis with the mocked fields."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_analysis), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Analysis(name="name_value",)
        response = client.get_analysis(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.GetAnalysisRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Analysis)
    assert response.name == "name_value"
def test_get_analysis_from_dict():
    """get_analysis also accepts the request as a plain dict."""
    test_get_analysis(request_type=dict)
def test_get_analysis_empty_call():
    """get_analysis with no arguments sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_analysis), "__call__") as call:
        client.get_analysis()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.GetAnalysisRequest()
@pytest.mark.asyncio
async def test_get_analysis_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.GetAnalysisRequest,
):
    """Async get_analysis returns an Analysis with the mocked fields."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_analysis), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Analysis(name="name_value",)
        )
        response = await client.get_analysis(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.GetAnalysisRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Analysis)
    assert response.name == "name_value"
@pytest.mark.asyncio
async def test_get_analysis_async_from_dict():
    """Async get_analysis also accepts the request as a plain dict."""
    await test_get_analysis_async(request_type=dict)
def test_get_analysis_field_headers():
    """get_analysis sends the routing field as an x-goog-request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.GetAnalysisRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_analysis), "__call__") as call:
        call.return_value = resources.Analysis()
        client.get_analysis(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_analysis_field_headers_async():
    """Async get_analysis sends the routing field as a request-params header."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.GetAnalysisRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_analysis), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Analysis())
        await client.get_analysis(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_analysis_flattened():
    """get_analysis maps flattened keyword arguments onto the request."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_analysis), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Analysis()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_analysis(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
def test_get_analysis_flattened_error():
    """get_analysis rejects a request object combined with flattened fields."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_analysis(
            contact_center_insights.GetAnalysisRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_get_analysis_flattened_async():
    """Async get_analysis maps flattened keyword arguments onto the request.

    Fixes: removed a dead ``call.return_value = resources.Analysis()``
    assignment that was immediately overwritten by the FakeUnaryUnaryCall.
    """
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_analysis), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Analysis())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_analysis(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_get_analysis_flattened_error_async():
    """Async get_analysis rejects a request object plus flattened fields."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_analysis(
            contact_center_insights.GetAnalysisRequest(), name="name_value",
        )
def test_list_analyses(
    transport: str = "grpc", request_type=contact_center_insights.ListAnalysesRequest
):
    """list_analyses over gRPC returns a pager carrying the next page token."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_analyses), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = contact_center_insights.ListAnalysesResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_analyses(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.ListAnalysesRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListAnalysesPager)
    assert response.next_page_token == "next_page_token_value"
def test_list_analyses_from_dict():
    """list_analyses also accepts the request as a plain dict."""
    test_list_analyses(request_type=dict)
def test_list_analyses_empty_call():
    """list_analyses with no arguments sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_analyses), "__call__") as call:
        client.list_analyses()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.ListAnalysesRequest()
@pytest.mark.asyncio
async def test_list_analyses_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.ListAnalysesRequest,
):
    """Async list_analyses returns an async pager carrying the next page token."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_analyses), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListAnalysesResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_analyses(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.ListAnalysesRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListAnalysesAsyncPager)
    assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_analyses_async_from_dict():
    """Async list_analyses also accepts the request as a plain dict."""
    await test_list_analyses_async(request_type=dict)
def test_list_analyses_field_headers():
    """list_analyses sends the routing field as an x-goog-request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.ListAnalysesRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_analyses), "__call__") as call:
        call.return_value = contact_center_insights.ListAnalysesResponse()
        client.list_analyses(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_analyses_field_headers_async():
    """Async list_analyses sends the routing field as a request-params header."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.ListAnalysesRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_analyses), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListAnalysesResponse()
        )
        await client.list_analyses(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_analyses_flattened():
    """list_analyses maps flattened keyword arguments onto the request."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_analyses), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = contact_center_insights.ListAnalysesResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_analyses(parent="parent_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
def test_list_analyses_flattened_error():
    """list_analyses rejects a request object combined with flattened fields."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_analyses(
            contact_center_insights.ListAnalysesRequest(), parent="parent_value",
        )
@pytest.mark.asyncio
async def test_list_analyses_flattened_async():
    """Async list_analyses maps flattened keyword arguments onto the request.

    Fixes: removed a dead synchronous ``call.return_value`` assignment that
    was immediately overwritten by the FakeUnaryUnaryCall below.
    """
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_analyses), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListAnalysesResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_analyses(parent="parent_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_list_analyses_flattened_error_async():
    """Async list_analyses rejects a request object plus flattened fields."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.list_analyses(
            contact_center_insights.ListAnalysesRequest(), parent="parent_value",
        )
def test_list_analyses_pager():
    """Paging: the sync pager flattens all pages into one result stream."""
    client = ContactCenterInsightsClient(
        # Fix: instantiate the credentials object instead of passing the
        # AnonymousCredentials class itself (consistent with every other
        # test in this file).
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_analyses), "__call__") as call:
        # Set the response to a series of pages.
        call.side_effect = (
            contact_center_insights.ListAnalysesResponse(
                analyses=[
                    resources.Analysis(),
                    resources.Analysis(),
                    resources.Analysis(),
                ],
                next_page_token="abc",
            ),
            contact_center_insights.ListAnalysesResponse(
                analyses=[], next_page_token="def",
            ),
            contact_center_insights.ListAnalysesResponse(
                analyses=[resources.Analysis(),], next_page_token="ghi",
            ),
            contact_center_insights.ListAnalysesResponse(
                analyses=[resources.Analysis(), resources.Analysis(),],
            ),
            RuntimeError,
        )

        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_analyses(request={})

        # The pager carries the routing metadata for subsequent page fetches.
        assert pager._metadata == metadata

        # Iterating the pager walks every page; 3 + 0 + 1 + 2 = 6 results.
        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, resources.Analysis) for i in results)
def test_list_analyses_pages():
    """Paging: .pages yields each raw page with its next_page_token."""
    client = ContactCenterInsightsClient(
        # Fix: instantiate the credentials object instead of passing the
        # AnonymousCredentials class itself (consistent with every other
        # test in this file).
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_analyses), "__call__") as call:
        # Set the response to a series of pages.
        call.side_effect = (
            contact_center_insights.ListAnalysesResponse(
                analyses=[
                    resources.Analysis(),
                    resources.Analysis(),
                    resources.Analysis(),
                ],
                next_page_token="abc",
            ),
            contact_center_insights.ListAnalysesResponse(
                analyses=[], next_page_token="def",
            ),
            contact_center_insights.ListAnalysesResponse(
                analyses=[resources.Analysis(),], next_page_token="ghi",
            ),
            contact_center_insights.ListAnalysesResponse(
                analyses=[resources.Analysis(), resources.Analysis(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_analyses(request={}).pages)
        # Each raw page exposes the token of the page that follows it.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_analyses_async_pager():
    """Paging: the async pager flattens all pages into one result stream."""
    client = ContactCenterInsightsAsyncClient(
        # Fix: instantiate the credentials object instead of passing the
        # AnonymousCredentials class itself (consistent with every other
        # test in this file).
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_analyses), "__call__", new_callable=mock.AsyncMock
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            contact_center_insights.ListAnalysesResponse(
                analyses=[
                    resources.Analysis(),
                    resources.Analysis(),
                    resources.Analysis(),
                ],
                next_page_token="abc",
            ),
            contact_center_insights.ListAnalysesResponse(
                analyses=[], next_page_token="def",
            ),
            contact_center_insights.ListAnalysesResponse(
                analyses=[resources.Analysis(),], next_page_token="ghi",
            ),
            contact_center_insights.ListAnalysesResponse(
                analyses=[resources.Analysis(), resources.Analysis(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_analyses(request={},)
        assert async_pager.next_page_token == "abc"
        # Async iteration walks every page; 3 + 0 + 1 + 2 = 6 results.
        responses = []
        async for response in async_pager:
            responses.append(response)
        assert len(responses) == 6
        assert all(isinstance(i, resources.Analysis) for i in responses)
@pytest.mark.asyncio
async def test_list_analyses_async_pages():
    """Paging: async .pages yields each raw page with its next_page_token."""
    client = ContactCenterInsightsAsyncClient(
        # Fix: instantiate the credentials object instead of passing the
        # AnonymousCredentials class itself (consistent with every other
        # test in this file).
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_analyses), "__call__", new_callable=mock.AsyncMock
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            contact_center_insights.ListAnalysesResponse(
                analyses=[
                    resources.Analysis(),
                    resources.Analysis(),
                    resources.Analysis(),
                ],
                next_page_token="abc",
            ),
            contact_center_insights.ListAnalysesResponse(
                analyses=[], next_page_token="def",
            ),
            contact_center_insights.ListAnalysesResponse(
                analyses=[resources.Analysis(),], next_page_token="ghi",
            ),
            contact_center_insights.ListAnalysesResponse(
                analyses=[resources.Analysis(), resources.Analysis(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_analyses(request={})).pages:
            pages.append(page_)
        # Each raw page exposes the token of the page that follows it.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
def test_delete_analysis(
    transport: str = "grpc", request_type=contact_center_insights.DeleteAnalysisRequest
):
    """DeleteAnalysis: an empty request reaches the stub and returns None."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 treats every field as optional at runtime and the API itself is
    # mocked, so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub method and supply a canned reply.
    with mock.patch.object(type(client.transport.delete_analysis), "__call__") as rpc:
        rpc.return_value = None
        response = client.delete_analysis(request)
        # Exactly one stub invocation, carrying the expected request message.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.DeleteAnalysisRequest()
    # The mocked (empty) reply comes straight back to the caller.
    assert response is None
def test_delete_analysis_from_dict():
    """Re-run the DeleteAnalysis happy path with a dict-typed request."""
    test_delete_analysis(request_type=dict)
def test_delete_analysis_empty_call():
    """Calling with no arguments at all still sends a default request.

    Coverage failsafe: request == None with no flattened fields must work.
    """
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the gRPC stub method so no real RPC is attempted.
    with mock.patch.object(type(client.transport.delete_analysis), "__call__") as rpc:
        client.delete_analysis()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.DeleteAnalysisRequest()
@pytest.mark.asyncio
async def test_delete_analysis_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.DeleteAnalysisRequest,
):
    """DeleteAnalysis (async): empty request reaches the stub, returns None."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 treats every field as optional at runtime and the API itself is
    # mocked, so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub method and supply an awaitable canned reply.
    with mock.patch.object(type(client.transport.delete_analysis), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_analysis(request)
        # The stub was invoked with the expected request message.
        assert rpc.mock_calls
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.DeleteAnalysisRequest()
    # The mocked (empty) reply comes straight back to the caller.
    assert response is None
@pytest.mark.asyncio
async def test_delete_analysis_async_from_dict():
    """Re-run the async DeleteAnalysis happy path with a dict-typed request."""
    await test_delete_analysis_async(request_type=dict)
def test_delete_analysis_field_headers():
    """Routing: the resource name is propagated as a request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Fields that feed the HTTP/1.1 URI must be echoed in a field header,
    # so give the routed field a non-empty value.
    request = contact_center_insights.DeleteAnalysisRequest()
    request.name = "name/value"
    # Patch the gRPC stub method so no real RPC is attempted.
    with mock.patch.object(type(client.transport.delete_analysis), "__call__") as rpc:
        rpc.return_value = None
        client.delete_analysis(request)
        # Exactly one stub invocation, carrying the request unchanged.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from the field value was attached.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_delete_analysis_field_headers_async():
    """Routing (async): resource name propagated as request-params header."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Fields that feed the HTTP/1.1 URI must be echoed in a field header,
    # so give the routed field a non-empty value.
    request = contact_center_insights.DeleteAnalysisRequest()
    request.name = "name/value"
    # Patch the gRPC stub method so no real RPC is attempted.
    with mock.patch.object(type(client.transport.delete_analysis), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_analysis(request)
        # The stub was invoked with the request unchanged.
        assert rpc.mock_calls
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from the field value was attached.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
def test_delete_analysis_flattened():
    """Flattened kwargs should populate the request fields."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Patch the gRPC stub method and supply a canned reply.
    with mock.patch.object(type(client.transport.delete_analysis), "__call__") as rpc:
        rpc.return_value = None
        # Invoke with a truthy value for each flattened field.
        client.delete_analysis(name="name_value",)
        # The keyword argument must land on the request message.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].name == "name_value"
def test_delete_analysis_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both a request message and individual flattened fields is
    # ambiguous, so the client is expected to reject the call.
    with pytest.raises(ValueError):
        client.delete_analysis(
            contact_center_insights.DeleteAnalysisRequest(),
            name="name_value",
        )
@pytest.mark.asyncio
async def test_delete_analysis_flattened_async():
    """Flattened kwargs should populate the request fields (async client)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_analysis), "__call__") as call:
        # Designate an appropriate return value for the call.  (A bare
        # `call.return_value = None` assignment used to precede this line;
        # it was dead code — immediately overwritten — and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_analysis(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_analysis_flattened_error_async():
    """Mixing a request object with flattened fields must raise (async)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both a request message and individual flattened fields is
    # ambiguous, so the client is expected to reject the call.
    with pytest.raises(ValueError):
        await client.delete_analysis(
            contact_center_insights.DeleteAnalysisRequest(),
            name="name_value",
        )
def test_export_insights_data(
    transport: str = "grpc",
    request_type=contact_center_insights.ExportInsightsDataRequest,
):
    """ExportInsightsData: empty request reaches the stub; an LRO comes back."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 treats every field as optional at runtime and the API itself is
    # mocked, so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub method and supply a canned long-running operation.
    with mock.patch.object(
        type(client.transport.export_insights_data), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.export_insights_data(request)
        # Exactly one stub invocation, carrying the expected request message.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.ExportInsightsDataRequest()
    # The operation is wrapped in a future for the caller.
    assert isinstance(response, future.Future)
def test_export_insights_data_from_dict():
    """Re-run the ExportInsightsData happy path with a dict-typed request."""
    test_export_insights_data(request_type=dict)
def test_export_insights_data_empty_call():
    """Calling with no arguments at all still sends a default request.

    Coverage failsafe: request == None with no flattened fields must work.
    """
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the gRPC stub method so no real RPC is attempted.
    with mock.patch.object(
        type(client.transport.export_insights_data), "__call__"
    ) as rpc:
        client.export_insights_data()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.ExportInsightsDataRequest()
@pytest.mark.asyncio
async def test_export_insights_data_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.ExportInsightsDataRequest,
):
    """ExportInsightsData (async): empty request reaches the stub; LRO back."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 treats every field as optional at runtime and the API itself is
    # mocked, so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub method and supply an awaitable canned operation.
    with mock.patch.object(
        type(client.transport.export_insights_data), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.export_insights_data(request)
        # The stub was invoked with the expected request message.
        assert rpc.mock_calls
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.ExportInsightsDataRequest()
    # The operation is wrapped in a future for the caller.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_export_insights_data_async_from_dict():
    """Re-run the async ExportInsightsData path with a dict-typed request."""
    await test_export_insights_data_async(request_type=dict)
def test_export_insights_data_field_headers():
    """Routing: the parent field is propagated as a request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Fields that feed the HTTP/1.1 URI must be echoed in a field header,
    # so give the routed field a non-empty value.
    request = contact_center_insights.ExportInsightsDataRequest()
    request.parent = "parent/value"
    # Patch the gRPC stub method so no real RPC is attempted.
    with mock.patch.object(
        type(client.transport.export_insights_data), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/op")
        client.export_insights_data(request)
        # Exactly one stub invocation, carrying the request unchanged.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from the field value was attached.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_export_insights_data_field_headers_async():
    """Routing (async): parent field propagated as request-params header."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Fields that feed the HTTP/1.1 URI must be echoed in a field header,
    # so give the routed field a non-empty value.
    request = contact_center_insights.ExportInsightsDataRequest()
    request.parent = "parent/value"
    # Patch the gRPC stub method so no real RPC is attempted.
    with mock.patch.object(
        type(client.transport.export_insights_data), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.export_insights_data(request)
        # The stub was invoked with the request unchanged.
        assert rpc.mock_calls
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from the field value was attached.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
def test_export_insights_data_flattened():
    """Flattened kwargs should populate the request fields."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Patch the gRPC stub method and supply a canned operation reply.
    with mock.patch.object(
        type(client.transport.export_insights_data), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/op")
        # Invoke with a truthy value for each flattened field.
        client.export_insights_data(parent="parent_value",)
        # The keyword argument must land on the request message.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].parent == "parent_value"
def test_export_insights_data_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both a request message and individual flattened fields is
    # ambiguous, so the client is expected to reject the call.
    with pytest.raises(ValueError):
        client.export_insights_data(
            contact_center_insights.ExportInsightsDataRequest(),
            parent="parent_value",
        )
@pytest.mark.asyncio
async def test_export_insights_data_flattened_async():
    """Flattened kwargs should populate the request fields (async client)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.export_insights_data), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  (A bare
        # Operation assignment used to precede this line; it was dead code —
        # immediately overwritten by the awaitable wrapper — and has been
        # removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.export_insights_data(parent="parent_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_export_insights_data_flattened_error_async():
    """Mixing a request object with flattened fields must raise (async)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both a request message and individual flattened fields is
    # ambiguous, so the client is expected to reject the call.
    with pytest.raises(ValueError):
        await client.export_insights_data(
            contact_center_insights.ExportInsightsDataRequest(),
            parent="parent_value",
        )
def test_create_issue_model(
    transport: str = "grpc",
    request_type=contact_center_insights.CreateIssueModelRequest,
):
    """CreateIssueModel: empty request reaches the stub; an LRO comes back."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 treats every field as optional at runtime and the API itself is
    # mocked, so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub method and supply a canned long-running operation.
    with mock.patch.object(
        type(client.transport.create_issue_model), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.create_issue_model(request)
        # Exactly one stub invocation, carrying the expected request message.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.CreateIssueModelRequest()
    # The operation is wrapped in a future for the caller.
    assert isinstance(response, future.Future)
def test_create_issue_model_from_dict():
    """Re-run the CreateIssueModel happy path with a dict-typed request."""
    test_create_issue_model(request_type=dict)
def test_create_issue_model_empty_call():
    """Calling with no arguments at all still sends a default request.

    Coverage failsafe: request == None with no flattened fields must work.
    """
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the gRPC stub method so no real RPC is attempted.
    with mock.patch.object(
        type(client.transport.create_issue_model), "__call__"
    ) as rpc:
        client.create_issue_model()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.CreateIssueModelRequest()
@pytest.mark.asyncio
async def test_create_issue_model_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.CreateIssueModelRequest,
):
    """CreateIssueModel (async): empty request reaches the stub; LRO back."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 treats every field as optional at runtime and the API itself is
    # mocked, so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub method and supply an awaitable canned operation.
    with mock.patch.object(
        type(client.transport.create_issue_model), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.create_issue_model(request)
        # The stub was invoked with the expected request message.
        assert rpc.mock_calls
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.CreateIssueModelRequest()
    # The operation is wrapped in a future for the caller.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_create_issue_model_async_from_dict():
    """Re-run the async CreateIssueModel path with a dict-typed request."""
    await test_create_issue_model_async(request_type=dict)
def test_create_issue_model_field_headers():
    """Routing: the parent field is propagated as a request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Fields that feed the HTTP/1.1 URI must be echoed in a field header,
    # so give the routed field a non-empty value.
    request = contact_center_insights.CreateIssueModelRequest()
    request.parent = "parent/value"
    # Patch the gRPC stub method so no real RPC is attempted.
    with mock.patch.object(
        type(client.transport.create_issue_model), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/op")
        client.create_issue_model(request)
        # Exactly one stub invocation, carrying the request unchanged.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from the field value was attached.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_create_issue_model_field_headers_async():
    """Routing (async): parent field propagated as request-params header."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Fields that feed the HTTP/1.1 URI must be echoed in a field header,
    # so give the routed field a non-empty value.
    request = contact_center_insights.CreateIssueModelRequest()
    request.parent = "parent/value"
    # Patch the gRPC stub method so no real RPC is attempted.
    with mock.patch.object(
        type(client.transport.create_issue_model), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.create_issue_model(request)
        # The stub was invoked with the request unchanged.
        assert rpc.mock_calls
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from the field value was attached.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
def test_create_issue_model_flattened():
    """Flattened kwargs should populate the request fields."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Patch the gRPC stub method and supply a canned operation reply.
    with mock.patch.object(
        type(client.transport.create_issue_model), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/op")
        # Invoke with a truthy value for each flattened field.
        client.create_issue_model(
            parent="parent_value",
            issue_model=resources.IssueModel(name="name_value"),
        )
        # Each keyword argument must land on the request message.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].parent == "parent_value"
        assert call_args[0].issue_model == resources.IssueModel(name="name_value")
def test_create_issue_model_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both a request message and individual flattened fields is
    # ambiguous, so the client is expected to reject the call.
    with pytest.raises(ValueError):
        client.create_issue_model(
            contact_center_insights.CreateIssueModelRequest(),
            parent="parent_value",
            issue_model=resources.IssueModel(name="name_value"),
        )
@pytest.mark.asyncio
async def test_create_issue_model_flattened_async():
    """Flattened kwargs should populate the request fields (async client)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  (A bare
        # Operation assignment used to precede this line; it was dead code —
        # immediately overwritten by the awaitable wrapper — and has been
        # removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_issue_model(
            parent="parent_value", issue_model=resources.IssueModel(name="name_value"),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].issue_model
        mock_val = resources.IssueModel(name="name_value")
        assert arg == mock_val
@pytest.mark.asyncio
async def test_create_issue_model_flattened_error_async():
    """Mixing a request object with flattened fields must raise (async)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both a request message and individual flattened fields is
    # ambiguous, so the client is expected to reject the call.
    with pytest.raises(ValueError):
        await client.create_issue_model(
            contact_center_insights.CreateIssueModelRequest(),
            parent="parent_value",
            issue_model=resources.IssueModel(name="name_value"),
        )
def test_update_issue_model(
    transport: str = "grpc",
    request_type=contact_center_insights.UpdateIssueModelRequest,
):
    """UpdateIssueModel: empty request reaches stub; canned fields round-trip."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 treats every field as optional at runtime and the API itself is
    # mocked, so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub method and supply a canned IssueModel reply.
    with mock.patch.object(
        type(client.transport.update_issue_model), "__call__"
    ) as rpc:
        rpc.return_value = resources.IssueModel(
            name="name_value",
            display_name="display_name_value",
            state=resources.IssueModel.State.UNDEPLOYED,
        )
        response = client.update_issue_model(request)
        # Exactly one stub invocation, carrying the expected request message.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.UpdateIssueModelRequest()
    # Every canned field survives the round trip unchanged.
    assert isinstance(response, resources.IssueModel)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.state == resources.IssueModel.State.UNDEPLOYED
def test_update_issue_model_from_dict():
    """Re-run the UpdateIssueModel happy path with a dict-typed request."""
    test_update_issue_model(request_type=dict)
def test_update_issue_model_empty_call():
    """Calling with no arguments at all still sends a default request.

    Coverage failsafe: request == None with no flattened fields must work.
    """
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the gRPC stub method so no real RPC is attempted.
    with mock.patch.object(
        type(client.transport.update_issue_model), "__call__"
    ) as rpc:
        client.update_issue_model()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.UpdateIssueModelRequest()
@pytest.mark.asyncio
async def test_update_issue_model_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.UpdateIssueModelRequest,
):
    """UpdateIssueModel (async): empty request reaches stub; fields round-trip."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 treats every field as optional at runtime and the API itself is
    # mocked, so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub method and supply an awaitable canned reply.
    with mock.patch.object(
        type(client.transport.update_issue_model), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.IssueModel(
                name="name_value",
                display_name="display_name_value",
                state=resources.IssueModel.State.UNDEPLOYED,
            )
        )
        response = await client.update_issue_model(request)
        # The stub was invoked with the expected request message.
        assert rpc.mock_calls
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.UpdateIssueModelRequest()
    # Every canned field survives the round trip unchanged.
    assert isinstance(response, resources.IssueModel)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.state == resources.IssueModel.State.UNDEPLOYED
@pytest.mark.asyncio
async def test_update_issue_model_async_from_dict():
    """Re-run the async UpdateIssueModel path with a dict-typed request."""
    await test_update_issue_model_async(request_type=dict)
def test_update_issue_model_field_headers():
    """Routing: the nested issue_model.name feeds the request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Fields that feed the HTTP/1.1 URI must be echoed in a field header,
    # so give the routed (nested) field a non-empty value.
    request = contact_center_insights.UpdateIssueModelRequest()
    request.issue_model.name = "issue_model.name/value"
    # Patch the gRPC stub method so no real RPC is attempted.
    with mock.patch.object(
        type(client.transport.update_issue_model), "__call__"
    ) as rpc:
        rpc.return_value = resources.IssueModel()
        client.update_issue_model(request)
        # Exactly one stub invocation, carrying the request unchanged.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from the nested field value was attached.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert (
        "x-goog-request-params",
        "issue_model.name=issue_model.name/value",
    ) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_update_issue_model_field_headers_async():
    """Routing (async): nested issue_model.name feeds request-params header."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Fields that feed the HTTP/1.1 URI must be echoed in a field header,
    # so give the routed (nested) field a non-empty value.
    request = contact_center_insights.UpdateIssueModelRequest()
    request.issue_model.name = "issue_model.name/value"
    # Patch the gRPC stub method so no real RPC is attempted.
    with mock.patch.object(
        type(client.transport.update_issue_model), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.IssueModel()
        )
        await client.update_issue_model(request)
        # The stub was invoked with the request unchanged.
        assert rpc.mock_calls
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
    # The routing header derived from the nested field value was attached.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert (
        "x-goog-request-params",
        "issue_model.name=issue_model.name/value",
    ) in call_kwargs["metadata"]
def test_update_issue_model_flattened():
    """Flattened keyword arguments to update_issue_model are folded into the request message."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.IssueModel()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_issue_model(
            issue_model=resources.IssueModel(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].issue_model
        mock_val = resources.IssueModel(name="name_value")
        assert arg == mock_val
        arg = args[0].update_mask
        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
        assert arg == mock_val
def test_update_issue_model_flattened_error():
    """Mixing a request object with flattened arguments must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_issue_model(
            contact_center_insights.UpdateIssueModelRequest(),
            issue_model=resources.IssueModel(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
@pytest.mark.asyncio
async def test_update_issue_model_flattened_async():
    """Async variant: flattened keyword arguments to update_issue_model are folded into the request."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (A single awaitable fake call; the earlier redundant plain
        # return_value assignment was dead code and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.IssueModel()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_issue_model(
            issue_model=resources.IssueModel(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made exactly once with the
        # expected request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].issue_model
        mock_val = resources.IssueModel(name="name_value")
        assert arg == mock_val
        arg = args[0].update_mask
        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
        assert arg == mock_val
@pytest.mark.asyncio
async def test_update_issue_model_flattened_error_async():
    """Async variant: mixing a request object with flattened arguments must raise ValueError."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.update_issue_model(
            contact_center_insights.UpdateIssueModelRequest(),
            issue_model=resources.IssueModel(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
def test_get_issue_model(
    transport: str = "grpc", request_type=contact_center_insights.GetIssueModelRequest
):
    """Exercise get_issue_model over gRPC with a mocked stub and verify request and response."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue_model), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.IssueModel(
            name="name_value",
            display_name="display_name_value",
            state=resources.IssueModel.State.UNDEPLOYED,
        )
        response = client.get_issue_model(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.GetIssueModelRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.IssueModel)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.state == resources.IssueModel.State.UNDEPLOYED
def test_get_issue_model_from_dict():
    """Re-run the base get_issue_model test with a dict request to exercise dict coercion."""
    test_get_issue_model(request_type=dict)
def test_get_issue_model_empty_call():
    """Calling get_issue_model with no arguments sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue_model), "__call__") as call:
        client.get_issue_model()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.GetIssueModelRequest()
@pytest.mark.asyncio
async def test_get_issue_model_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.GetIssueModelRequest,
):
    """Async variant: exercise get_issue_model over grpc_asyncio with a mocked stub."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue_model), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.IssueModel(
                name="name_value",
                display_name="display_name_value",
                state=resources.IssueModel.State.UNDEPLOYED,
            )
        )
        response = await client.get_issue_model(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.GetIssueModelRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.IssueModel)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.state == resources.IssueModel.State.UNDEPLOYED
@pytest.mark.asyncio
async def test_get_issue_model_async_from_dict():
    """Re-run the async get_issue_model test with a dict request."""
    await test_get_issue_model_async(request_type=dict)
def test_get_issue_model_field_headers():
    """get_issue_model must propagate the name field via x-goog-request-params metadata."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.GetIssueModelRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue_model), "__call__") as call:
        call.return_value = resources.IssueModel()
        client.get_issue_model(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_issue_model_field_headers_async():
    """Async variant: get_issue_model must propagate the name field via request-params metadata."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.GetIssueModelRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue_model), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.IssueModel()
        )
        await client.get_issue_model(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_issue_model_flattened():
    """Flattened keyword arguments to get_issue_model are folded into the request message."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue_model), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.IssueModel()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_issue_model(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
def test_get_issue_model_flattened_error():
    """Mixing a request object with flattened arguments must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_issue_model(
            contact_center_insights.GetIssueModelRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_get_issue_model_flattened_async():
    """Async variant: flattened keyword arguments to get_issue_model are folded into the request."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue_model), "__call__") as call:
        # Designate an appropriate return value for the call.
        # (A single awaitable fake call; the earlier redundant plain
        # return_value assignment was dead code and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.IssueModel()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_issue_model(name="name_value",)

        # Establish that the underlying call was made exactly once with the
        # expected request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_get_issue_model_flattened_error_async():
    """Async variant: mixing a request object with flattened arguments must raise ValueError."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_issue_model(
            contact_center_insights.GetIssueModelRequest(), name="name_value",
        )
def test_list_issue_models(
    transport: str = "grpc", request_type=contact_center_insights.ListIssueModelsRequest
):
    """Exercise list_issue_models over gRPC with a mocked stub and verify request and response."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_issue_models), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = contact_center_insights.ListIssueModelsResponse()
        response = client.list_issue_models(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.ListIssueModelsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, contact_center_insights.ListIssueModelsResponse)
def test_list_issue_models_from_dict():
    """Re-run the base list_issue_models test with a dict request to exercise dict coercion."""
    test_list_issue_models(request_type=dict)
def test_list_issue_models_empty_call():
    """Calling list_issue_models with no arguments sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_issue_models), "__call__"
    ) as call:
        client.list_issue_models()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.ListIssueModelsRequest()
@pytest.mark.asyncio
async def test_list_issue_models_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.ListIssueModelsRequest,
):
    """Async variant: exercise list_issue_models over grpc_asyncio with a mocked stub."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_issue_models), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListIssueModelsResponse()
        )
        response = await client.list_issue_models(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.ListIssueModelsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, contact_center_insights.ListIssueModelsResponse)
@pytest.mark.asyncio
async def test_list_issue_models_async_from_dict():
    """Re-run the async list_issue_models test with a dict request."""
    await test_list_issue_models_async(request_type=dict)
def test_list_issue_models_field_headers():
    """list_issue_models must propagate the parent field via x-goog-request-params metadata."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.ListIssueModelsRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_issue_models), "__call__"
    ) as call:
        call.return_value = contact_center_insights.ListIssueModelsResponse()
        client.list_issue_models(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_issue_models_field_headers_async():
    """Async variant: list_issue_models must propagate the parent field via request-params metadata."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.ListIssueModelsRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_issue_models), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListIssueModelsResponse()
        )
        await client.list_issue_models(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_issue_models_flattened():
    """Flattened keyword arguments to list_issue_models are folded into the request message."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_issue_models), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = contact_center_insights.ListIssueModelsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_issue_models(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
def test_list_issue_models_flattened_error():
    """Mixing a request object with flattened arguments must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_issue_models(
            contact_center_insights.ListIssueModelsRequest(), parent="parent_value",
        )
@pytest.mark.asyncio
async def test_list_issue_models_flattened_async():
    """Async variant: flattened keyword arguments to list_issue_models are folded into the request."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_issue_models), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (A single awaitable fake call; the earlier redundant plain
        # return_value assignment was dead code and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListIssueModelsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_issue_models(parent="parent_value",)

        # Establish that the underlying call was made exactly once with the
        # expected request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_list_issue_models_flattened_error_async():
    """Async variant: mixing a request object with flattened arguments must raise ValueError."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.list_issue_models(
            contact_center_insights.ListIssueModelsRequest(), parent="parent_value",
        )
def test_delete_issue_model(
    transport: str = "grpc",
    request_type=contact_center_insights.DeleteIssueModelRequest,
):
    """Exercise delete_issue_model over gRPC; the response is a long-running operation future."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.delete_issue_model(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.DeleteIssueModelRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_delete_issue_model_from_dict():
    """Re-run the base delete_issue_model test with a dict request to exercise dict coercion."""
    test_delete_issue_model(request_type=dict)
def test_delete_issue_model_empty_call():
    """Calling delete_issue_model with no arguments sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_issue_model), "__call__"
    ) as call:
        client.delete_issue_model()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.DeleteIssueModelRequest()
@pytest.mark.asyncio
async def test_delete_issue_model_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.DeleteIssueModelRequest,
):
    """Async variant: exercise delete_issue_model over grpc_asyncio with a mocked stub."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.delete_issue_model(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.DeleteIssueModelRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_delete_issue_model_async_from_dict():
    """Re-run the async delete_issue_model test with a dict request."""
    await test_delete_issue_model_async(request_type=dict)
def test_delete_issue_model_field_headers():
    """delete_issue_model must propagate the name field via x-goog-request-params metadata."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.DeleteIssueModelRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_issue_model), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.delete_issue_model(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_issue_model_field_headers_async():
    """Async variant: delete_issue_model must propagate the name field via request-params metadata."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.DeleteIssueModelRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_issue_model), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.delete_issue_model(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_issue_model_flattened():
    """Flattened keyword arguments to delete_issue_model are folded into the request message."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.delete_issue_model(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
def test_delete_issue_model_flattened_error():
    """Mixing a request object with flattened arguments must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete_issue_model(
            contact_center_insights.DeleteIssueModelRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_delete_issue_model_flattened_async():
    """Async variant: flattened keyword arguments to delete_issue_model are folded into the request."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (A single awaitable fake call; the earlier redundant plain
        # return_value assignment was dead code and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_issue_model(name="name_value",)

        # Establish that the underlying call was made exactly once with the
        # expected request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_issue_model_flattened_error_async():
    """Async variant: mixing a request object with flattened arguments must raise ValueError."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.delete_issue_model(
            contact_center_insights.DeleteIssueModelRequest(), name="name_value",
        )
def test_deploy_issue_model(
    transport: str = "grpc",
    request_type=contact_center_insights.DeployIssueModelRequest,
):
    """Exercise deploy_issue_model over gRPC; the response is a long-running operation future."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.deploy_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.deploy_issue_model(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.DeployIssueModelRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_deploy_issue_model_from_dict():
    """Re-run the base deploy_issue_model test with a dict request to exercise dict coercion."""
    test_deploy_issue_model(request_type=dict)
def test_deploy_issue_model_empty_call():
    """Calling deploy_issue_model with no arguments sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.deploy_issue_model), "__call__"
    ) as call:
        client.deploy_issue_model()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.DeployIssueModelRequest()
@pytest.mark.asyncio
async def test_deploy_issue_model_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.DeployIssueModelRequest,
):
    """Async variant: exercise deploy_issue_model over grpc_asyncio with a mocked stub."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.deploy_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.deploy_issue_model(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.DeployIssueModelRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_deploy_issue_model_async_from_dict():
    """Re-run the async deploy_issue_model test with a dict request."""
    await test_deploy_issue_model_async(request_type=dict)
def test_deploy_issue_model_field_headers():
    """deploy_issue_model must propagate the name field via x-goog-request-params metadata."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.DeployIssueModelRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.deploy_issue_model), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.deploy_issue_model(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_deploy_issue_model_field_headers_async():
    """Async variant: deploy_issue_model must propagate the name field via request-params metadata."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.DeployIssueModelRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.deploy_issue_model), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.deploy_issue_model(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_deploy_issue_model_flattened():
    """Flattened keyword arguments to deploy_issue_model are folded into the request message."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.deploy_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.deploy_issue_model(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
def test_deploy_issue_model_flattened_error():
    """Mixing a request object with flattened arguments must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.deploy_issue_model(
            contact_center_insights.DeployIssueModelRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_deploy_issue_model_flattened_async():
    """Async variant: flattened keyword arguments to deploy_issue_model are folded into the request."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.deploy_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (A single awaitable fake call; the earlier redundant plain
        # return_value assignment was dead code and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.deploy_issue_model(name="name_value",)

        # Establish that the underlying call was made exactly once with the
        # expected request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_deploy_issue_model_flattened_error_async():
    """Verify the async deploy_issue_model raises ValueError for request-object plus flattened kwargs."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.deploy_issue_model(
            contact_center_insights.DeployIssueModelRequest(), name="name_value",
        )
def test_undeploy_issue_model(
    transport: str = "grpc",
    request_type=contact_center_insights.UndeployIssueModelRequest,
):
    """Exercise undeploy_issue_model over a mocked gRPC stub and check the request type and LRO response."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.undeploy_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.undeploy_issue_model(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.UndeployIssueModelRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_undeploy_issue_model_from_dict():
    """Re-run the undeploy_issue_model test with a dict request to cover dict-to-proto coercion."""
    test_undeploy_issue_model(request_type=dict)
def test_undeploy_issue_model_empty_call():
    """Verify calling undeploy_issue_model with no arguments sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.undeploy_issue_model), "__call__"
    ) as call:
        client.undeploy_issue_model()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.UndeployIssueModelRequest()
@pytest.mark.asyncio
async def test_undeploy_issue_model_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.UndeployIssueModelRequest,
):
    """Exercise the async undeploy_issue_model over a mocked stub and check request type and LRO response."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.undeploy_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.undeploy_issue_model(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.UndeployIssueModelRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_undeploy_issue_model_async_from_dict():
    """Re-run the async undeploy_issue_model test with a dict request."""
    await test_undeploy_issue_model_async(request_type=dict)
def test_undeploy_issue_model_field_headers():
    """Verify undeploy_issue_model sends the routing header derived from ``request.name``."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.UndeployIssueModelRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.undeploy_issue_model), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.undeploy_issue_model(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_undeploy_issue_model_field_headers_async():
    """Verify the async undeploy_issue_model sends the routing header derived from ``request.name``."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.UndeployIssueModelRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.undeploy_issue_model), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.undeploy_issue_model(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_undeploy_issue_model_flattened():
    """Verify undeploy_issue_model maps the flattened ``name`` kwarg onto the request."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.undeploy_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.undeploy_issue_model(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
def test_undeploy_issue_model_flattened_error():
    """Verify undeploy_issue_model raises ValueError when given both a request object and flattened kwargs."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.undeploy_issue_model(
            contact_center_insights.UndeployIssueModelRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_undeploy_issue_model_flattened_async():
    """Verify the async undeploy_issue_model maps the flattened ``name`` kwarg onto the request."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.undeploy_issue_model), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  Only the
        # awaitable wrapper is needed here; a preceding plain-Operation
        # assignment was dead code (immediately overwritten) and was removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.undeploy_issue_model(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_undeploy_issue_model_flattened_error_async():
    """Verify the async undeploy_issue_model raises ValueError for request-object plus flattened kwargs."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.undeploy_issue_model(
            contact_center_insights.UndeployIssueModelRequest(), name="name_value",
        )
def test_get_issue(
    transport: str = "grpc", request_type=contact_center_insights.GetIssueRequest
):
    """Exercise get_issue over a mocked gRPC stub and check the request type and Issue response fields."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Issue(
            name="name_value", display_name="display_name_value",
        )
        response = client.get_issue(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.GetIssueRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Issue)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
def test_get_issue_from_dict():
    """Re-run the get_issue test with a dict request to cover dict-to-proto coercion."""
    test_get_issue(request_type=dict)
def test_get_issue_empty_call():
    """Verify calling get_issue with no arguments sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue), "__call__") as call:
        client.get_issue()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.GetIssueRequest()
@pytest.mark.asyncio
async def test_get_issue_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.GetIssueRequest,
):
    """Exercise the async get_issue over a mocked stub and check request type and Issue response fields."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Issue(name="name_value", display_name="display_name_value",)
        )
        response = await client.get_issue(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.GetIssueRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Issue)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
@pytest.mark.asyncio
async def test_get_issue_async_from_dict():
    """Re-run the async get_issue test with a dict request."""
    await test_get_issue_async(request_type=dict)
def test_get_issue_field_headers():
    """Verify get_issue sends the routing header derived from ``request.name``."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.GetIssueRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue), "__call__") as call:
        call.return_value = resources.Issue()
        client.get_issue(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_issue_field_headers_async():
    """Verify the async get_issue sends the routing header derived from ``request.name``."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.GetIssueRequest()
    request.name = "name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue())
        await client.get_issue(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_issue_flattened():
    """Verify get_issue maps the flattened ``name`` kwarg onto the request."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Issue()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_issue(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
def test_get_issue_flattened_error():
    """Verify get_issue raises ValueError when given both a request object and flattened kwargs."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_issue(
            contact_center_insights.GetIssueRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_get_issue_flattened_async():
    """Verify the async get_issue maps the flattened ``name`` kwarg onto the request."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_issue), "__call__") as call:
        # Designate an appropriate return value for the call.  Only the
        # awaitable wrapper is needed here; a preceding bare Issue()
        # assignment was dead code (immediately overwritten) and was removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_issue(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_get_issue_flattened_error_async():
    """Verify the async get_issue raises ValueError for request-object plus flattened kwargs."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_issue(
            contact_center_insights.GetIssueRequest(), name="name_value",
        )
def test_list_issues(
    transport: str = "grpc", request_type=contact_center_insights.ListIssuesRequest
):
    """Exercise list_issues over a mocked gRPC stub and check the request and response types."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_issues), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = contact_center_insights.ListIssuesResponse()
        response = client.list_issues(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.ListIssuesRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, contact_center_insights.ListIssuesResponse)
def test_list_issues_from_dict():
    """Re-run the list_issues test with a dict request to cover dict-to-proto coercion."""
    test_list_issues(request_type=dict)
def test_list_issues_empty_call():
    """Verify calling list_issues with no arguments sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_issues), "__call__") as call:
        client.list_issues()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.ListIssuesRequest()
@pytest.mark.asyncio
async def test_list_issues_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.ListIssuesRequest,
):
    """Exercise the async list_issues over a mocked stub and check request and response types."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_issues), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListIssuesResponse()
        )
        response = await client.list_issues(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.ListIssuesRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, contact_center_insights.ListIssuesResponse)
@pytest.mark.asyncio
async def test_list_issues_async_from_dict():
    """Re-run the async list_issues test with a dict request."""
    await test_list_issues_async(request_type=dict)
def test_list_issues_field_headers():
    """Verify list_issues sends the routing header derived from ``request.parent``."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.ListIssuesRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_issues), "__call__") as call:
        call.return_value = contact_center_insights.ListIssuesResponse()
        client.list_issues(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_issues_field_headers_async():
    """Verify the async list_issues sends the routing header derived from ``request.parent``."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.ListIssuesRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_issues), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListIssuesResponse()
        )
        await client.list_issues(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_issues_flattened():
    """Verify list_issues maps the flattened ``parent`` kwarg onto the request."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_issues), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = contact_center_insights.ListIssuesResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_issues(parent="parent_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
def test_list_issues_flattened_error():
    """Verify list_issues raises ValueError when given both a request object and flattened kwargs."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_issues(
            contact_center_insights.ListIssuesRequest(), parent="parent_value",
        )
@pytest.mark.asyncio
async def test_list_issues_flattened_async():
    """Verify the async list_issues maps the flattened ``parent`` kwarg onto the request."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_issues), "__call__") as call:
        # Designate an appropriate return value for the call.  Only the
        # awaitable wrapper is needed here; a preceding bare response
        # assignment was dead code (immediately overwritten) and was removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListIssuesResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_issues(parent="parent_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_list_issues_flattened_error_async():
    """Verify the async list_issues raises ValueError for request-object plus flattened kwargs."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.list_issues(
            contact_center_insights.ListIssuesRequest(), parent="parent_value",
        )
def test_update_issue(
    transport: str = "grpc", request_type=contact_center_insights.UpdateIssueRequest
):
    """Exercise update_issue over a mocked gRPC stub and check the request type and Issue response fields."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_issue), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Issue(
            name="name_value", display_name="display_name_value",
        )
        response = client.update_issue(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.UpdateIssueRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Issue)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
def test_update_issue_from_dict():
    """Re-run the update_issue test with a dict request to cover dict-to-proto coercion."""
    test_update_issue(request_type=dict)
def test_update_issue_empty_call():
    """Verify calling update_issue with no arguments sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_issue), "__call__") as call:
        client.update_issue()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.UpdateIssueRequest()
@pytest.mark.asyncio
async def test_update_issue_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.UpdateIssueRequest,
):
    """Exercise the async update_issue over a mocked stub and check request type and Issue response fields."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_issue), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Issue(name="name_value", display_name="display_name_value",)
        )
        response = await client.update_issue(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.UpdateIssueRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Issue)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
@pytest.mark.asyncio
async def test_update_issue_async_from_dict():
    """Re-run the async update_issue test with a dict request."""
    await test_update_issue_async(request_type=dict)
def test_update_issue_field_headers():
    """Verify update_issue sends the routing header derived from ``request.issue.name``."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.UpdateIssueRequest()
    request.issue.name = "issue.name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_issue), "__call__") as call:
        call.return_value = resources.Issue()
        client.update_issue(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "issue.name=issue.name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_update_issue_field_headers_async():
    """Verify the async update_issue sends the routing header derived from ``request.issue.name``."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.UpdateIssueRequest()
    request.issue.name = "issue.name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_issue), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue())
        await client.update_issue(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "issue.name=issue.name/value",) in kw["metadata"]
def test_update_issue_flattened():
    """Verify update_issue maps the flattened ``issue`` and ``update_mask`` kwargs onto the request."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_issue), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Issue()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_issue(
            issue=resources.Issue(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].issue
        mock_val = resources.Issue(name="name_value")
        assert arg == mock_val
        arg = args[0].update_mask
        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
        assert arg == mock_val
def test_update_issue_flattened_error():
    """Verify update_issue raises ValueError when given both a request object and flattened kwargs."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_issue(
            contact_center_insights.UpdateIssueRequest(),
            issue=resources.Issue(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
@pytest.mark.asyncio
async def test_update_issue_flattened_async():
    """Verify the async update_issue maps the flattened ``issue`` and ``update_mask`` kwargs onto the request."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_issue), "__call__") as call:
        # Designate an appropriate return value for the call.  Only the
        # awaitable wrapper is needed here; a preceding bare Issue()
        # assignment was dead code (immediately overwritten) and was removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Issue())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_issue(
            issue=resources.Issue(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].issue
        mock_val = resources.Issue(name="name_value")
        assert arg == mock_val
        arg = args[0].update_mask
        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
        assert arg == mock_val
@pytest.mark.asyncio
async def test_update_issue_flattened_error_async():
    """Verify the async update_issue raises ValueError for request-object plus flattened kwargs."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.update_issue(
            contact_center_insights.UpdateIssueRequest(),
            issue=resources.Issue(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
def test_calculate_issue_model_stats(
    transport: str = "grpc",
    request_type=contact_center_insights.CalculateIssueModelStatsRequest,
):
    """Exercise calculate_issue_model_stats over a mocked gRPC stub and check the request and response types."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.calculate_issue_model_stats), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = contact_center_insights.CalculateIssueModelStatsResponse()
        response = client.calculate_issue_model_stats(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(
        response, contact_center_insights.CalculateIssueModelStatsResponse
    )
def test_calculate_issue_model_stats_from_dict():
    """Re-run the calculate_issue_model_stats test with a dict request to cover dict-to-proto coercion."""
    test_calculate_issue_model_stats(request_type=dict)
def test_calculate_issue_model_stats_empty_call():
    """Verify calling calculate_issue_model_stats with no arguments sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.calculate_issue_model_stats), "__call__"
    ) as call:
        client.calculate_issue_model_stats()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest()
@pytest.mark.asyncio
async def test_calculate_issue_model_stats_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.CalculateIssueModelStatsRequest,
):
    """Exercise the async calculate_issue_model_stats over a mocked stub and check request and response types."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.calculate_issue_model_stats), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.CalculateIssueModelStatsResponse()
        )
        response = await client.calculate_issue_model_stats(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.CalculateIssueModelStatsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(
        response, contact_center_insights.CalculateIssueModelStatsResponse
    )
@pytest.mark.asyncio
async def test_calculate_issue_model_stats_async_from_dict():
    """Exercise the async code path with a dict-typed request payload."""
    await test_calculate_issue_model_stats_async(request_type=dict)
def test_calculate_issue_model_stats_field_headers():
    """URI-routed fields must be propagated as x-goog-request-params metadata."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the routed field a non-empty value so the header is emitted.
    request = contact_center_insights.CalculateIssueModelStatsRequest()
    request.issue_model = "issue_model/value"

    stub = type(client.transport.calculate_issue_model_stats)
    with mock.patch.object(stub, "__call__") as rpc:
        rpc.return_value = contact_center_insights.CalculateIssueModelStatsResponse()
        client.calculate_issue_model_stats(request)

        # The stub received exactly our request ...
        assert len(rpc.mock_calls) == 1
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

        # ... and the routing header rode along in the metadata.
        assert ("x-goog-request-params", "issue_model=issue_model/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_calculate_issue_model_stats_field_headers_async():
    """URI-routed fields must be propagated as metadata (async client)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the routed field a non-empty value so the header is emitted.
    request = contact_center_insights.CalculateIssueModelStatsRequest()
    request.issue_model = "issue_model/value"

    stub = type(client.transport.calculate_issue_model_stats)
    with mock.patch.object(stub, "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.CalculateIssueModelStatsResponse()
        )
        await client.calculate_issue_model_stats(request)

        # The stub received exactly our request ...
        assert len(rpc.mock_calls)
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

        # ... and the routing header rode along in the metadata.
        assert ("x-goog-request-params", "issue_model=issue_model/value",) in call_kwargs["metadata"]
def test_calculate_issue_model_stats_flattened():
    """Flattened keyword arguments are folded into the request object."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    stub = type(client.transport.calculate_issue_model_stats)
    with mock.patch.object(stub, "__call__") as rpc:
        # Fake the server-side response.
        rpc.return_value = contact_center_insights.CalculateIssueModelStatsResponse()

        # Invoke with a truthy keyword argument for each flattened field.
        client.calculate_issue_model_stats(issue_model="issue_model_value",)

        # The flattened value landed on the outgoing request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].issue_model == "issue_model_value"
def test_calculate_issue_model_stats_flattened_error():
    """Passing a request object together with flattened fields must raise."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        client.calculate_issue_model_stats(
            contact_center_insights.CalculateIssueModelStatsRequest(),
            issue_model="issue_model_value",
        )
@pytest.mark.asyncio
async def test_calculate_issue_model_stats_flattened_async():
    """Flattened keyword arguments are folded into the request (async client)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.calculate_issue_model_stats), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  (A redundant
        # plain-response assignment that was immediately overwritten by the
        # awaitable fake below has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.CalculateIssueModelStatsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.calculate_issue_model_stats(
            issue_model="issue_model_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].issue_model
        mock_val = "issue_model_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_calculate_issue_model_stats_flattened_error_async():
    """Passing a request object together with flattened fields must raise."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        await client.calculate_issue_model_stats(
            contact_center_insights.CalculateIssueModelStatsRequest(),
            issue_model="issue_model_value",
        )
def test_create_phrase_matcher(
    transport: str = "grpc",
    request_type=contact_center_insights.CreatePhraseMatcherRequest,
):
    """The sync client forwards the request and maps every response field."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 makes every field optional and the transport is mocked below,
    # so an empty request object is all we need.
    request = request_type()

    stub = type(client.transport.create_phrase_matcher)
    with mock.patch.object(stub, "__call__") as rpc:
        # Fake a fully-populated server-side response.
        rpc.return_value = resources.PhraseMatcher(
            name="name_value",
            revision_id="revision_id_value",
            version_tag="version_tag_value",
            display_name="display_name_value",
            type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF,
            active=True,
            role_match=resources.ConversationParticipant.Role.HUMAN_AGENT,
        )
        response = client.create_phrase_matcher(request)

        # Exactly one underlying gRPC invocation, carrying the expected request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.CreatePhraseMatcherRequest()

    # Every field of the faked response survives the round trip.
    assert isinstance(response, resources.PhraseMatcher)
    assert response.name == "name_value"
    assert response.revision_id == "revision_id_value"
    assert response.version_tag == "version_tag_value"
    assert response.display_name == "display_name_value"
    assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF
    assert response.active is True
    assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT
def test_create_phrase_matcher_from_dict():
    """Exercise the same code path with a dict-typed request payload."""
    test_create_phrase_matcher(request_type=dict)
def test_create_phrase_matcher_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    stub = type(client.transport.create_phrase_matcher)
    with mock.patch.object(stub, "__call__") as rpc:
        client.create_phrase_matcher()
        rpc.assert_called()
        # A default-constructed request is what reaches the transport.
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.CreatePhraseMatcherRequest()
@pytest.mark.asyncio
async def test_create_phrase_matcher_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.CreatePhraseMatcherRequest,
):
    """The async client forwards the request and maps every response field."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 makes every field optional and the transport is mocked below,
    # so an empty request object is all we need.
    request = request_type()

    stub = type(client.transport.create_phrase_matcher)
    with mock.patch.object(stub, "__call__") as rpc:
        # Fake a fully-populated awaitable server-side response.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.PhraseMatcher(
                name="name_value",
                revision_id="revision_id_value",
                version_tag="version_tag_value",
                display_name="display_name_value",
                type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF,
                active=True,
                role_match=resources.ConversationParticipant.Role.HUMAN_AGENT,
            )
        )
        response = await client.create_phrase_matcher(request)

        # The underlying gRPC stub was invoked with the expected request.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.CreatePhraseMatcherRequest()

    # Every field of the faked response survives the round trip.
    assert isinstance(response, resources.PhraseMatcher)
    assert response.name == "name_value"
    assert response.revision_id == "revision_id_value"
    assert response.version_tag == "version_tag_value"
    assert response.display_name == "display_name_value"
    assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF
    assert response.active is True
    assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT
@pytest.mark.asyncio
async def test_create_phrase_matcher_async_from_dict():
    """Exercise the async code path with a dict-typed request payload."""
    await test_create_phrase_matcher_async(request_type=dict)
def test_create_phrase_matcher_field_headers():
    """URI-routed fields must be propagated as x-goog-request-params metadata."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the routed field a non-empty value so the header is emitted.
    request = contact_center_insights.CreatePhraseMatcherRequest()
    request.parent = "parent/value"

    stub = type(client.transport.create_phrase_matcher)
    with mock.patch.object(stub, "__call__") as rpc:
        rpc.return_value = resources.PhraseMatcher()
        client.create_phrase_matcher(request)

        # The stub received exactly our request ...
        assert len(rpc.mock_calls) == 1
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

        # ... and the routing header rode along in the metadata.
        assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_create_phrase_matcher_field_headers_async():
    """URI-routed fields must be propagated as metadata (async client)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the routed field a non-empty value so the header is emitted.
    request = contact_center_insights.CreatePhraseMatcherRequest()
    request.parent = "parent/value"

    stub = type(client.transport.create_phrase_matcher)
    with mock.patch.object(stub, "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.PhraseMatcher()
        )
        await client.create_phrase_matcher(request)

        # The stub received exactly our request ...
        assert len(rpc.mock_calls)
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

        # ... and the routing header rode along in the metadata.
        assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
def test_create_phrase_matcher_flattened():
    """Flattened keyword arguments are folded into the request object."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    stub = type(client.transport.create_phrase_matcher)
    with mock.patch.object(stub, "__call__") as rpc:
        # Fake the server-side response.
        rpc.return_value = resources.PhraseMatcher()

        # Invoke with a truthy keyword argument for each flattened field.
        client.create_phrase_matcher(
            parent="parent_value",
            phrase_matcher=resources.PhraseMatcher(name="name_value"),
        )

        # Each flattened value landed on the outgoing request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].parent == "parent_value"
        assert call_args[0].phrase_matcher == resources.PhraseMatcher(name="name_value")
def test_create_phrase_matcher_flattened_error():
    """Passing a request object together with flattened fields must raise."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        client.create_phrase_matcher(
            contact_center_insights.CreatePhraseMatcherRequest(),
            parent="parent_value",
            phrase_matcher=resources.PhraseMatcher(name="name_value"),
        )
@pytest.mark.asyncio
async def test_create_phrase_matcher_flattened_async():
    """Flattened keyword arguments are folded into the request (async client)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_phrase_matcher), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  (A redundant
        # plain-response assignment that was immediately overwritten by the
        # awaitable fake below has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.PhraseMatcher()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_phrase_matcher(
            parent="parent_value",
            phrase_matcher=resources.PhraseMatcher(name="name_value"),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].phrase_matcher
        mock_val = resources.PhraseMatcher(name="name_value")
        assert arg == mock_val
@pytest.mark.asyncio
async def test_create_phrase_matcher_flattened_error_async():
    """Passing a request object together with flattened fields must raise."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        await client.create_phrase_matcher(
            contact_center_insights.CreatePhraseMatcherRequest(),
            parent="parent_value",
            phrase_matcher=resources.PhraseMatcher(name="name_value"),
        )
def test_get_phrase_matcher(
    transport: str = "grpc",
    request_type=contact_center_insights.GetPhraseMatcherRequest,
):
    """The sync client forwards the request and maps every response field."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 makes every field optional and the transport is mocked below,
    # so an empty request object is all we need.
    request = request_type()

    stub = type(client.transport.get_phrase_matcher)
    with mock.patch.object(stub, "__call__") as rpc:
        # Fake a fully-populated server-side response.
        rpc.return_value = resources.PhraseMatcher(
            name="name_value",
            revision_id="revision_id_value",
            version_tag="version_tag_value",
            display_name="display_name_value",
            type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF,
            active=True,
            role_match=resources.ConversationParticipant.Role.HUMAN_AGENT,
        )
        response = client.get_phrase_matcher(request)

        # Exactly one underlying gRPC invocation, carrying the expected request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.GetPhraseMatcherRequest()

    # Every field of the faked response survives the round trip.
    assert isinstance(response, resources.PhraseMatcher)
    assert response.name == "name_value"
    assert response.revision_id == "revision_id_value"
    assert response.version_tag == "version_tag_value"
    assert response.display_name == "display_name_value"
    assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF
    assert response.active is True
    assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT
def test_get_phrase_matcher_from_dict():
    """Exercise the same code path with a dict-typed request payload."""
    test_get_phrase_matcher(request_type=dict)
def test_get_phrase_matcher_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    stub = type(client.transport.get_phrase_matcher)
    with mock.patch.object(stub, "__call__") as rpc:
        client.get_phrase_matcher()
        rpc.assert_called()
        # A default-constructed request is what reaches the transport.
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.GetPhraseMatcherRequest()
@pytest.mark.asyncio
async def test_get_phrase_matcher_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.GetPhraseMatcherRequest,
):
    """The async client forwards the request and maps every response field."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 makes every field optional and the transport is mocked below,
    # so an empty request object is all we need.
    request = request_type()

    stub = type(client.transport.get_phrase_matcher)
    with mock.patch.object(stub, "__call__") as rpc:
        # Fake a fully-populated awaitable server-side response.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.PhraseMatcher(
                name="name_value",
                revision_id="revision_id_value",
                version_tag="version_tag_value",
                display_name="display_name_value",
                type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF,
                active=True,
                role_match=resources.ConversationParticipant.Role.HUMAN_AGENT,
            )
        )
        response = await client.get_phrase_matcher(request)

        # The underlying gRPC stub was invoked with the expected request.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.GetPhraseMatcherRequest()

    # Every field of the faked response survives the round trip.
    assert isinstance(response, resources.PhraseMatcher)
    assert response.name == "name_value"
    assert response.revision_id == "revision_id_value"
    assert response.version_tag == "version_tag_value"
    assert response.display_name == "display_name_value"
    assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF
    assert response.active is True
    assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT
@pytest.mark.asyncio
async def test_get_phrase_matcher_async_from_dict():
    """Exercise the async code path with a dict-typed request payload."""
    await test_get_phrase_matcher_async(request_type=dict)
def test_get_phrase_matcher_field_headers():
    """URI-routed fields must be propagated as x-goog-request-params metadata."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the routed field a non-empty value so the header is emitted.
    request = contact_center_insights.GetPhraseMatcherRequest()
    request.name = "name/value"

    stub = type(client.transport.get_phrase_matcher)
    with mock.patch.object(stub, "__call__") as rpc:
        rpc.return_value = resources.PhraseMatcher()
        client.get_phrase_matcher(request)

        # The stub received exactly our request ...
        assert len(rpc.mock_calls) == 1
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

        # ... and the routing header rode along in the metadata.
        assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_get_phrase_matcher_field_headers_async():
    """URI-routed fields must be propagated as metadata (async client)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the routed field a non-empty value so the header is emitted.
    request = contact_center_insights.GetPhraseMatcherRequest()
    request.name = "name/value"

    stub = type(client.transport.get_phrase_matcher)
    with mock.patch.object(stub, "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.PhraseMatcher()
        )
        await client.get_phrase_matcher(request)

        # The stub received exactly our request ...
        assert len(rpc.mock_calls)
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

        # ... and the routing header rode along in the metadata.
        assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
def test_get_phrase_matcher_flattened():
    """Flattened keyword arguments are folded into the request object."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    stub = type(client.transport.get_phrase_matcher)
    with mock.patch.object(stub, "__call__") as rpc:
        # Fake the server-side response.
        rpc.return_value = resources.PhraseMatcher()

        # Invoke with a truthy keyword argument for each flattened field.
        client.get_phrase_matcher(name="name_value",)

        # The flattened value landed on the outgoing request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].name == "name_value"
def test_get_phrase_matcher_flattened_error():
    """Passing a request object together with flattened fields must raise."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        client.get_phrase_matcher(
            contact_center_insights.GetPhraseMatcherRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_get_phrase_matcher_flattened_async():
    """Flattened keyword arguments are folded into the request (async client)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_phrase_matcher), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  (A redundant
        # plain-response assignment that was immediately overwritten by the
        # awaitable fake below has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.PhraseMatcher()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_phrase_matcher(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_get_phrase_matcher_flattened_error_async():
    """Passing a request object together with flattened fields must raise."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        await client.get_phrase_matcher(
            contact_center_insights.GetPhraseMatcherRequest(), name="name_value",
        )
def test_list_phrase_matchers(
    transport: str = "grpc",
    request_type=contact_center_insights.ListPhraseMatchersRequest,
):
    """The sync client forwards the request and wraps the response in a pager."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 makes every field optional and the transport is mocked below,
    # so an empty request object is all we need.
    request = request_type()

    stub = type(client.transport.list_phrase_matchers)
    with mock.patch.object(stub, "__call__") as rpc:
        # Fake the server-side response.
        rpc.return_value = contact_center_insights.ListPhraseMatchersResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_phrase_matchers(request)

        # Exactly one underlying gRPC invocation, carrying the expected request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.ListPhraseMatchersRequest()

    # The wrapper returns a pager that exposes the page token.
    assert isinstance(response, pagers.ListPhraseMatchersPager)
    assert response.next_page_token == "next_page_token_value"
def test_list_phrase_matchers_from_dict():
    """Exercise the same code path with a dict-typed request payload."""
    test_list_phrase_matchers(request_type=dict)
def test_list_phrase_matchers_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    stub = type(client.transport.list_phrase_matchers)
    with mock.patch.object(stub, "__call__") as rpc:
        client.list_phrase_matchers()
        rpc.assert_called()
        # A default-constructed request is what reaches the transport.
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.ListPhraseMatchersRequest()
@pytest.mark.asyncio
async def test_list_phrase_matchers_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.ListPhraseMatchersRequest,
):
    """The async client forwards the request and wraps the response in a pager."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 makes every field optional and the transport is mocked below,
    # so an empty request object is all we need.
    request = request_type()

    stub = type(client.transport.list_phrase_matchers)
    with mock.patch.object(stub, "__call__") as rpc:
        # Fake the awaitable server-side response.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListPhraseMatchersResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_phrase_matchers(request)

        # The underlying gRPC stub was invoked with the expected request.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == contact_center_insights.ListPhraseMatchersRequest()

    # The wrapper returns an async pager that exposes the page token.
    assert isinstance(response, pagers.ListPhraseMatchersAsyncPager)
    assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_phrase_matchers_async_from_dict():
    """Exercise the async code path with a dict-typed request payload."""
    await test_list_phrase_matchers_async(request_type=dict)
def test_list_phrase_matchers_field_headers():
    """URI-routed fields must be propagated as x-goog-request-params metadata."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the routed field a non-empty value so the header is emitted.
    request = contact_center_insights.ListPhraseMatchersRequest()
    request.parent = "parent/value"

    stub = type(client.transport.list_phrase_matchers)
    with mock.patch.object(stub, "__call__") as rpc:
        rpc.return_value = contact_center_insights.ListPhraseMatchersResponse()
        client.list_phrase_matchers(request)

        # The stub received exactly our request ...
        assert len(rpc.mock_calls) == 1
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

        # ... and the routing header rode along in the metadata.
        assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_list_phrase_matchers_field_headers_async():
    """URI-routed fields must be propagated as metadata (async client)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Give the routed field a non-empty value so the header is emitted.
    request = contact_center_insights.ListPhraseMatchersRequest()
    request.parent = "parent/value"

    stub = type(client.transport.list_phrase_matchers)
    with mock.patch.object(stub, "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListPhraseMatchersResponse()
        )
        await client.list_phrase_matchers(request)

        # The stub received exactly our request ...
        assert len(rpc.mock_calls)
        _, call_args, call_kwargs = rpc.mock_calls[0]
        assert call_args[0] == request

        # ... and the routing header rode along in the metadata.
        assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
def test_list_phrase_matchers_flattened():
    """Flattened keyword arguments are folded into the request object."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    stub = type(client.transport.list_phrase_matchers)
    with mock.patch.object(stub, "__call__") as rpc:
        # Fake the server-side response.
        rpc.return_value = contact_center_insights.ListPhraseMatchersResponse()

        # Invoke with a truthy keyword argument for each flattened field.
        client.list_phrase_matchers(parent="parent_value",)

        # The flattened value landed on the outgoing request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].parent == "parent_value"
def test_list_phrase_matchers_flattened_error():
    """Passing a request object together with flattened fields must raise."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        client.list_phrase_matchers(
            contact_center_insights.ListPhraseMatchersRequest(), parent="parent_value",
        )
@pytest.mark.asyncio
async def test_list_phrase_matchers_flattened_async():
    """Flattened keyword arguments are folded into the request (async client)."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_phrase_matchers), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  (A redundant
        # plain-response assignment that was immediately overwritten by the
        # awaitable fake below has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.ListPhraseMatchersResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_phrase_matchers(parent="parent_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_list_phrase_matchers_flattened_error_async():
    """Passing a request object together with flattened fields must raise."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        await client.list_phrase_matchers(
            contact_center_insights.ListPhraseMatchersRequest(), parent="parent_value",
        )
def test_list_phrase_matchers_pager():
    """The sync pager walks every page, yielding each PhraseMatcher once."""
    client = ContactCenterInsightsClient(
        # Instantiate the credentials (was: the bare class, missing "()"),
        # matching every other test in this file.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_phrase_matchers), "__call__"
    ) as call:
        # Set the response to a series of pages: 3 + 0 + 1 + 2 items.
        call.side_effect = (
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[
                    resources.PhraseMatcher(),
                    resources.PhraseMatcher(),
                    resources.PhraseMatcher(),
                ],
                next_page_token="abc",
            ),
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[], next_page_token="def",
            ),
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[resources.PhraseMatcher(),], next_page_token="ghi",
            ),
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[resources.PhraseMatcher(), resources.PhraseMatcher(),],
            ),
            RuntimeError,
        )

        # The pager must retain the routing-header metadata for page fetches.
        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_phrase_matchers(request={})

        assert pager._metadata == metadata

        # Iterating the pager flattens the pages into individual items.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, resources.PhraseMatcher) for i in results)
def test_list_phrase_matchers_pages():
    """Page-level iteration must expose each raw page and its token."""
    client = ContactCenterInsightsClient(
        # Bug fix: instantiate the credentials — the original passed the
        # AnonymousCredentials *class* object (missing call parentheses).
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_phrase_matchers), "__call__"
    ) as call:
        # Set the response to a series of pages with known tokens.
        call.side_effect = (
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[
                    resources.PhraseMatcher(),
                    resources.PhraseMatcher(),
                    resources.PhraseMatcher(),
                ],
                next_page_token="abc",
            ),
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[], next_page_token="def",
            ),
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[resources.PhraseMatcher(),], next_page_token="ghi",
            ),
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[resources.PhraseMatcher(), resources.PhraseMatcher(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_phrase_matchers(request={}).pages)
        # Each page carries the token of the mocked response it wraps;
        # the final page's token is the proto default "".
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_phrase_matchers_async_pager():
    """The async pager must flatten all mocked pages into one item stream."""
    client = ContactCenterInsightsAsyncClient(
        # Bug fix: instantiate the credentials — the original passed the
        # AnonymousCredentials *class* object (missing call parentheses).
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_phrase_matchers),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages: 3 + 0 + 1 + 2 items.
        call.side_effect = (
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[
                    resources.PhraseMatcher(),
                    resources.PhraseMatcher(),
                    resources.PhraseMatcher(),
                ],
                next_page_token="abc",
            ),
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[], next_page_token="def",
            ),
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[resources.PhraseMatcher(),], next_page_token="ghi",
            ),
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[resources.PhraseMatcher(), resources.PhraseMatcher(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_phrase_matchers(request={},)
        # The pager exposes the first page's token up front.
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        # All items from all pages are surfaced.
        assert len(responses) == 6
        assert all(isinstance(i, resources.PhraseMatcher) for i in responses)
@pytest.mark.asyncio
async def test_list_phrase_matchers_async_pages():
    """Async page-level iteration must expose each raw page and its token."""
    client = ContactCenterInsightsAsyncClient(
        # Bug fix: instantiate the credentials — the original passed the
        # AnonymousCredentials *class* object (missing call parentheses).
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_phrase_matchers),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages with known tokens.
        call.side_effect = (
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[
                    resources.PhraseMatcher(),
                    resources.PhraseMatcher(),
                    resources.PhraseMatcher(),
                ],
                next_page_token="abc",
            ),
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[], next_page_token="def",
            ),
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[resources.PhraseMatcher(),], next_page_token="ghi",
            ),
            contact_center_insights.ListPhraseMatchersResponse(
                phrase_matchers=[resources.PhraseMatcher(), resources.PhraseMatcher(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_phrase_matchers(request={})).pages:
            pages.append(page_)
        # Each page carries the token of the mocked response it wraps;
        # the final page's token is the proto default "".
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
def test_delete_phrase_matcher(
    transport: str = "grpc",
    request_type=contact_center_insights.DeletePhraseMatcherRequest,
):
    """delete_phrase_matcher forwards the request and surfaces None."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are all optional at runtime and the API is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.delete_phrase_matcher), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = None
        response = client.delete_phrase_matcher(request)

        # Exactly one RPC was issued, carrying the expected request object.
        assert rpc_mock.call_count == 1
        args, _ = rpc_mock.call_args
        assert args[0] == contact_center_insights.DeletePhraseMatcherRequest()

    # The client surfaces the (empty) response.
    assert response is None
def test_delete_phrase_matcher_from_dict():
    # Re-run the base test with a dict-typed request to cover dict coercion.
    test_delete_phrase_matcher(request_type=dict)
def test_delete_phrase_matcher_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields
    (i.e. request == None) still produces a default request."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.delete_phrase_matcher), "__call__"
    ) as rpc_mock:
        client.delete_phrase_matcher()
        rpc_mock.assert_called()
        # The client must have synthesized a default request object.
        args, _ = rpc_mock.call_args
        assert args[0] == contact_center_insights.DeletePhraseMatcherRequest()
@pytest.mark.asyncio
async def test_delete_phrase_matcher_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.DeletePhraseMatcherRequest,
):
    """Async variant: the request is forwarded and None is surfaced."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are all optional at runtime and the API is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the gRPC stub method and fake the awaited round trip.
    with mock.patch.object(
        type(client.transport.delete_phrase_matcher), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_phrase_matcher(request)

        # The stub saw the expected request object.
        assert rpc_mock.call_args is not None
        args, _ = rpc_mock.call_args
        assert args[0] == contact_center_insights.DeletePhraseMatcherRequest()

    # The client surfaces the (empty) response.
    assert response is None
@pytest.mark.asyncio
async def test_delete_phrase_matcher_async_from_dict():
    # Re-run the async base test with a dict-typed request to cover coercion.
    await test_delete_phrase_matcher_async(request_type=dict)
def test_delete_phrase_matcher_field_headers():
    """URI-bound fields must be echoed in the x-goog-request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the routed field with a non-empty value so a header is built.
    request = contact_center_insights.DeletePhraseMatcherRequest()
    request.name = "name/value"
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.delete_phrase_matcher), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = None
        client.delete_phrase_matcher(request)

        # The request object reached the stub unchanged.
        assert rpc_mock.call_count == 1
        args, kwargs = rpc_mock.call_args
        assert args[0] == request

        # The routing header was attached to the call metadata.
        assert ("x-goog-request-params", "name=name/value",) in kwargs["metadata"]
@pytest.mark.asyncio
async def test_delete_phrase_matcher_field_headers_async():
    """Async variant: routed fields are echoed as request-params metadata."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the routed field with a non-empty value so a header is built.
    request = contact_center_insights.DeletePhraseMatcherRequest()
    request.name = "name/value"
    # Patch the gRPC stub method and fake the awaited round trip.
    with mock.patch.object(
        type(client.transport.delete_phrase_matcher), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_phrase_matcher(request)

        # The request object reached the stub unchanged.
        assert rpc_mock.call_args is not None
        args, kwargs = rpc_mock.call_args
        assert args[0] == request

        # The routing header was attached to the call metadata.
        assert ("x-goog-request-params", "name=name/value",) in kwargs["metadata"]
def test_delete_phrase_matcher_flattened():
    """Flattened keyword arguments must populate the request object."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.delete_phrase_matcher), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = None
        # Invoke with a truthy keyword value for each flattened field.
        client.delete_phrase_matcher(name="name_value",)

        # The flattened value landed on the generated request object.
        assert rpc_mock.call_count == 1
        args, _ = rpc_mock.call_args
        assert args[0].name == "name_value"
def test_delete_phrase_matcher_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Passing both a request object and flattened fields is rejected.
    with pytest.raises(ValueError):
        client.delete_phrase_matcher(
            contact_center_insights.DeletePhraseMatcherRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_delete_phrase_matcher_flattened_async():
    """Async variant: flattened keyword arguments must populate the request."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_phrase_matcher), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (Removed the dead `call.return_value = None` assignment that was
        # immediately overwritten by the line below.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_phrase_matcher(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_phrase_matcher_flattened_error_async():
    """Async variant: mixing a request object with flattened kwargs fails."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Passing both a request object and flattened fields is rejected.
    with pytest.raises(ValueError):
        await client.delete_phrase_matcher(
            contact_center_insights.DeletePhraseMatcherRequest(), name="name_value",
        )
def test_update_phrase_matcher(
    transport: str = "grpc",
    request_type=contact_center_insights.UpdatePhraseMatcherRequest,
):
    """update_phrase_matcher forwards the request and unpacks the response."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are all optional at runtime and the API is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.update_phrase_matcher), "__call__"
    ) as rpc_mock:
        # Fake a fully-populated PhraseMatcher coming back from the service.
        rpc_mock.return_value = resources.PhraseMatcher(
            name="name_value",
            revision_id="revision_id_value",
            version_tag="version_tag_value",
            display_name="display_name_value",
            type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF,
            active=True,
            role_match=resources.ConversationParticipant.Role.HUMAN_AGENT,
        )
        response = client.update_phrase_matcher(request)

        # Exactly one RPC was issued, carrying the expected request object.
        assert rpc_mock.call_count == 1
        args, _ = rpc_mock.call_args
        assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest()

    # Every faked field is surfaced on the returned PhraseMatcher.
    assert isinstance(response, resources.PhraseMatcher)
    assert response.name == "name_value"
    assert response.revision_id == "revision_id_value"
    assert response.version_tag == "version_tag_value"
    assert response.display_name == "display_name_value"
    assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF
    assert response.active is True
    assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT
def test_update_phrase_matcher_from_dict():
    # Re-run the base test with a dict-typed request to cover dict coercion.
    test_update_phrase_matcher(request_type=dict)
def test_update_phrase_matcher_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields
    (i.e. request == None) still produces a default request."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.update_phrase_matcher), "__call__"
    ) as rpc_mock:
        client.update_phrase_matcher()
        rpc_mock.assert_called()
        # The client must have synthesized a default request object.
        args, _ = rpc_mock.call_args
        assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest()
@pytest.mark.asyncio
async def test_update_phrase_matcher_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.UpdatePhraseMatcherRequest,
):
    """Async variant: request is forwarded and the response is unpacked."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are all optional at runtime and the API is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the gRPC stub method and fake the awaited round trip.
    with mock.patch.object(
        type(client.transport.update_phrase_matcher), "__call__"
    ) as rpc_mock:
        # Fake a fully-populated PhraseMatcher coming back from the service.
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.PhraseMatcher(
                name="name_value",
                revision_id="revision_id_value",
                version_tag="version_tag_value",
                display_name="display_name_value",
                type_=resources.PhraseMatcher.PhraseMatcherType.ALL_OF,
                active=True,
                role_match=resources.ConversationParticipant.Role.HUMAN_AGENT,
            )
        )
        response = await client.update_phrase_matcher(request)

        # The stub saw the expected request object.
        assert rpc_mock.call_args is not None
        args, _ = rpc_mock.call_args
        assert args[0] == contact_center_insights.UpdatePhraseMatcherRequest()

    # Every faked field is surfaced on the returned PhraseMatcher.
    assert isinstance(response, resources.PhraseMatcher)
    assert response.name == "name_value"
    assert response.revision_id == "revision_id_value"
    assert response.version_tag == "version_tag_value"
    assert response.display_name == "display_name_value"
    assert response.type_ == resources.PhraseMatcher.PhraseMatcherType.ALL_OF
    assert response.active is True
    assert response.role_match == resources.ConversationParticipant.Role.HUMAN_AGENT
@pytest.mark.asyncio
async def test_update_phrase_matcher_async_from_dict():
    # Re-run the async base test with a dict-typed request to cover coercion.
    await test_update_phrase_matcher_async(request_type=dict)
def test_update_phrase_matcher_field_headers():
    """URI-bound fields must be echoed in the x-goog-request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the routed (nested) field so a routing header is built.
    request = contact_center_insights.UpdatePhraseMatcherRequest()
    request.phrase_matcher.name = "phrase_matcher.name/value"
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.update_phrase_matcher), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = resources.PhraseMatcher()
        client.update_phrase_matcher(request)

        # The request object reached the stub unchanged.
        assert rpc_mock.call_count == 1
        args, kwargs = rpc_mock.call_args
        assert args[0] == request

        # The routing header was attached to the call metadata.
        assert (
            "x-goog-request-params",
            "phrase_matcher.name=phrase_matcher.name/value",
        ) in kwargs["metadata"]
@pytest.mark.asyncio
async def test_update_phrase_matcher_field_headers_async():
    """Async variant: routed fields are echoed as request-params metadata."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the routed (nested) field so a routing header is built.
    request = contact_center_insights.UpdatePhraseMatcherRequest()
    request.phrase_matcher.name = "phrase_matcher.name/value"
    # Patch the gRPC stub method and fake the awaited round trip.
    with mock.patch.object(
        type(client.transport.update_phrase_matcher), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.PhraseMatcher()
        )
        await client.update_phrase_matcher(request)

        # The request object reached the stub unchanged.
        assert rpc_mock.call_args is not None
        args, kwargs = rpc_mock.call_args
        assert args[0] == request

        # The routing header was attached to the call metadata.
        assert (
            "x-goog-request-params",
            "phrase_matcher.name=phrase_matcher.name/value",
        ) in kwargs["metadata"]
def test_update_phrase_matcher_flattened():
    """Flattened keyword arguments must populate the request object."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.update_phrase_matcher), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = resources.PhraseMatcher()
        # Invoke with a truthy keyword value for each flattened field.
        client.update_phrase_matcher(
            phrase_matcher=resources.PhraseMatcher(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Both flattened values landed on the generated request object.
        assert rpc_mock.call_count == 1
        args, _ = rpc_mock.call_args
        assert args[0].phrase_matcher == resources.PhraseMatcher(name="name_value")
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
def test_update_phrase_matcher_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Passing both a request object and flattened fields is rejected.
    with pytest.raises(ValueError):
        client.update_phrase_matcher(
            contact_center_insights.UpdatePhraseMatcherRequest(),
            phrase_matcher=resources.PhraseMatcher(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
@pytest.mark.asyncio
async def test_update_phrase_matcher_flattened_async():
    """Async variant: flattened keyword arguments must populate the request."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_phrase_matcher), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (Removed the dead `call.return_value = resources.PhraseMatcher()`
        # assignment that was immediately overwritten by the line below.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.PhraseMatcher()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_phrase_matcher(
            phrase_matcher=resources.PhraseMatcher(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].phrase_matcher
        mock_val = resources.PhraseMatcher(name="name_value")
        assert arg == mock_val
        arg = args[0].update_mask
        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
        assert arg == mock_val
@pytest.mark.asyncio
async def test_update_phrase_matcher_flattened_error_async():
    """Async variant: mixing a request object with flattened kwargs fails."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Passing both a request object and flattened fields is rejected.
    with pytest.raises(ValueError):
        await client.update_phrase_matcher(
            contact_center_insights.UpdatePhraseMatcherRequest(),
            phrase_matcher=resources.PhraseMatcher(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
def test_calculate_stats(
    transport: str = "grpc", request_type=contact_center_insights.CalculateStatsRequest
):
    """calculate_stats forwards the request and unpacks the response."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are all optional at runtime and the API is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(type(client.transport.calculate_stats), "__call__") as rpc_mock:
        # Fake a populated stats response from the service.
        rpc_mock.return_value = contact_center_insights.CalculateStatsResponse(
            average_turn_count=1931, conversation_count=1955,
        )
        response = client.calculate_stats(request)

        # Exactly one RPC was issued, carrying the expected request object.
        assert rpc_mock.call_count == 1
        args, _ = rpc_mock.call_args
        assert args[0] == contact_center_insights.CalculateStatsRequest()

    # The faked fields are surfaced on the returned response.
    assert isinstance(response, contact_center_insights.CalculateStatsResponse)
    assert response.average_turn_count == 1931
    assert response.conversation_count == 1955
def test_calculate_stats_from_dict():
    # Re-run the base test with a dict-typed request to cover dict coercion.
    test_calculate_stats(request_type=dict)
def test_calculate_stats_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields
    (i.e. request == None) still produces a default request."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(type(client.transport.calculate_stats), "__call__") as rpc_mock:
        client.calculate_stats()
        rpc_mock.assert_called()
        # The client must have synthesized a default request object.
        args, _ = rpc_mock.call_args
        assert args[0] == contact_center_insights.CalculateStatsRequest()
@pytest.mark.asyncio
async def test_calculate_stats_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.CalculateStatsRequest,
):
    """Async variant: request is forwarded and the response is unpacked."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are all optional at runtime and the API is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the gRPC stub method and fake the awaited round trip.
    with mock.patch.object(type(client.transport.calculate_stats), "__call__") as rpc_mock:
        # Fake a populated stats response from the service.
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.CalculateStatsResponse(
                average_turn_count=1931, conversation_count=1955,
            )
        )
        response = await client.calculate_stats(request)

        # The stub saw the expected request object.
        assert rpc_mock.call_args is not None
        args, _ = rpc_mock.call_args
        assert args[0] == contact_center_insights.CalculateStatsRequest()

    # The faked fields are surfaced on the returned response.
    assert isinstance(response, contact_center_insights.CalculateStatsResponse)
    assert response.average_turn_count == 1931
    assert response.conversation_count == 1955
@pytest.mark.asyncio
async def test_calculate_stats_async_from_dict():
    # Re-run the async base test with a dict-typed request to cover coercion.
    await test_calculate_stats_async(request_type=dict)
def test_calculate_stats_field_headers():
    """URI-bound fields must be echoed in the x-goog-request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the routed field with a non-empty value so a header is built.
    request = contact_center_insights.CalculateStatsRequest()
    request.location = "location/value"
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(type(client.transport.calculate_stats), "__call__") as rpc_mock:
        rpc_mock.return_value = contact_center_insights.CalculateStatsResponse()
        client.calculate_stats(request)

        # The request object reached the stub unchanged.
        assert rpc_mock.call_count == 1
        args, kwargs = rpc_mock.call_args
        assert args[0] == request

        # The routing header was attached to the call metadata.
        assert ("x-goog-request-params", "location=location/value",) in kwargs["metadata"]
@pytest.mark.asyncio
async def test_calculate_stats_field_headers_async():
    """Async variant: routed fields are echoed as request-params metadata."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the routed field with a non-empty value so a header is built.
    request = contact_center_insights.CalculateStatsRequest()
    request.location = "location/value"
    # Patch the gRPC stub method and fake the awaited round trip.
    with mock.patch.object(type(client.transport.calculate_stats), "__call__") as rpc_mock:
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.CalculateStatsResponse()
        )
        await client.calculate_stats(request)

        # The request object reached the stub unchanged.
        assert rpc_mock.call_args is not None
        args, kwargs = rpc_mock.call_args
        assert args[0] == request

        # The routing header was attached to the call metadata.
        assert ("x-goog-request-params", "location=location/value",) in kwargs["metadata"]
def test_calculate_stats_flattened():
    """Flattened keyword arguments must populate the request object."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(type(client.transport.calculate_stats), "__call__") as rpc_mock:
        rpc_mock.return_value = contact_center_insights.CalculateStatsResponse()
        # Invoke with a truthy keyword value for each flattened field.
        client.calculate_stats(location="location_value",)

        # The flattened value landed on the generated request object.
        assert rpc_mock.call_count == 1
        args, _ = rpc_mock.call_args
        assert args[0].location == "location_value"
def test_calculate_stats_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Passing both a request object and flattened fields is rejected.
    with pytest.raises(ValueError):
        client.calculate_stats(
            contact_center_insights.CalculateStatsRequest(), location="location_value",
        )
@pytest.mark.asyncio
async def test_calculate_stats_flattened_async():
    """Async variant: flattened keyword arguments must populate the request."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.calculate_stats), "__call__") as call:
        # Designate an appropriate return value for the call.
        # (Removed the dead synchronous `call.return_value` assignment that
        # was immediately overwritten by the awaitable fake below.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            contact_center_insights.CalculateStatsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.calculate_stats(location="location_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].location
        mock_val = "location_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_calculate_stats_flattened_error_async():
    """Async variant: mixing a request object with flattened kwargs fails."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Passing both a request object and flattened fields is rejected.
    with pytest.raises(ValueError):
        await client.calculate_stats(
            contact_center_insights.CalculateStatsRequest(), location="location_value",
        )
def test_get_settings(
    transport: str = "grpc", request_type=contact_center_insights.GetSettingsRequest
):
    """get_settings forwards the request and unpacks the Settings response."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are all optional at runtime and the API is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(type(client.transport.get_settings), "__call__") as rpc_mock:
        # Fake a populated Settings resource from the service.
        rpc_mock.return_value = resources.Settings(
            name="name_value", language_code="language_code_value",
        )
        response = client.get_settings(request)

        # Exactly one RPC was issued, carrying the expected request object.
        assert rpc_mock.call_count == 1
        args, _ = rpc_mock.call_args
        assert args[0] == contact_center_insights.GetSettingsRequest()

    # The faked fields are surfaced on the returned Settings.
    assert isinstance(response, resources.Settings)
    assert response.name == "name_value"
    assert response.language_code == "language_code_value"
def test_get_settings_from_dict():
    # Re-run the base test with a dict-typed request to cover dict coercion.
    test_get_settings(request_type=dict)
def test_get_settings_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields
    (i.e. request == None) still produces a default request."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(type(client.transport.get_settings), "__call__") as rpc_mock:
        client.get_settings()
        rpc_mock.assert_called()
        # The client must have synthesized a default request object.
        args, _ = rpc_mock.call_args
        assert args[0] == contact_center_insights.GetSettingsRequest()
@pytest.mark.asyncio
async def test_get_settings_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.GetSettingsRequest,
):
    """Async variant: request is forwarded and the response is unpacked."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are all optional at runtime and the API is mocked,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the gRPC stub method and fake the awaited round trip.
    with mock.patch.object(type(client.transport.get_settings), "__call__") as rpc_mock:
        # Fake a populated Settings resource from the service.
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Settings(name="name_value", language_code="language_code_value",)
        )
        response = await client.get_settings(request)

        # The stub saw the expected request object.
        assert rpc_mock.call_args is not None
        args, _ = rpc_mock.call_args
        assert args[0] == contact_center_insights.GetSettingsRequest()

    # The faked fields are surfaced on the returned Settings.
    assert isinstance(response, resources.Settings)
    assert response.name == "name_value"
    assert response.language_code == "language_code_value"
@pytest.mark.asyncio
async def test_get_settings_async_from_dict():
    # Re-run the async base test with a dict-typed request to cover coercion.
    await test_get_settings_async(request_type=dict)
def test_get_settings_field_headers():
    """URI-bound fields must be echoed in the x-goog-request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the routed field with a non-empty value so a header is built.
    request = contact_center_insights.GetSettingsRequest()
    request.name = "name/value"
    # Patch the gRPC stub method so no network traffic occurs.
    with mock.patch.object(type(client.transport.get_settings), "__call__") as rpc_mock:
        rpc_mock.return_value = resources.Settings()
        client.get_settings(request)

        # The request object reached the stub unchanged.
        assert rpc_mock.call_count == 1
        args, kwargs = rpc_mock.call_args
        assert args[0] == request

        # The routing header was attached to the call metadata.
        assert ("x-goog-request-params", "name=name/value",) in kwargs["metadata"]
@pytest.mark.asyncio
async def test_get_settings_field_headers_async():
    """Async variant: routed fields are echoed as request-params metadata."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the routed field with a non-empty value so a header is built.
    request = contact_center_insights.GetSettingsRequest()
    request.name = "name/value"
    # Patch the gRPC stub method and fake the awaited round trip.
    with mock.patch.object(type(client.transport.get_settings), "__call__") as rpc_mock:
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings())
        await client.get_settings(request)

        # The request object reached the stub unchanged.
        assert rpc_mock.call_args is not None
        args, kwargs = rpc_mock.call_args
        assert args[0] == request

        # The routing header was attached to the call metadata.
        assert ("x-goog-request-params", "name=name/value",) in kwargs["metadata"]
def test_get_settings_flattened():
    """Verify the flattened get_settings call populates request.name."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_settings), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Settings()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_settings(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
def test_get_settings_flattened_error():
    """Passing both a request object and flattened args raises ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_settings(
            contact_center_insights.GetSettingsRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_get_settings_flattened_async():
    """Verify the async flattened get_settings call populates request.name."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_settings), "__call__") as call:
        # Designate an appropriate awaitable return value for the call.
        # (A duplicate plain `resources.Settings()` assignment here was dead
        # code — it was immediately overwritten — and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_settings(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val
@pytest.mark.asyncio
async def test_get_settings_flattened_error_async():
    """Async: request object plus flattened args raises ValueError."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_settings(
            contact_center_insights.GetSettingsRequest(), name="name_value",
        )
def test_update_settings(
    transport: str = "grpc", request_type=contact_center_insights.UpdateSettingsRequest
):
    """update_settings issues one gRPC call and returns a Settings message."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_settings), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Settings(
            name="name_value", language_code="language_code_value",
        )
        response = client.update_settings(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.UpdateSettingsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Settings)
    assert response.name == "name_value"
    assert response.language_code == "language_code_value"
def test_update_settings_from_dict():
    """Re-run the update_settings test with a dict-typed request."""
    test_update_settings(request_type=dict)
def test_update_settings_empty_call():
    """Calling update_settings with no arguments sends an empty request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_settings), "__call__") as call:
        client.update_settings()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.UpdateSettingsRequest()
@pytest.mark.asyncio
async def test_update_settings_async(
    transport: str = "grpc_asyncio",
    request_type=contact_center_insights.UpdateSettingsRequest,
):
    """Async update_settings issues one gRPC call and returns Settings."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_settings), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Settings(name="name_value", language_code="language_code_value",)
        )
        response = await client.update_settings(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == contact_center_insights.UpdateSettingsRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Settings)
    assert response.name == "name_value"
    assert response.language_code == "language_code_value"
@pytest.mark.asyncio
async def test_update_settings_async_from_dict():
    """Re-run the async update_settings test with a dict-typed request."""
    await test_update_settings_async(request_type=dict)
def test_update_settings_field_headers():
    """Verify update_settings sends settings.name as a request-params header."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.UpdateSettingsRequest()
    request.settings.name = "settings.name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_settings), "__call__") as call:
        call.return_value = resources.Settings()
        client.update_settings(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "settings.name=settings.name/value",) in kw[
        "metadata"
    ]
@pytest.mark.asyncio
async def test_update_settings_field_headers_async():
    """Async: update_settings sends settings.name as a request-params header."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = contact_center_insights.UpdateSettingsRequest()
    request.settings.name = "settings.name/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_settings), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings())
        await client.update_settings(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "settings.name=settings.name/value",) in kw[
        "metadata"
    ]
def test_update_settings_flattened():
    """Flattened update_settings populates settings and update_mask."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_settings), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Settings()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_settings(
            settings=resources.Settings(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].settings
        mock_val = resources.Settings(name="name_value")
        assert arg == mock_val
        arg = args[0].update_mask
        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
        assert arg == mock_val
def test_update_settings_flattened_error():
    """Passing both a request object and flattened args raises ValueError."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_settings(
            contact_center_insights.UpdateSettingsRequest(),
            settings=resources.Settings(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
@pytest.mark.asyncio
async def test_update_settings_flattened_async():
    """Async flattened update_settings populates settings and update_mask."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_settings), "__call__") as call:
        # Designate an appropriate awaitable return value for the call.
        # (A duplicate plain `resources.Settings()` assignment here was dead
        # code — it was immediately overwritten — and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Settings())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_settings(
            settings=resources.Settings(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].settings
        mock_val = resources.Settings(name="name_value")
        assert arg == mock_val
        arg = args[0].update_mask
        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
        assert arg == mock_val
@pytest.mark.asyncio
async def test_update_settings_flattened_error_async():
    """Async: request object plus flattened args raises ValueError."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.update_settings(
            contact_center_insights.UpdateSettingsRequest(),
            settings=resources.Settings(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
def test_credentials_transport_error():
    """A transport instance cannot be combined with creds/options at the client."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.ContactCenterInsightsGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = ContactCenterInsightsClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
        )
    # It is an error to provide a credentials file and a transport instance.
    transport = transports.ContactCenterInsightsGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = ContactCenterInsightsClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )
    # It is an error to provide scopes and a transport instance.
    transport = transports.ContactCenterInsightsGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = ContactCenterInsightsClient(
            client_options={"scopes": ["1", "2"]}, transport=transport,
        )
def test_transport_instance():
    """A client accepts and exposes a user-supplied transport instance."""
    custom_transport = transports.ContactCenterInsightsGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    wrapped = ContactCenterInsightsClient(transport=custom_transport)
    assert wrapped.transport is custom_transport
def test_transport_get_channel():
    """Both gRPC transports expose a truthy channel when built directly."""
    for transport_cls in (
        transports.ContactCenterInsightsGrpcTransport,
        transports.ContactCenterInsightsGrpcAsyncIOTransport,
    ):
        transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert transport.grpc_channel
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ContactCenterInsightsGrpcTransport,
        transports.ContactCenterInsightsGrpcAsyncIOTransport,
    ],
)
def test_transport_adc(transport_class):
    """Transports fall back to application default credentials (ADC)."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
def test_transport_grpc_default():
    """Clients default to the synchronous gRPC transport."""
    default_client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert isinstance(
        default_client.transport, transports.ContactCenterInsightsGrpcTransport,
    )
def test_contact_center_insights_base_transport_error():
    """Supplying credentials and credentials_file together raises."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.ContactCenterInsightsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_contact_center_insights_base_transport():
    """Every base-transport method and the LRO client raise NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.contact_center_insights_v1.services.contact_center_insights.transports.ContactCenterInsightsTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.ContactCenterInsightsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        "create_conversation",
        "update_conversation",
        "get_conversation",
        "list_conversations",
        "delete_conversation",
        "create_analysis",
        "get_analysis",
        "list_analyses",
        "delete_analysis",
        "export_insights_data",
        "create_issue_model",
        "update_issue_model",
        "get_issue_model",
        "list_issue_models",
        "delete_issue_model",
        "deploy_issue_model",
        "undeploy_issue_model",
        "get_issue",
        "list_issues",
        "update_issue",
        "calculate_issue_model_stats",
        "create_phrase_matcher",
        "get_phrase_matcher",
        "list_phrase_matchers",
        "delete_phrase_matcher",
        "update_phrase_matcher",
        "calculate_stats",
        "get_settings",
        "update_settings",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())
    with pytest.raises(NotImplementedError):
        transport.close()
    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client
def test_contact_center_insights_base_transport_with_credentials_file():
    """A credentials_file is loaded with the cloud-platform default scope."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.contact_center_insights_v1.services.contact_center_insights.transports.ContactCenterInsightsTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ContactCenterInsightsTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
def test_contact_center_insights_base_transport_with_adc():
    """The base transport falls back to ADC when no credentials are given."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.contact_center_insights_v1.services.contact_center_insights.transports.ContactCenterInsightsTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ContactCenterInsightsTransport()
        adc.assert_called_once()
def test_contact_center_insights_auth_adc():
    """Client construction uses ADC with the cloud-platform default scope."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        ContactCenterInsightsClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id=None,
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ContactCenterInsightsGrpcTransport,
        transports.ContactCenterInsightsGrpcAsyncIOTransport,
    ],
)
def test_contact_center_insights_transport_auth_adc(transport_class):
    """Transports pass explicit scopes/quota project through to ADC."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.ContactCenterInsightsGrpcTransport, grpc_helpers),
        (transports.ContactCenterInsightsGrpcAsyncIOTransport, grpc_helpers_async),
    ],
)
def test_contact_center_insights_transport_create_channel(
    transport_class, grpc_helpers
):
    """Transports create their channel with ADC creds and expected options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        create_channel.assert_called_with(
            "contactcenterinsights.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            scopes=["1", "2"],
            default_host="contactcenterinsights.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ContactCenterInsightsGrpcTransport,
        transports.ContactCenterInsightsGrpcAsyncIOTransport,
    ],
)
def test_contact_center_insights_grpc_transport_client_cert_source_for_mtls(
    transport_class,
):
    """mTLS: explicit ssl_channel_credentials wins; else client_cert_source is used."""
    cred = ga_credentials.AnonymousCredentials()
    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds,
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback,
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert, private_key=expected_key
            )
def test_contact_center_insights_host_no_port():
    """An endpoint without a port gets the default :443 appended."""
    options = client_options.ClientOptions(
        api_endpoint="contactcenterinsights.googleapis.com"
    )
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), client_options=options,
    )
    assert client.transport._host == "contactcenterinsights.googleapis.com:443"
def test_contact_center_insights_host_with_port():
    """An endpoint with an explicit port keeps that port."""
    options = client_options.ClientOptions(
        api_endpoint="contactcenterinsights.googleapis.com:8000"
    )
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), client_options=options,
    )
    assert client.transport._host == "contactcenterinsights.googleapis.com:8000"
def test_contact_center_insights_grpc_transport_channel():
    """A user-provided channel is used directly by the sync gRPC transport."""
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.ContactCenterInsightsGrpcTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # PEP 8: compare to None with `is`, not `==`.
    assert transport._ssl_channel_credentials is None
def test_contact_center_insights_grpc_asyncio_transport_channel():
    """A user-provided channel is used directly by the asyncio gRPC transport."""
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.ContactCenterInsightsGrpcAsyncIOTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # PEP 8: compare to None with `is`, not `==`.
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ContactCenterInsightsGrpcTransport,
        transports.ContactCenterInsightsGrpcAsyncIOTransport,
    ],
)
def test_contact_center_insights_transport_channel_mtls_with_client_cert_source(
    transport_class,
):
    """Deprecated api_mtls_endpoint/client_cert_source still build an mTLS channel."""
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as grpc_ssl_channel_cred:
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            cred = ga_credentials.AnonymousCredentials()
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, "default") as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ContactCenterInsightsGrpcTransport,
        transports.ContactCenterInsightsGrpcAsyncIOTransport,
    ],
)
def test_contact_center_insights_transport_channel_mtls_with_adc(transport_class):
    """Deprecated api_mtls_endpoint with ADC SslCredentials builds an mTLS channel."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()
            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_contact_center_insights_grpc_lro_client():
    """The sync transport exposes a cached OperationsClient for LROs."""
    client = ContactCenterInsightsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    transport = client.transport
    # Ensure that we have a api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
def test_contact_center_insights_grpc_lro_async_client():
    """The async transport exposes a cached OperationsAsyncClient for LROs."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
    transport = client.transport
    # Ensure that we have a api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
def test_analysis_path():
    """analysis_path renders the fully-qualified analysis resource name."""
    rendered = ContactCenterInsightsClient.analysis_path(
        "squid", "clam", "whelk", "octopus"
    )
    assert rendered == (
        "projects/squid/locations/clam/conversations/whelk/analyses/octopus"
    )
def test_parse_analysis_path():
    """parse_analysis_path inverts analysis_path (round-trip)."""
    components = {
        "project": "oyster",
        "location": "nudibranch",
        "conversation": "cuttlefish",
        "analysis": "mussel",
    }
    built = ContactCenterInsightsClient.analysis_path(**components)
    # Check that the path construction is reversible.
    assert ContactCenterInsightsClient.parse_analysis_path(built) == components
def test_conversation_path():
    """conversation_path renders the conversation resource name."""
    rendered = ContactCenterInsightsClient.conversation_path(
        "winkle", "nautilus", "scallop"
    )
    assert rendered == "projects/winkle/locations/nautilus/conversations/scallop"
def test_parse_conversation_path():
    """parse_conversation_path inverts conversation_path (round-trip)."""
    components = {
        "project": "abalone",
        "location": "squid",
        "conversation": "clam",
    }
    built = ContactCenterInsightsClient.conversation_path(**components)
    # Check that the path construction is reversible.
    assert ContactCenterInsightsClient.parse_conversation_path(built) == components
def test_issue_path():
    """issue_path renders the issue resource name under its issue model."""
    rendered = ContactCenterInsightsClient.issue_path(
        "whelk", "octopus", "oyster", "nudibranch"
    )
    assert rendered == (
        "projects/whelk/locations/octopus/issueModels/oyster/issues/nudibranch"
    )
def test_parse_issue_path():
    """parse_issue_path inverts issue_path (round-trip)."""
    components = {
        "project": "cuttlefish",
        "location": "mussel",
        "issue_model": "winkle",
        "issue": "nautilus",
    }
    built = ContactCenterInsightsClient.issue_path(**components)
    # Check that the path construction is reversible.
    assert ContactCenterInsightsClient.parse_issue_path(built) == components
def test_issue_model_path():
    """issue_model_path renders the issue model resource name."""
    rendered = ContactCenterInsightsClient.issue_model_path(
        "scallop", "abalone", "squid"
    )
    assert rendered == "projects/scallop/locations/abalone/issueModels/squid"
def test_parse_issue_model_path():
    """parse_issue_model_path inverts issue_model_path (round-trip)."""
    components = {
        "project": "clam",
        "location": "whelk",
        "issue_model": "octopus",
    }
    built = ContactCenterInsightsClient.issue_model_path(**components)
    # Check that the path construction is reversible.
    assert ContactCenterInsightsClient.parse_issue_model_path(built) == components
def test_participant_path():
    """participant_path renders the participant resource name (no location)."""
    rendered = ContactCenterInsightsClient.participant_path(
        "oyster", "nudibranch", "cuttlefish"
    )
    assert rendered == (
        "projects/oyster/conversations/nudibranch/participants/cuttlefish"
    )
def test_parse_participant_path():
    """parse_participant_path inverts participant_path (round-trip)."""
    components = {
        "project": "mussel",
        "conversation": "winkle",
        "participant": "nautilus",
    }
    built = ContactCenterInsightsClient.participant_path(**components)
    # Check that the path construction is reversible.
    assert ContactCenterInsightsClient.parse_participant_path(built) == components
def test_phrase_matcher_path():
    """phrase_matcher_path renders the phrase matcher resource name."""
    rendered = ContactCenterInsightsClient.phrase_matcher_path(
        "scallop", "abalone", "squid"
    )
    assert rendered == "projects/scallop/locations/abalone/phraseMatchers/squid"
def test_parse_phrase_matcher_path():
    """parse_phrase_matcher_path inverts phrase_matcher_path (round-trip)."""
    components = {
        "project": "clam",
        "location": "whelk",
        "phrase_matcher": "octopus",
    }
    built = ContactCenterInsightsClient.phrase_matcher_path(**components)
    # Check that the path construction is reversible.
    assert ContactCenterInsightsClient.parse_phrase_matcher_path(built) == components
def test_settings_path():
    """settings_path renders the singleton settings resource name."""
    rendered = ContactCenterInsightsClient.settings_path("oyster", "nudibranch")
    assert rendered == "projects/oyster/locations/nudibranch/settings"
def test_parse_settings_path():
    """parse_settings_path inverts settings_path (round-trip)."""
    components = {
        "project": "cuttlefish",
        "location": "mussel",
    }
    built = ContactCenterInsightsClient.settings_path(**components)
    # Check that the path construction is reversible.
    assert ContactCenterInsightsClient.parse_settings_path(built) == components
def test_common_billing_account_path():
    """common_billing_account_path renders billingAccounts/{billing_account}."""
    rendered = ContactCenterInsightsClient.common_billing_account_path("winkle")
    assert rendered == "billingAccounts/winkle"
def test_parse_common_billing_account_path():
    """parse_common_billing_account_path inverts the builder (round-trip)."""
    components = {
        "billing_account": "nautilus",
    }
    built = ContactCenterInsightsClient.common_billing_account_path(**components)
    # Check that the path construction is reversible.
    assert (
        ContactCenterInsightsClient.parse_common_billing_account_path(built)
        == components
    )
def test_common_folder_path():
    """common_folder_path renders folders/{folder}."""
    rendered = ContactCenterInsightsClient.common_folder_path("scallop")
    assert rendered == "folders/scallop"
def test_parse_common_folder_path():
    """parse_common_folder_path inverts common_folder_path (round-trip)."""
    components = {
        "folder": "abalone",
    }
    built = ContactCenterInsightsClient.common_folder_path(**components)
    # Check that the path construction is reversible.
    assert ContactCenterInsightsClient.parse_common_folder_path(built) == components
def test_common_organization_path():
    """common_organization_path renders organizations/{organization}."""
    rendered = ContactCenterInsightsClient.common_organization_path("squid")
    assert rendered == "organizations/squid"
def test_parse_common_organization_path():
    """parse_common_organization_path inverts the builder (round-trip)."""
    components = {
        "organization": "clam",
    }
    built = ContactCenterInsightsClient.common_organization_path(**components)
    # Check that the path construction is reversible.
    assert (
        ContactCenterInsightsClient.parse_common_organization_path(built) == components
    )
def test_common_project_path():
    """common_project_path renders projects/{project}."""
    rendered = ContactCenterInsightsClient.common_project_path("whelk")
    assert rendered == "projects/whelk"
def test_parse_common_project_path():
    """parse_common_project_path inverts common_project_path (round-trip)."""
    components = {
        "project": "octopus",
    }
    built = ContactCenterInsightsClient.common_project_path(**components)
    # Check that the path construction is reversible.
    assert ContactCenterInsightsClient.parse_common_project_path(built) == components
def test_common_location_path():
    """common_location_path renders projects/{project}/locations/{location}."""
    rendered = ContactCenterInsightsClient.common_location_path("oyster", "nudibranch")
    assert rendered == "projects/oyster/locations/nudibranch"
def test_parse_common_location_path():
    """Round-trip: a path built from components must parse back to them."""
    components = {
        "project": "cuttlefish",
        "location": "mussel",
    }
    built = ContactCenterInsightsClient.common_location_path(**components)
    assert ContactCenterInsightsClient.parse_common_location_path(built) == components
def test_client_withDEFAULT_CLIENT_INFO():
    """Both construction paths must forward client_info to _prep_wrapped_messages."""
    client_info = gapic_v1.client_info.ClientInfo()
    patch_target = transports.ContactCenterInsightsTransport

    # Path 1: constructing the client wraps messages with the given info.
    with mock.patch.object(patch_target, "_prep_wrapped_messages") as prep:
        ContactCenterInsightsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    # Path 2: constructing the bare transport does the same.
    with mock.patch.object(patch_target, "_prep_wrapped_messages") as prep:
        transport_class = ContactCenterInsightsClient.get_transport_class()
        transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
    """Leaving the async context manager must close the gRPC channel."""
    client = ContactCenterInsightsAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc_asyncio",
    )
    channel_type = type(client.transport.grpc_channel)
    with mock.patch.object(channel_type, "close") as close:
        async with client:
            # Entering the context must not close the channel yet.
            close.assert_not_called()
        close.assert_called_once()
def test_transport_close():
    """Leaving the sync context manager must close the underlying channel."""
    # Map transport name -> private channel attribute expected to be closed.
    # (Local name chosen so we do not shadow the module-level `transports`.)
    channel_attrs = {
        "grpc": "_grpc_channel",
    }
    for transport_name, attr_name in channel_attrs.items():
        client = ContactCenterInsightsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport_name,
        )
        channel_type = type(getattr(client.transport, attr_name))
        with mock.patch.object(channel_type, "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """Using the client as a context manager must close its transport."""
    for transport_name in ("grpc",):
        client = ContactCenterInsightsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport_name,
        )
        # The client delegates close() to the underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
| 37.685818
| 147
| 0.695218
| 37,399
| 323,382
| 5.783336
| 0.013637
| 0.016089
| 0.026145
| 0.059605
| 0.957363
| 0.94396
| 0.931384
| 0.909016
| 0.894471
| 0.880286
| 0
| 0.004247
| 0.225303
| 323,382
| 8,580
| 148
| 37.69021
| 0.859111
| 0.221936
| 0
| 0.708333
| 0
| 0
| 0.067041
| 0.020391
| 0
| 0
| 0
| 0
| 0.136123
| 1
| 0.042373
| false
| 0.000177
| 0.005297
| 0.000353
| 0.048023
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a467bb5f26c7a81c9900e2ffb2675e6fd059a21
| 2,963
|
py
|
Python
|
tests/functional/regressions/test_issue87.py
|
alexchamberlain/tartiflette
|
6904b0f47770c348553e907be5f5bdb0929fe149
|
[
"MIT"
] | null | null | null |
tests/functional/regressions/test_issue87.py
|
alexchamberlain/tartiflette
|
6904b0f47770c348553e907be5f5bdb0929fe149
|
[
"MIT"
] | 1
|
2020-08-11T15:41:41.000Z
|
2020-08-11T15:41:41.000Z
|
tests/functional/regressions/test_issue87.py
|
alexchamberlain/tartiflette
|
6904b0f47770c348553e907be5f5bdb0929fe149
|
[
"MIT"
] | null | null | null |
import pytest
# NOTE(review): indentation inside the GraphQL query strings below was lost in
# transit; it is reconstructed here to match the expected error `locations`
# (column 13 => 12 leading spaces before the root keyword) — confirm against
# the original tartiflette repository.
@pytest.mark.asyncio
@pytest.mark.ttftt_engine
@pytest.mark.parametrize(
    "query,errors",
    [
        # Case 1: two root fields directly in the subscription operation.
        (
            """
            subscription Sub {
              newDog {
                name
              }
              newHuman {
                name
              }
            }
            """,
            [
                {
                    "message": "Subscription operations must have exactly one root field.",
                    "path": None,
                    "locations": [{"line": 2, "column": 13}],
                }
            ],
        ),
        # Case 2: a root field plus the __typename introspection field.
        (
            """
            subscription Sub {
              newDog {
                name
              }
              __typename
            }
            """,
            [
                {
                    "message": "Subscription operations must have exactly one root field.",
                    "path": None,
                    "locations": [{"line": 2, "column": 13}],
                }
            ],
        ),
        # Case 3: multiple root fields hidden behind a named fragment spread.
        (
            """
            fragment MultipleSubscriptionsFields on Subscription {
              newDog {
                name
              }
              newHuman {
                name
              }
            }

            subscription Sub {
              ...MultipleSubscriptionsFields
            }
            """,
            [
                {
                    "message": "Subscription operations must have exactly one root field.",
                    "path": None,
                    "locations": [{"line": 11, "column": 13}],
                }
            ],
        ),
        # Case 4: multiple root fields hidden behind an inline fragment.
        (
            """
            subscription Sub {
              ... on Subscription {
                newDog {
                  name
                }
                newHuman {
                  name
                }
              }
            }
            """,
            [
                {
                    "message": "Subscription operations must have exactly one root field.",
                    "path": None,
                    "locations": [{"line": 2, "column": 13}],
                }
            ],
        ),
        # Case 5: inline fragment nested inside a named fragment spread.
        (
            """
            fragment MultipleSubscriptionsFields on Subscription {
              ... on Subscription {
                newDog {
                  name
                }
                newHuman {
                  name
                }
              }
            }

            subscription Sub {
              ...MultipleSubscriptionsFields
            }
            """,
            [
                {
                    "message": "Subscription operations must have exactly one root field.",
                    "path": None,
                    "locations": [{"line": 13, "column": 13}],
                }
            ],
        ),
    ],
)
async def test_issue87(engine, query, errors):
    # Every query above violates the "exactly one root field" rule for
    # subscriptions, so execution must fail with the expected errors payload.
    assert await engine.execute(query) == {"data": None, "errors": errors}
| 25.110169
| 91
| 0.325346
| 153
| 2,963
| 6.27451
| 0.27451
| 0.078125
| 0.151042
| 0.171875
| 0.739583
| 0.739583
| 0.725
| 0.725
| 0.725
| 0.725
| 0
| 0.015152
| 0.57678
| 2,963
| 117
| 92
| 25.324786
| 0.750399
| 0
| 0
| 0.4
| 0
| 0
| 0.270735
| 0
| 0
| 0
| 0
| 0
| 0.016667
| 1
| 0
| true
| 0
| 0.016667
| 0
| 0.016667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5a4b2b8b8bf9dc3fdf03f024adf700970a2ab0a4
| 100
|
py
|
Python
|
SlopPy-tests/regression-tests/unpack_seq.py
|
pajju/SlopPy
|
59a9125de0401959ee40ef02193265248d54b075
|
[
"PSF-2.0"
] | 2
|
2022-02-03T23:56:24.000Z
|
2022-02-08T19:18:46.000Z
|
SlopPy-tests/regression-tests/unpack_seq.py
|
pajju/SlopPy
|
59a9125de0401959ee40ef02193265248d54b075
|
[
"PSF-2.0"
] | null | null | null |
SlopPy-tests/regression-tests/unpack_seq.py
|
pajju/SlopPy
|
59a9125de0401959ee40ef02193265248d54b075
|
[
"PSF-2.0"
] | null | null | null |
# SlopPy regression test: unpack a 2-element split result into 3 targets.
# "hello,world".split(',') yields only two items, so standard CPython would
# raise ValueError here; this test exercises the interpreter's recovery path.
# NOTE(review): `NA` is presumably the special "not available" type injected
# by the SlopPy interpreter — it is not defined in this file; confirm against
# the SlopPy runtime before relying on this.
(x, y, z) = "hello,world".split(',')
assert type(x) is NA
assert type(y) is NA
assert type(z) is NA
| 20
| 36
| 0.63
| 21
| 100
| 3
| 0.47619
| 0.47619
| 0.31746
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17
| 100
| 4
| 37
| 25
| 0.759036
| 0
| 0
| 0
| 0
| 0
| 0.12
| 0
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a60602f21f7f988ad124823d96014360b6ce059
| 277
|
py
|
Python
|
operators.py
|
apatania/Earth-119
|
173fbdf9d306c46c2e67b1fbf51a27268351f264
|
[
"MIT"
] | null | null | null |
operators.py
|
apatania/Earth-119
|
173fbdf9d306c46c2e67b1fbf51a27268351f264
|
[
"MIT"
] | 1
|
2018-10-11T04:00:03.000Z
|
2018-10-18T01:06:31.000Z
|
operators.py
|
apatania/astr-119-hw-1
|
173fbdf9d306c46c2e67b1fbf51a27268351f264
|
[
"MIT"
] | null | null | null |
# Demonstration of Python operators; printed output is unchanged.

# Arithmetic operators.
x = 9
y = 3
for value in (x + y, x - y, x * y, x / y, x % y, x ** y):
    print(value)

# Floor division with a float operand still floors (result 3.0 here).
x = 9.191823
print(x // y)

# Augmented assignment operators.
x = 9
x += 3
print(x)
x = 9
x -= 3
print(x)
x *= 3
print(x)
x /= 3
print(x)
x **= 3
print(x)

# Comparison operators.
x = 9
y = 3
for value in (x == y, x != y, x > y, x < y, x >= y, x <= y):
    print(value)
| 6.925
| 11
| 0.545126
| 74
| 277
| 2.040541
| 0.081081
| 0.715232
| 0.602649
| 0.794702
| 0.960265
| 0.900662
| 0.900662
| 0.821192
| 0.821192
| 0.821192
| 0
| 0.08
| 0.187726
| 277
| 40
| 12
| 6.925
| 0.591111
| 0
| 0
| 0.366667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.6
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
5a8abd4b7cdaf2653841224d8076d3a719b2309f
| 155
|
py
|
Python
|
files/scriptfiles/example5.py
|
EvilJinious1/trips
|
6515d1500a12e7d032f67bdd1111385985a4e218
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:21:46.000Z
|
2020-08-18T00:21:46.000Z
|
files/scriptfiles/example5.py
|
EvilJinious1/trips
|
6515d1500a12e7d032f67bdd1111385985a4e218
|
[
"Apache-2.0"
] | 26
|
2020-08-03T19:59:14.000Z
|
2020-08-24T01:35:25.000Z
|
files/scriptfiles/example5.py
|
EvilJinious1/trips
|
6515d1500a12e7d032f67bdd1111385985a4e218
|
[
"Apache-2.0"
] | null | null | null |
import pathlib

# Directory containing the given (relative) file, resolved against the cwd.
file_parent = pathlib.Path("my_file.txt").parent.absolute()
print(file_parent)

# Current working directory.
print(pathlib.Path().absolute())
| 22.142857
| 52
| 0.76129
| 22
| 155
| 5.318182
| 0.681818
| 0.282051
| 0.273504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 155
| 7
| 53
| 22.142857
| 0.835714
| 0.309677
| 0
| 0
| 0
| 0
| 0.104762
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
ce52436a43d2ba2025d4296733550364c7bafdce
| 1,173
|
py
|
Python
|
notebooks/MetPy_Advanced/solutions/QG_data.py
|
jthielen/unidata-python-workshop
|
120e9c310443274465a87780f972956b76e2acb5
|
[
"MIT"
] | 1
|
2021-08-16T03:12:07.000Z
|
2021-08-16T03:12:07.000Z
|
notebooks/MetPy_Advanced/solutions/QG_data.py
|
jthielen/unidata-python-workshop
|
120e9c310443274465a87780f972956b76e2acb5
|
[
"MIT"
] | null | null | null |
notebooks/MetPy_Advanced/solutions/QG_data.py
|
jthielen/unidata-python-workshop
|
120e9c310443274465a87780f972956b76e2acb5
|
[
"MIT"
] | null | null | null |
# Remaining variables needed to compute QG Omega forcing terms.


def _isobaric_values(var, level):
    """Select `var` at pressure `level` hPa on date `dt` and attach units.

    Relies on the notebook-scope names `lev_name`, `time_name`, `dt`, and
    `units` defined earlier in the workshop notebook.
    """
    selected = var.sel({lev_name: level, time_name: '{:%Y-%m-%d}'.format(dt)})
    return selected.values * units(var.units)


# 500-hPa geopotential height and winds; 900-hPa winds.
hght_500 = _isobaric_values(ds.Geopotential_height_isobaric, 500)
uwnd_500 = _isobaric_values(ds['u-component_of_wind_isobaric'], 500)
vwnd_500 = _isobaric_values(ds['v-component_of_wind_isobaric'], 500)
uwnd_900 = _isobaric_values(ds['u-component_of_wind_isobaric'], 900)
vwnd_900 = _isobaric_values(ds['v-component_of_wind_isobaric'], 900)
| 73.3125
| 94
| 0.540494
| 145
| 1,173
| 4.075862
| 0.234483
| 0.1489
| 0.203046
| 0.311337
| 0.780034
| 0.780034
| 0.780034
| 0.780034
| 0.780034
| 0.759729
| 0
| 0.036496
| 0.299233
| 1,173
| 16
| 95
| 73.3125
| 0.682482
| 0.051151
| 0
| 0.6
| 0
| 0
| 0.250899
| 0.201439
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce6048e74bda3553521cdb84f2824880b8a2750a
| 2,128
|
py
|
Python
|
src/ctc/protocols/fourbyte_utils/query_utils/general_queries.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 94
|
2022-02-15T19:34:49.000Z
|
2022-03-26T19:26:22.000Z
|
src/ctc/protocols/fourbyte_utils/query_utils/general_queries.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-03-03T02:58:47.000Z
|
2022-03-11T18:41:05.000Z
|
src/ctc/protocols/fourbyte_utils/query_utils/general_queries.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-02-15T17:53:07.000Z
|
2022-03-17T19:14:17.000Z
|
from __future__ import annotations
import typing
from .. import fourbyte_spec
from . import local_queries
from . import remote_queries
async def async_query_function_signature(
    hex_signature: typing.Optional[str] = None,
    *,
    id: typing.Optional[int] = None,
    bytes_signature: typing.Optional[str] = None,
    text_signature: typing.Optional[str] = None,
    use_local: bool = True,
    use_remote: bool = True,
) -> list[fourbyte_spec.Entry]:
    """Query function-signature entries, preferring the local store.

    Checks the local database first (when ``use_local``) and falls back to
    the remote 4byte service (when ``use_remote``) only if the local query
    returns nothing.

    Raises:
        ValueError: if both ``use_local`` and ``use_remote`` are False.
    """
    if not use_local and not use_remote:
        # ValueError (a subclass of Exception, so existing handlers still
        # catch it) signals a bad-argument error more precisely.
        raise ValueError('should use at least one of use_local or use_remote')
    if use_local:
        result = local_queries.query_function_signature(
            id=id,
            bytes_signature=bytes_signature,
            hex_signature=hex_signature,
            text_signature=text_signature,
        )
        if len(result) > 0:
            return result
    if use_remote:
        return await remote_queries.async_query_function_signature_remote(
            id=id,
            bytes_signature=bytes_signature,
            hex_signature=hex_signature,
            text_signature=text_signature,
        )
    return []
async def async_query_event_signature(
    hex_signature: typing.Optional[str] = None,
    *,
    id: typing.Optional[int] = None,
    bytes_signature: typing.Optional[str] = None,
    text_signature: typing.Optional[str] = None,
    use_local: bool = True,
    use_remote: bool = True,
) -> list[fourbyte_spec.Entry]:
    """Query event-signature entries, preferring the local store.

    Checks the local database first (when ``use_local``) and falls back to
    the remote 4byte service (when ``use_remote``) only if the local query
    returns nothing.

    Raises:
        ValueError: if both ``use_local`` and ``use_remote`` are False.
    """
    if not use_local and not use_remote:
        # ValueError (a subclass of Exception, so existing handlers still
        # catch it) signals a bad-argument error more precisely.
        raise ValueError('should use at least one of use_local or use_remote')
    if use_local:
        result = local_queries.query_event_signature(
            id=id,
            bytes_signature=bytes_signature,
            hex_signature=hex_signature,
            text_signature=text_signature,
        )
        if len(result) > 0:
            return result
    if use_remote:
        return await remote_queries.async_query_event_signature_remote(
            id=id,
            bytes_signature=bytes_signature,
            hex_signature=hex_signature,
            text_signature=text_signature,
        )
    return []
| 28
| 77
| 0.653195
| 254
| 2,128
| 5.169291
| 0.177165
| 0.091394
| 0.159939
| 0.118812
| 0.862148
| 0.862148
| 0.862148
| 0.862148
| 0.862148
| 0.862148
| 0
| 0.001294
| 0.273496
| 2,128
| 75
| 78
| 28.373333
| 0.847995
| 0
| 0
| 0.754098
| 0
| 0
| 0.046992
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.081967
| 0
| 0.180328
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce740ec473a1f2cbee3449878f394bd6364a3ed9
| 169
|
py
|
Python
|
mlearn/autograd/__init__.py
|
EequalsMCsquare/mlearn
|
bee79618fb80568b99bc2eefcd97dab33967ee12
|
[
"MIT"
] | 2
|
2019-12-13T16:06:24.000Z
|
2020-01-04T13:44:08.000Z
|
mlearn/autograd/__init__.py
|
EequalsMCsquare/mlearn
|
bee79618fb80568b99bc2eefcd97dab33967ee12
|
[
"MIT"
] | null | null | null |
mlearn/autograd/__init__.py
|
EequalsMCsquare/mlearn
|
bee79618fb80568b99bc2eefcd97dab33967ee12
|
[
"MIT"
] | null | null | null |
from ..layers import Module
from .tensor import Tensor
from .tensor import Dependency
from .parameter import Parameter
from .tensor import ones, randn, zeros, zeros_like
| 33.8
| 50
| 0.816568
| 24
| 169
| 5.708333
| 0.458333
| 0.218978
| 0.350365
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130178
| 169
| 5
| 50
| 33.8
| 0.931973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ce86b18bdaac200f460922b1676bf5ec45eb7fa4
| 54,163
|
py
|
Python
|
sdk/python/pulumi_azure/network/application_gateway.py
|
kenny-wealth/pulumi-azure
|
e57e3a81f95bf622e7429c53f0bff93e33372aa1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/network/application_gateway.py
|
kenny-wealth/pulumi-azure
|
e57e3a81f95bf622e7429c53f0bff93e33372aa1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/network/application_gateway.py
|
kenny-wealth/pulumi-azure
|
e57e3a81f95bf622e7429c53f0bff93e33372aa1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class ApplicationGateway(pulumi.CustomResource):
authentication_certificates: pulumi.Output[list]
"""
One or more `authentication_certificate` blocks as defined below.
* `data` (`str`)
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
"""
autoscale_configuration: pulumi.Output[dict]
"""
A `autoscale_configuration` block as defined below.
* `maxCapacity` (`float`)
* `minCapacity` (`float`)
"""
backend_address_pools: pulumi.Output[list]
"""
One or more `backend_address_pool` blocks as defined below.
* `fqdnLists` (`list`)
* `fqdns` (`list`)
* `id` (`str`) - The ID of the Rewrite Rule Set
* `ipAddressLists` (`list`)
* `ipAddresses` (`list`)
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
"""
backend_http_settings: pulumi.Output[list]
"""
One or more `backend_http_settings` blocks as defined below.
* `affinityCookieName` (`str`)
* `authentication_certificates` (`list`) - One or more `authentication_certificate` blocks as defined below.
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `connectionDraining` (`dict`)
* `drainTimeoutSec` (`float`)
* `enabled` (`bool`)
* `cookieBasedAffinity` (`str`)
* `host_name` (`str`)
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `path` (`str`)
* `pickHostNameFromBackendAddress` (`bool`)
* `port` (`float`)
* `probe_id` (`str`) - The ID of the associated Probe.
* `probeName` (`str`)
* `protocol` (`str`)
* `requestTimeout` (`float`)
"""
custom_error_configurations: pulumi.Output[list]
"""
One or more `custom_error_configuration` blocks as defined below.
* `customErrorPageUrl` (`str`)
* `id` (`str`) - The ID of the Rewrite Rule Set
* `statusCode` (`str`)
"""
disabled_ssl_protocols: pulumi.Output[list]
"""
A list of SSL Protocols which should be disabled on this Application Gateway. Possible values are `TLSv1_0`, `TLSv1_1` and `TLSv1_2`.
> **NOTE:** `disabled_ssl_protocols ` has been deprecated in favour of `disabled_protocols` in the `ssl_policy` block.
"""
enable_http2: pulumi.Output[bool]
"""
Is HTTP2 enabled on the application gateway resource? Defaults to `false`.
"""
frontend_ip_configurations: pulumi.Output[list]
"""
One or more `frontend_ip_configuration` blocks as defined below.
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `private_ip_address` (`str`)
* `privateIpAddressAllocation` (`str`)
* `publicIpAddressId` (`str`)
* `subnet_id` (`str`)
"""
frontend_ports: pulumi.Output[list]
"""
One or more `frontend_port` blocks as defined below.
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `port` (`float`)
"""
gateway_ip_configurations: pulumi.Output[list]
"""
One or more `gateway_ip_configuration` blocks as defined below.
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `subnet_id` (`str`)
"""
http_listeners: pulumi.Output[list]
"""
One or more `http_listener` blocks as defined below.
* `custom_error_configurations` (`list`) - One or more `custom_error_configuration` blocks as defined below.
* `customErrorPageUrl` (`str`)
* `id` (`str`) - The ID of the Rewrite Rule Set
* `statusCode` (`str`)
* `frontend_ip_configuration_id` (`str`) - The ID of the associated Frontend Configuration.
* `frontend_ip_configuration_name` (`str`)
* `frontendPortId` (`str`) - The ID of the associated Frontend Port.
* `frontendPortName` (`str`)
* `host_name` (`str`)
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `protocol` (`str`)
* `requireSni` (`bool`)
* `sslCertificateId` (`str`) - The ID of the associated SSL Certificate.
* `sslCertificateName` (`str`)
"""
identity: pulumi.Output[dict]
"""
A `identity` block.
* `identityIds` (`str`)
* `type` (`str`)
"""
location: pulumi.Output[str]
"""
The Azure region where the Application Gateway should exist. Changing this forces a new resource to be created.
"""
name: pulumi.Output[str]
"""
The name of the Application Gateway. Changing this forces a new resource to be created.
"""
probes: pulumi.Output[list]
"""
One or more `probe` blocks as defined below.
* `host` (`str`)
* `id` (`str`) - The ID of the Rewrite Rule Set
* `interval` (`float`)
* `match` (`dict`)
* `body` (`str`)
* `statusCodes` (`list`)
* `minimumServers` (`float`)
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `path` (`str`)
* `pickHostNameFromBackendHttpSettings` (`bool`)
* `protocol` (`str`)
* `timeout` (`float`)
* `unhealthyThreshold` (`float`)
"""
redirect_configurations: pulumi.Output[list]
"""
A `redirect_configuration` block as defined below.
* `id` (`str`) - The ID of the Rewrite Rule Set
* `includePath` (`bool`)
* `includeQueryString` (`bool`)
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `redirectType` (`str`)
* `targetListenerId` (`str`)
* `targetListenerName` (`str`)
* `targetUrl` (`str`)
"""
request_routing_rules: pulumi.Output[list]
"""
One or more `request_routing_rule` blocks as defined below.
* `backend_address_pool_id` (`str`) - The ID of the associated Backend Address Pool.
* `backendAddressPoolName` (`str`)
* `backendHttpSettingsId` (`str`) - The ID of the associated Backend HTTP Settings Configuration.
* `backendHttpSettingsName` (`str`)
* `httpListenerId` (`str`) - The ID of the associated HTTP Listener.
* `httpListenerName` (`str`)
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `redirectConfigurationId` (`str`) - The ID of the associated Redirect Configuration.
* `redirectConfigurationName` (`str`)
* `rewriteRuleSetId` (`str`) - The ID of the associated Rewrite Rule Set.
* `rewriteRuleSetName` (`str`)
* `ruleType` (`str`)
* `urlPathMapId` (`str`) - The ID of the associated URL Path Map.
* `urlPathMapName` (`str`)
"""
resource_group_name: pulumi.Output[str]
"""
The name of the resource group in which to the Application Gateway should exist. Changing this forces a new resource to be created.
"""
rewrite_rule_sets: pulumi.Output[list]
"""
One or more `rewrite_rule_set` blocks as defined below. Only valid for v2 SKUs.
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `rewriteRules` (`list`)
* `conditions` (`list`)
* `ignoreCase` (`bool`)
* `negate` (`bool`)
* `pattern` (`str`)
* `variable` (`str`)
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `requestHeaderConfigurations` (`list`)
* `headerName` (`str`)
* `headerValue` (`str`)
* `responseHeaderConfigurations` (`list`)
* `headerName` (`str`)
* `headerValue` (`str`)
* `ruleSequence` (`float`)
"""
sku: pulumi.Output[dict]
"""
A `sku` block as defined below.
* `capacity` (`float`)
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `tier` (`str`)
"""
ssl_certificates: pulumi.Output[list]
"""
One or more `ssl_certificate` blocks as defined below.
* `data` (`str`)
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `password` (`str`)
* `publicCertData` (`str`) - The Public Certificate Data associated with the SSL Certificate.
"""
ssl_policies: pulumi.Output[list]
"""
a `ssl policy` block as defined below.
* `cipherSuites` (`list`)
* `disabledProtocols` (`list`)
* `minProtocolVersion` (`str`)
* `policyName` (`str`)
* `policyType` (`str`)
"""
tags: pulumi.Output[dict]
"""
A mapping of tags to assign to the resource.
"""
trusted_root_certificates: pulumi.Output[list]
"""
One or more `trusted_root_certificate` blocks as defined below.
* `data` (`str`)
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
"""
url_path_maps: pulumi.Output[list]
"""
One or more `url_path_map` blocks as defined below.
* `defaultBackendAddressPoolId` (`str`) - The ID of the Default Backend Address Pool.
* `defaultBackendAddressPoolName` (`str`)
* `defaultBackendHttpSettingsId` (`str`) - The ID of the Default Backend HTTP Settings Collection.
* `defaultBackendHttpSettingsName` (`str`)
* `defaultRedirectConfigurationId` (`str`) - The ID of the Default Redirect Configuration.
* `defaultRedirectConfigurationName` (`str`)
* `defaultRewriteRuleSetId` (`str`)
* `defaultRewriteRuleSetName` (`str`)
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `pathRules` (`list`) - A list of `path_rule` blocks as defined above.
* `backend_address_pool_id` (`str`) - The ID of the associated Backend Address Pool.
* `backendAddressPoolName` (`str`)
* `backendHttpSettingsId` (`str`) - The ID of the associated Backend HTTP Settings Configuration.
* `backendHttpSettingsName` (`str`)
* `id` (`str`) - The ID of the Rewrite Rule Set
* `name` (`str`) - The name of the Application Gateway. Changing this forces a new resource to be created.
* `paths` (`list`)
* `redirectConfigurationId` (`str`) - The ID of the associated Redirect Configuration.
* `redirectConfigurationName` (`str`)
* `rewriteRuleSetId` (`str`) - The ID of the associated Rewrite Rule Set.
* `rewriteRuleSetName` (`str`)
"""
waf_configuration: pulumi.Output[dict]
"""
A `waf_configuration` block as defined below.
* `disabledRuleGroups` (`list`)
* `ruleGroupName` (`str`)
* `rules` (`list`)
* `enabled` (`bool`)
* `exclusions` (`list`)
* `matchVariable` (`str`)
* `selector` (`str`)
* `selectorMatchOperator` (`str`)
* `fileUploadLimitMb` (`float`)
* `firewallMode` (`str`)
* `maxRequestBodySizeKb` (`float`)
* `requestBodyCheck` (`bool`)
* `ruleSetType` (`str`)
* `ruleSetVersion` (`str`)
"""
zones: pulumi.Output[list]
"""
A collection of availability zones to spread the Application Gateway over.
"""
def __init__(__self__, resource_name, opts=None, authentication_certificates=None, autoscale_configuration=None, backend_address_pools=None, backend_http_settings=None, custom_error_configurations=None, disabled_ssl_protocols=None, enable_http2=None, frontend_ip_configurations=None, frontend_ports=None, gateway_ip_configurations=None, http_listeners=None, identity=None, location=None, name=None, probes=None, redirect_configurations=None, request_routing_rules=None, resource_group_name=None, rewrite_rule_sets=None, sku=None, ssl_certificates=None, ssl_policies=None, tags=None, trusted_root_certificates=None, url_path_maps=None, waf_configuration=None, zones=None, __props__=None, __name__=None, __opts__=None):
    """
    Manages an Application Gateway.

    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[list] authentication_certificates: One or more `authentication_certificate` blocks as defined below.
    :param pulumi.Input[dict] autoscale_configuration: A `autoscale_configuration` block as defined below.
    :param pulumi.Input[list] backend_address_pools: One or more `backend_address_pool` blocks as defined below.
    :param pulumi.Input[list] backend_http_settings: One or more `backend_http_settings` blocks as defined below.
    :param pulumi.Input[list] custom_error_configurations: One or more `custom_error_configuration` blocks as defined below.
    :param pulumi.Input[list] disabled_ssl_protocols: A list of SSL Protocols which should be disabled on this Application Gateway. Possible values are `TLSv1_0`, `TLSv1_1` and `TLSv1_2`.
        > **NOTE:** `disabled_ssl_protocols ` has been deprecated in favour of `disabled_protocols` in the `ssl_policy` block.
    :param pulumi.Input[bool] enable_http2: Is HTTP2 enabled on the application gateway resource? Defaults to `false`.
    :param pulumi.Input[list] frontend_ip_configurations: One or more `frontend_ip_configuration` blocks as defined below.
    :param pulumi.Input[list] frontend_ports: One or more `frontend_port` blocks as defined below.
    :param pulumi.Input[list] gateway_ip_configurations: One or more `gateway_ip_configuration` blocks as defined below.
    :param pulumi.Input[list] http_listeners: One or more `http_listener` blocks as defined below.
    :param pulumi.Input[dict] identity: A `identity` block.
    :param pulumi.Input[str] location: The Azure region where the Application Gateway should exist. Changing this forces a new resource to be created.
    :param pulumi.Input[str] name: The name of the Application Gateway. Changing this forces a new resource to be created.
    :param pulumi.Input[list] probes: One or more `probe` blocks as defined below.
    :param pulumi.Input[list] redirect_configurations: A `redirect_configuration` block as defined below.
    :param pulumi.Input[list] request_routing_rules: One or more `request_routing_rule` blocks as defined below.
    :param pulumi.Input[str] resource_group_name: The name of the resource group in which to the Application Gateway should exist. Changing this forces a new resource to be created.
    :param pulumi.Input[list] rewrite_rule_sets: One or more `rewrite_rule_set` blocks as defined below. Only valid for v2 SKUs.
    :param pulumi.Input[dict] sku: A `sku` block as defined below.
    :param pulumi.Input[list] ssl_certificates: One or more `ssl_certificate` blocks as defined below.
    :param pulumi.Input[list] ssl_policies: a `ssl policy` block as defined below.
    :param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
    :param pulumi.Input[list] trusted_root_certificates: One or more `trusted_root_certificate` blocks as defined below.
    :param pulumi.Input[list] url_path_maps: One or more `url_path_map` blocks as defined below.
    :param pulumi.Input[dict] waf_configuration: A `waf_configuration` block as defined below.
    :param pulumi.Input[list] zones: A collection of availability zones to spread the Application Gateway over.

    The **authentication_certificates** object supports the following:

    * `data` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.

    The **autoscale_configuration** object supports the following:

    * `maxCapacity` (`pulumi.Input[float]`)
    * `minCapacity` (`pulumi.Input[float]`)

    The **backend_address_pools** object supports the following:

    * `fqdnLists` (`pulumi.Input[list]`)
    * `fqdns` (`pulumi.Input[list]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `ipAddressLists` (`pulumi.Input[list]`)
    * `ipAddresses` (`pulumi.Input[list]`)
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.

    The **backend_http_settings** object supports the following:

    * `affinityCookieName` (`pulumi.Input[str]`)
    * `authentication_certificates` (`pulumi.Input[list]`) - One or more `authentication_certificate` blocks as defined below.
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `connectionDraining` (`pulumi.Input[dict]`)
    * `drainTimeoutSec` (`pulumi.Input[float]`)
    * `enabled` (`pulumi.Input[bool]`)
    * `cookieBasedAffinity` (`pulumi.Input[str]`)
    * `host_name` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `path` (`pulumi.Input[str]`)
    * `pickHostNameFromBackendAddress` (`pulumi.Input[bool]`)
    * `port` (`pulumi.Input[float]`)
    * `probe_id` (`pulumi.Input[str]`) - The ID of the associated Probe.
    * `probeName` (`pulumi.Input[str]`)
    * `protocol` (`pulumi.Input[str]`)
    * `requestTimeout` (`pulumi.Input[float]`)

    The **custom_error_configurations** object supports the following:

    * `customErrorPageUrl` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `statusCode` (`pulumi.Input[str]`)

    The **frontend_ip_configurations** object supports the following:

    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `private_ip_address` (`pulumi.Input[str]`)
    * `privateIpAddressAllocation` (`pulumi.Input[str]`)
    * `publicIpAddressId` (`pulumi.Input[str]`)
    * `subnet_id` (`pulumi.Input[str]`)

    The **frontend_ports** object supports the following:

    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `port` (`pulumi.Input[float]`)

    The **gateway_ip_configurations** object supports the following:

    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `subnet_id` (`pulumi.Input[str]`)

    The **http_listeners** object supports the following:

    * `custom_error_configurations` (`pulumi.Input[list]`) - One or more `custom_error_configuration` blocks as defined below.
    * `customErrorPageUrl` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `statusCode` (`pulumi.Input[str]`)
    * `frontend_ip_configuration_id` (`pulumi.Input[str]`) - The ID of the associated Frontend Configuration.
    * `frontend_ip_configuration_name` (`pulumi.Input[str]`)
    * `frontendPortId` (`pulumi.Input[str]`) - The ID of the associated Frontend Port.
    * `frontendPortName` (`pulumi.Input[str]`)
    * `host_name` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `protocol` (`pulumi.Input[str]`)
    * `requireSni` (`pulumi.Input[bool]`)
    * `sslCertificateId` (`pulumi.Input[str]`) - The ID of the associated SSL Certificate.
    * `sslCertificateName` (`pulumi.Input[str]`)

    The **identity** object supports the following:

    * `identityIds` (`pulumi.Input[str]`)
    * `type` (`pulumi.Input[str]`)

    The **probes** object supports the following:

    * `host` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `interval` (`pulumi.Input[float]`)
    * `match` (`pulumi.Input[dict]`)
    * `body` (`pulumi.Input[str]`)
    * `statusCodes` (`pulumi.Input[list]`)
    * `minimumServers` (`pulumi.Input[float]`)
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `path` (`pulumi.Input[str]`)
    * `pickHostNameFromBackendHttpSettings` (`pulumi.Input[bool]`)
    * `protocol` (`pulumi.Input[str]`)
    * `timeout` (`pulumi.Input[float]`)
    * `unhealthyThreshold` (`pulumi.Input[float]`)

    The **redirect_configurations** object supports the following:

    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `includePath` (`pulumi.Input[bool]`)
    * `includeQueryString` (`pulumi.Input[bool]`)
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `redirectType` (`pulumi.Input[str]`)
    * `targetListenerId` (`pulumi.Input[str]`)
    * `targetListenerName` (`pulumi.Input[str]`)
    * `targetUrl` (`pulumi.Input[str]`)

    The **request_routing_rules** object supports the following:

    * `backend_address_pool_id` (`pulumi.Input[str]`) - The ID of the associated Backend Address Pool.
    * `backendAddressPoolName` (`pulumi.Input[str]`)
    * `backendHttpSettingsId` (`pulumi.Input[str]`) - The ID of the associated Backend HTTP Settings Configuration.
    * `backendHttpSettingsName` (`pulumi.Input[str]`)
    * `httpListenerId` (`pulumi.Input[str]`) - The ID of the associated HTTP Listener.
    * `httpListenerName` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `redirectConfigurationId` (`pulumi.Input[str]`) - The ID of the associated Redirect Configuration.
    * `redirectConfigurationName` (`pulumi.Input[str]`)
    * `rewriteRuleSetId` (`pulumi.Input[str]`) - The ID of the associated Rewrite Rule Set.
    * `rewriteRuleSetName` (`pulumi.Input[str]`)
    * `ruleType` (`pulumi.Input[str]`)
    * `urlPathMapId` (`pulumi.Input[str]`) - The ID of the associated URL Path Map.
    * `urlPathMapName` (`pulumi.Input[str]`)

    The **rewrite_rule_sets** object supports the following:

    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `rewriteRules` (`pulumi.Input[list]`)
    * `conditions` (`pulumi.Input[list]`)
    * `ignoreCase` (`pulumi.Input[bool]`)
    * `negate` (`pulumi.Input[bool]`)
    * `pattern` (`pulumi.Input[str]`)
    * `variable` (`pulumi.Input[str]`)
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `requestHeaderConfigurations` (`pulumi.Input[list]`)
    * `headerName` (`pulumi.Input[str]`)
    * `headerValue` (`pulumi.Input[str]`)
    * `responseHeaderConfigurations` (`pulumi.Input[list]`)
    * `headerName` (`pulumi.Input[str]`)
    * `headerValue` (`pulumi.Input[str]`)
    * `ruleSequence` (`pulumi.Input[float]`)

    The **sku** object supports the following:

    * `capacity` (`pulumi.Input[float]`)
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `tier` (`pulumi.Input[str]`)

    The **ssl_certificates** object supports the following:

    * `data` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `password` (`pulumi.Input[str]`)
    * `publicCertData` (`pulumi.Input[str]`) - The Public Certificate Data associated with the SSL Certificate.

    The **ssl_policies** object supports the following:

    * `cipherSuites` (`pulumi.Input[list]`)
    * `disabledProtocols` (`pulumi.Input[list]`)
    * `minProtocolVersion` (`pulumi.Input[str]`)
    * `policyName` (`pulumi.Input[str]`)
    * `policyType` (`pulumi.Input[str]`)

    The **trusted_root_certificates** object supports the following:

    * `data` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.

    The **url_path_maps** object supports the following:

    * `defaultBackendAddressPoolId` (`pulumi.Input[str]`) - The ID of the Default Backend Address Pool.
    * `defaultBackendAddressPoolName` (`pulumi.Input[str]`)
    * `defaultBackendHttpSettingsId` (`pulumi.Input[str]`) - The ID of the Default Backend HTTP Settings Collection.
    * `defaultBackendHttpSettingsName` (`pulumi.Input[str]`)
    * `defaultRedirectConfigurationId` (`pulumi.Input[str]`) - The ID of the Default Redirect Configuration.
    * `defaultRedirectConfigurationName` (`pulumi.Input[str]`)
    * `defaultRewriteRuleSetId` (`pulumi.Input[str]`)
    * `defaultRewriteRuleSetName` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `pathRules` (`pulumi.Input[list]`) - A list of `path_rule` blocks as defined above.
    * `backend_address_pool_id` (`pulumi.Input[str]`) - The ID of the associated Backend Address Pool.
    * `backendAddressPoolName` (`pulumi.Input[str]`)
    * `backendHttpSettingsId` (`pulumi.Input[str]`) - The ID of the associated Backend HTTP Settings Configuration.
    * `backendHttpSettingsName` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `paths` (`pulumi.Input[list]`)
    * `redirectConfigurationId` (`pulumi.Input[str]`) - The ID of the associated Redirect Configuration.
    * `redirectConfigurationName` (`pulumi.Input[str]`)
    * `rewriteRuleSetId` (`pulumi.Input[str]`) - The ID of the associated Rewrite Rule Set.
    * `rewriteRuleSetName` (`pulumi.Input[str]`)

    The **waf_configuration** object supports the following:

    * `disabledRuleGroups` (`pulumi.Input[list]`)
    * `ruleGroupName` (`pulumi.Input[str]`)
    * `rules` (`pulumi.Input[list]`)
    * `enabled` (`pulumi.Input[bool]`)
    * `exclusions` (`pulumi.Input[list]`)
    * `matchVariable` (`pulumi.Input[str]`)
    * `selector` (`pulumi.Input[str]`)
    * `selectorMatchOperator` (`pulumi.Input[str]`)
    * `fileUploadLimitMb` (`pulumi.Input[float]`)
    * `firewallMode` (`pulumi.Input[str]`)
    * `maxRequestBodySizeKb` (`pulumi.Input[float]`)
    * `requestBodyCheck` (`pulumi.Input[bool]`)
    * `ruleSetType` (`pulumi.Input[str]`)
    * `ruleSetVersion` (`pulumi.Input[str]`)

    > This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/application_gateway.html.markdown.
    """
    # Honour the deprecated ``__name__`` / ``__opts__`` aliases for
    # resource_name / opts, warning so callers can migrate.
    if __name__ is not None:
        warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
        resource_name = __name__
    if __opts__ is not None:
        warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
        opts = __opts__
    # Normalize and validate resource options before registration.
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        # Pin this resource to the provider plugin version of this SDK build.
        opts.version = utilities.get_version()
    if opts.id is None:
        # Creating a new resource: build the input property bag.
        # ``__props__`` may only be supplied together with opts.id (the
        # "get existing resource" path used by ApplicationGateway.get).
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = dict()

        __props__['authentication_certificates'] = authentication_certificates
        __props__['autoscale_configuration'] = autoscale_configuration
        # Validate the properties the provider schema marks as required.
        if backend_address_pools is None:
            raise TypeError("Missing required property 'backend_address_pools'")
        __props__['backend_address_pools'] = backend_address_pools
        if backend_http_settings is None:
            raise TypeError("Missing required property 'backend_http_settings'")
        __props__['backend_http_settings'] = backend_http_settings
        __props__['custom_error_configurations'] = custom_error_configurations
        __props__['disabled_ssl_protocols'] = disabled_ssl_protocols
        __props__['enable_http2'] = enable_http2
        if frontend_ip_configurations is None:
            raise TypeError("Missing required property 'frontend_ip_configurations'")
        __props__['frontend_ip_configurations'] = frontend_ip_configurations
        if frontend_ports is None:
            raise TypeError("Missing required property 'frontend_ports'")
        __props__['frontend_ports'] = frontend_ports
        if gateway_ip_configurations is None:
            raise TypeError("Missing required property 'gateway_ip_configurations'")
        __props__['gateway_ip_configurations'] = gateway_ip_configurations
        if http_listeners is None:
            raise TypeError("Missing required property 'http_listeners'")
        __props__['http_listeners'] = http_listeners
        __props__['identity'] = identity
        __props__['location'] = location
        __props__['name'] = name
        __props__['probes'] = probes
        __props__['redirect_configurations'] = redirect_configurations
        if request_routing_rules is None:
            raise TypeError("Missing required property 'request_routing_rules'")
        __props__['request_routing_rules'] = request_routing_rules
        if resource_group_name is None:
            raise TypeError("Missing required property 'resource_group_name'")
        __props__['resource_group_name'] = resource_group_name
        __props__['rewrite_rule_sets'] = rewrite_rule_sets
        if sku is None:
            raise TypeError("Missing required property 'sku'")
        __props__['sku'] = sku
        __props__['ssl_certificates'] = ssl_certificates
        __props__['ssl_policies'] = ssl_policies
        __props__['tags'] = tags
        __props__['trusted_root_certificates'] = trusted_root_certificates
        __props__['url_path_maps'] = url_path_maps
        __props__['waf_configuration'] = waf_configuration
        __props__['zones'] = zones
    # Register the resource with the Pulumi engine under its provider type token.
    super(ApplicationGateway, __self__).__init__(
        'azure:network/applicationGateway:ApplicationGateway',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name, id, opts=None, authentication_certificates=None, autoscale_configuration=None, backend_address_pools=None, backend_http_settings=None, custom_error_configurations=None, disabled_ssl_protocols=None, enable_http2=None, frontend_ip_configurations=None, frontend_ports=None, gateway_ip_configurations=None, http_listeners=None, identity=None, location=None, name=None, probes=None, redirect_configurations=None, request_routing_rules=None, resource_group_name=None, rewrite_rule_sets=None, sku=None, ssl_certificates=None, ssl_policies=None, tags=None, trusted_root_certificates=None, url_path_maps=None, waf_configuration=None, zones=None):
    """
    Get an existing ApplicationGateway resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param str id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[list] authentication_certificates: One or more `authentication_certificate` blocks as defined below.
    :param pulumi.Input[dict] autoscale_configuration: A `autoscale_configuration` block as defined below.
    :param pulumi.Input[list] backend_address_pools: One or more `backend_address_pool` blocks as defined below.
    :param pulumi.Input[list] backend_http_settings: One or more `backend_http_settings` blocks as defined below.
    :param pulumi.Input[list] custom_error_configurations: One or more `custom_error_configuration` blocks as defined below.
    :param pulumi.Input[list] disabled_ssl_protocols: A list of SSL Protocols which should be disabled on this Application Gateway. Possible values are `TLSv1_0`, `TLSv1_1` and `TLSv1_2`.
        > **NOTE:** `disabled_ssl_protocols ` has been deprecated in favour of `disabled_protocols` in the `ssl_policy` block.
    :param pulumi.Input[bool] enable_http2: Is HTTP2 enabled on the application gateway resource? Defaults to `false`.
    :param pulumi.Input[list] frontend_ip_configurations: One or more `frontend_ip_configuration` blocks as defined below.
    :param pulumi.Input[list] frontend_ports: One or more `frontend_port` blocks as defined below.
    :param pulumi.Input[list] gateway_ip_configurations: One or more `gateway_ip_configuration` blocks as defined below.
    :param pulumi.Input[list] http_listeners: One or more `http_listener` blocks as defined below.
    :param pulumi.Input[dict] identity: A `identity` block.
    :param pulumi.Input[str] location: The Azure region where the Application Gateway should exist. Changing this forces a new resource to be created.
    :param pulumi.Input[str] name: The name of the Application Gateway. Changing this forces a new resource to be created.
    :param pulumi.Input[list] probes: One or more `probe` blocks as defined below.
    :param pulumi.Input[list] redirect_configurations: A `redirect_configuration` block as defined below.
    :param pulumi.Input[list] request_routing_rules: One or more `request_routing_rule` blocks as defined below.
    :param pulumi.Input[str] resource_group_name: The name of the resource group in which to the Application Gateway should exist. Changing this forces a new resource to be created.
    :param pulumi.Input[list] rewrite_rule_sets: One or more `rewrite_rule_set` blocks as defined below. Only valid for v2 SKUs.
    :param pulumi.Input[dict] sku: A `sku` block as defined below.
    :param pulumi.Input[list] ssl_certificates: One or more `ssl_certificate` blocks as defined below.
    :param pulumi.Input[list] ssl_policies: a `ssl policy` block as defined below.
    :param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
    :param pulumi.Input[list] trusted_root_certificates: One or more `trusted_root_certificate` blocks as defined below.
    :param pulumi.Input[list] url_path_maps: One or more `url_path_map` blocks as defined below.
    :param pulumi.Input[dict] waf_configuration: A `waf_configuration` block as defined below.
    :param pulumi.Input[list] zones: A collection of availability zones to spread the Application Gateway over.

    The **authentication_certificates** object supports the following:

    * `data` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.

    The **autoscale_configuration** object supports the following:

    * `maxCapacity` (`pulumi.Input[float]`)
    * `minCapacity` (`pulumi.Input[float]`)

    The **backend_address_pools** object supports the following:

    * `fqdnLists` (`pulumi.Input[list]`)
    * `fqdns` (`pulumi.Input[list]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `ipAddressLists` (`pulumi.Input[list]`)
    * `ipAddresses` (`pulumi.Input[list]`)
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.

    The **backend_http_settings** object supports the following:

    * `affinityCookieName` (`pulumi.Input[str]`)
    * `authentication_certificates` (`pulumi.Input[list]`) - One or more `authentication_certificate` blocks as defined below.
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `connectionDraining` (`pulumi.Input[dict]`)
    * `drainTimeoutSec` (`pulumi.Input[float]`)
    * `enabled` (`pulumi.Input[bool]`)
    * `cookieBasedAffinity` (`pulumi.Input[str]`)
    * `host_name` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `path` (`pulumi.Input[str]`)
    * `pickHostNameFromBackendAddress` (`pulumi.Input[bool]`)
    * `port` (`pulumi.Input[float]`)
    * `probe_id` (`pulumi.Input[str]`) - The ID of the associated Probe.
    * `probeName` (`pulumi.Input[str]`)
    * `protocol` (`pulumi.Input[str]`)
    * `requestTimeout` (`pulumi.Input[float]`)

    The **custom_error_configurations** object supports the following:

    * `customErrorPageUrl` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `statusCode` (`pulumi.Input[str]`)

    The **frontend_ip_configurations** object supports the following:

    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `private_ip_address` (`pulumi.Input[str]`)
    * `privateIpAddressAllocation` (`pulumi.Input[str]`)
    * `publicIpAddressId` (`pulumi.Input[str]`)
    * `subnet_id` (`pulumi.Input[str]`)

    The **frontend_ports** object supports the following:

    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `port` (`pulumi.Input[float]`)

    The **gateway_ip_configurations** object supports the following:

    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `subnet_id` (`pulumi.Input[str]`)

    The **http_listeners** object supports the following:

    * `custom_error_configurations` (`pulumi.Input[list]`) - One or more `custom_error_configuration` blocks as defined below.
    * `customErrorPageUrl` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `statusCode` (`pulumi.Input[str]`)
    * `frontend_ip_configuration_id` (`pulumi.Input[str]`) - The ID of the associated Frontend Configuration.
    * `frontend_ip_configuration_name` (`pulumi.Input[str]`)
    * `frontendPortId` (`pulumi.Input[str]`) - The ID of the associated Frontend Port.
    * `frontendPortName` (`pulumi.Input[str]`)
    * `host_name` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `protocol` (`pulumi.Input[str]`)
    * `requireSni` (`pulumi.Input[bool]`)
    * `sslCertificateId` (`pulumi.Input[str]`) - The ID of the associated SSL Certificate.
    * `sslCertificateName` (`pulumi.Input[str]`)

    The **identity** object supports the following:

    * `identityIds` (`pulumi.Input[str]`)
    * `type` (`pulumi.Input[str]`)

    The **probes** object supports the following:

    * `host` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `interval` (`pulumi.Input[float]`)
    * `match` (`pulumi.Input[dict]`)
    * `body` (`pulumi.Input[str]`)
    * `statusCodes` (`pulumi.Input[list]`)
    * `minimumServers` (`pulumi.Input[float]`)
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `path` (`pulumi.Input[str]`)
    * `pickHostNameFromBackendHttpSettings` (`pulumi.Input[bool]`)
    * `protocol` (`pulumi.Input[str]`)
    * `timeout` (`pulumi.Input[float]`)
    * `unhealthyThreshold` (`pulumi.Input[float]`)

    The **redirect_configurations** object supports the following:

    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `includePath` (`pulumi.Input[bool]`)
    * `includeQueryString` (`pulumi.Input[bool]`)
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `redirectType` (`pulumi.Input[str]`)
    * `targetListenerId` (`pulumi.Input[str]`)
    * `targetListenerName` (`pulumi.Input[str]`)
    * `targetUrl` (`pulumi.Input[str]`)

    The **request_routing_rules** object supports the following:

    * `backend_address_pool_id` (`pulumi.Input[str]`) - The ID of the associated Backend Address Pool.
    * `backendAddressPoolName` (`pulumi.Input[str]`)
    * `backendHttpSettingsId` (`pulumi.Input[str]`) - The ID of the associated Backend HTTP Settings Configuration.
    * `backendHttpSettingsName` (`pulumi.Input[str]`)
    * `httpListenerId` (`pulumi.Input[str]`) - The ID of the associated HTTP Listener.
    * `httpListenerName` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `redirectConfigurationId` (`pulumi.Input[str]`) - The ID of the associated Redirect Configuration.
    * `redirectConfigurationName` (`pulumi.Input[str]`)
    * `rewriteRuleSetId` (`pulumi.Input[str]`) - The ID of the associated Rewrite Rule Set.
    * `rewriteRuleSetName` (`pulumi.Input[str]`)
    * `ruleType` (`pulumi.Input[str]`)
    * `urlPathMapId` (`pulumi.Input[str]`) - The ID of the associated URL Path Map.
    * `urlPathMapName` (`pulumi.Input[str]`)

    The **rewrite_rule_sets** object supports the following:

    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `rewriteRules` (`pulumi.Input[list]`)
    * `conditions` (`pulumi.Input[list]`)
    * `ignoreCase` (`pulumi.Input[bool]`)
    * `negate` (`pulumi.Input[bool]`)
    * `pattern` (`pulumi.Input[str]`)
    * `variable` (`pulumi.Input[str]`)
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `requestHeaderConfigurations` (`pulumi.Input[list]`)
    * `headerName` (`pulumi.Input[str]`)
    * `headerValue` (`pulumi.Input[str]`)
    * `responseHeaderConfigurations` (`pulumi.Input[list]`)
    * `headerName` (`pulumi.Input[str]`)
    * `headerValue` (`pulumi.Input[str]`)
    * `ruleSequence` (`pulumi.Input[float]`)

    The **sku** object supports the following:

    * `capacity` (`pulumi.Input[float]`)
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `tier` (`pulumi.Input[str]`)

    The **ssl_certificates** object supports the following:

    * `data` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `password` (`pulumi.Input[str]`)
    * `publicCertData` (`pulumi.Input[str]`) - The Public Certificate Data associated with the SSL Certificate.

    The **ssl_policies** object supports the following:

    * `cipherSuites` (`pulumi.Input[list]`)
    * `disabledProtocols` (`pulumi.Input[list]`)
    * `minProtocolVersion` (`pulumi.Input[str]`)
    * `policyName` (`pulumi.Input[str]`)
    * `policyType` (`pulumi.Input[str]`)

    The **trusted_root_certificates** object supports the following:

    * `data` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.

    The **url_path_maps** object supports the following:

    * `defaultBackendAddressPoolId` (`pulumi.Input[str]`) - The ID of the Default Backend Address Pool.
    * `defaultBackendAddressPoolName` (`pulumi.Input[str]`)
    * `defaultBackendHttpSettingsId` (`pulumi.Input[str]`) - The ID of the Default Backend HTTP Settings Collection.
    * `defaultBackendHttpSettingsName` (`pulumi.Input[str]`)
    * `defaultRedirectConfigurationId` (`pulumi.Input[str]`) - The ID of the Default Redirect Configuration.
    * `defaultRedirectConfigurationName` (`pulumi.Input[str]`)
    * `defaultRewriteRuleSetId` (`pulumi.Input[str]`)
    * `defaultRewriteRuleSetName` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `pathRules` (`pulumi.Input[list]`) - A list of `path_rule` blocks as defined above.
    * `backend_address_pool_id` (`pulumi.Input[str]`) - The ID of the associated Backend Address Pool.
    * `backendAddressPoolName` (`pulumi.Input[str]`)
    * `backendHttpSettingsId` (`pulumi.Input[str]`) - The ID of the associated Backend HTTP Settings Configuration.
    * `backendHttpSettingsName` (`pulumi.Input[str]`)
    * `id` (`pulumi.Input[str]`) - The ID of the Rewrite Rule Set
    * `name` (`pulumi.Input[str]`) - The name of the Application Gateway. Changing this forces a new resource to be created.
    * `paths` (`pulumi.Input[list]`)
    * `redirectConfigurationId` (`pulumi.Input[str]`) - The ID of the associated Redirect Configuration.
    * `redirectConfigurationName` (`pulumi.Input[str]`)
    * `rewriteRuleSetId` (`pulumi.Input[str]`) - The ID of the associated Rewrite Rule Set.
    * `rewriteRuleSetName` (`pulumi.Input[str]`)

    The **waf_configuration** object supports the following:

    * `disabledRuleGroups` (`pulumi.Input[list]`)
    * `ruleGroupName` (`pulumi.Input[str]`)
    * `rules` (`pulumi.Input[list]`)
    * `enabled` (`pulumi.Input[bool]`)
    * `exclusions` (`pulumi.Input[list]`)
    * `matchVariable` (`pulumi.Input[str]`)
    * `selector` (`pulumi.Input[str]`)
    * `selectorMatchOperator` (`pulumi.Input[str]`)
    * `fileUploadLimitMb` (`pulumi.Input[float]`)
    * `firewallMode` (`pulumi.Input[str]`)
    * `maxRequestBodySizeKb` (`pulumi.Input[float]`)
    * `requestBodyCheck` (`pulumi.Input[bool]`)
    * `ruleSetType` (`pulumi.Input[str]`)
    * `ruleSetVersion` (`pulumi.Input[str]`)

    > This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/application_gateway.html.markdown.
    """
    # Folding the provider ID into opts makes __init__ take the
    # "lookup existing resource" path instead of creating a new one.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    # All state overrides are optional; unspecified ones stay None and the
    # engine fills them from the resource's actual state.
    __props__ = dict()
    __props__["authentication_certificates"] = authentication_certificates
    __props__["autoscale_configuration"] = autoscale_configuration
    __props__["backend_address_pools"] = backend_address_pools
    __props__["backend_http_settings"] = backend_http_settings
    __props__["custom_error_configurations"] = custom_error_configurations
    __props__["disabled_ssl_protocols"] = disabled_ssl_protocols
    __props__["enable_http2"] = enable_http2
    __props__["frontend_ip_configurations"] = frontend_ip_configurations
    __props__["frontend_ports"] = frontend_ports
    __props__["gateway_ip_configurations"] = gateway_ip_configurations
    __props__["http_listeners"] = http_listeners
    __props__["identity"] = identity
    __props__["location"] = location
    __props__["name"] = name
    __props__["probes"] = probes
    __props__["redirect_configurations"] = redirect_configurations
    __props__["request_routing_rules"] = request_routing_rules
    __props__["resource_group_name"] = resource_group_name
    __props__["rewrite_rule_sets"] = rewrite_rule_sets
    __props__["sku"] = sku
    __props__["ssl_certificates"] = ssl_certificates
    __props__["ssl_policies"] = ssl_policies
    __props__["tags"] = tags
    __props__["trusted_root_certificates"] = trusted_root_certificates
    __props__["url_path_maps"] = url_path_maps
    __props__["waf_configuration"] = waf_configuration
    __props__["zones"] = zones
    return ApplicationGateway(resource_name, opts=opts, __props__=__props__)
def translate_output_property(self, prop):
return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 54.217217
| 721
| 0.63043
| 6,006
| 54,163
| 5.54329
| 0.054113
| 0.126874
| 0.104286
| 0.065359
| 0.902832
| 0.892981
| 0.887844
| 0.862494
| 0.850569
| 0.83498
| 0
| 0.000836
| 0.249506
| 54,163
| 998
| 722
| 54.271543
| 0.8182
| 0.553311
| 0
| 0.014599
| 1
| 0
| 0.183289
| 0.082977
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029197
| false
| 0.007299
| 0.043796
| 0.014599
| 0.29927
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce95f390733408c5e48ea1a2c88946c87244c0e4
| 5,441
|
py
|
Python
|
src/standard.py
|
Genius1512/console-based
|
f81d0acb0d547a9828d472abc393ee29a14c2bb0
|
[
"MIT"
] | 1
|
2021-09-20T10:41:40.000Z
|
2021-09-20T10:41:40.000Z
|
src/standard.py
|
Genius1512/console-based
|
f81d0acb0d547a9828d472abc393ee29a14c2bb0
|
[
"MIT"
] | 1
|
2021-09-20T10:41:16.000Z
|
2021-09-20T11:23:10.000Z
|
src/standard.py
|
Genius1512/console-based
|
f81d0acb0d547a9828d472abc393ee29a14c2bb0
|
[
"MIT"
] | null | null | null |
# 30x30 tile map: "." is an empty cell, "o" is a wall segment.
# Rows run top-to-bottom; the layout is symmetric about both axes
# (Pac-Man-style corridors with a central gap).
level = [
    [".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", "o", "o", "o", "o", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", "o", "o", "o", "o", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", "o", ".", ".", ".", "o", "o", "o", "o", ".", ".", ".", ".", "o", "o", "o", "o", ".", ".", ".", "o", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    ["o", "o", "o", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", "o", "o", "o"],
    ["o", "o", "o", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", "o", "o", "o"],
    [".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", "o", ".", ".", ".", "o", "o", "o", "o", ".", ".", ".", ".", "o", "o", "o", "o", ".", ".", ".", "o", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", "o", "o", "o", "o", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", "o", "o", "o", "o", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."],
    [".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "o", "o", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", ".", "."]
]
| 164.878788
| 180
| 0.018563
| 97
| 5,441
| 1.041237
| 0.020619
| 1.881188
| 2.792079
| 3.683168
| 0.950495
| 0.950495
| 0.950495
| 0.950495
| 0.950495
| 0.950495
| 0
| 0
| 0.326043
| 5,441
| 32
| 181
| 170.03125
| 0.027543
| 0
| 0
| 0.90625
| 0
| 0
| 0.165411
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
0cbdc12832891728af13afe9b396417487314639
| 121
|
py
|
Python
|
src/raspberrypi/actions/__init__.py
|
EnricoFortunato/hawkeye
|
9acf5d5e0d37fba794f2ea8b44e705ab086a358c
|
[
"Apache-2.0"
] | null | null | null |
src/raspberrypi/actions/__init__.py
|
EnricoFortunato/hawkeye
|
9acf5d5e0d37fba794f2ea8b44e705ab086a358c
|
[
"Apache-2.0"
] | null | null | null |
src/raspberrypi/actions/__init__.py
|
EnricoFortunato/hawkeye
|
9acf5d5e0d37fba794f2ea8b44e705ab086a358c
|
[
"Apache-2.0"
] | null | null | null |
from actions.action import Action
from actions.echo_action import Echo_Action
from actions.snap_action import Snap_Action
| 40.333333
| 43
| 0.884298
| 19
| 121
| 5.421053
| 0.315789
| 0.320388
| 0.330097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 121
| 3
| 44
| 40.333333
| 0.936364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0cfd16f9616d97b20355e08c3a47794eaf20b384
| 41
|
py
|
Python
|
python/miniconda/vendored/vendor/noarch/jinja2-3.0.0-pyhd3eb1b0_0/info/test/run_test.py
|
kvedurmu/paketo-samples
|
525b49f14883a6aa54959de3232430f0fdc1e66e
|
[
"Apache-2.0"
] | 1
|
2021-11-08T01:25:40.000Z
|
2021-11-08T01:25:40.000Z
|
python/miniconda/vendored/vendor/noarch/jinja2-3.0.0-pyhd3eb1b0_0/info/test/run_test.py
|
kvedurmu/paketo-samples
|
525b49f14883a6aa54959de3232430f0fdc1e66e
|
[
"Apache-2.0"
] | 19
|
2021-03-10T21:30:56.000Z
|
2022-02-27T06:45:03.000Z
|
python/miniconda/vendored/vendor/noarch/jinja2-3.0.0-pyhd3eb1b0_0/info/test/run_test.py
|
kvedurmu/paketo-samples
|
525b49f14883a6aa54959de3232430f0fdc1e66e
|
[
"Apache-2.0"
] | 2
|
2021-11-08T01:25:30.000Z
|
2022-01-13T07:53:38.000Z
|
# Conda package smoke test: announce the module under test, then verify
# that jinja2 can be imported at all (ImportError fails the test run).
print("import: 'jinja2'")
import jinja2
| 10.25
| 25
| 0.707317
| 5
| 41
| 5.8
| 0.6
| 0.827586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0.121951
| 41
| 3
| 26
| 13.666667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
0b6703d849d2b30738aa01b9146d86cc1e0b34f4
| 1,497
|
py
|
Python
|
prometheus/template_backends/jinja2/globals/__init__.py
|
face-digital/prometheus_cms
|
95f8a8f90165cbcac53976d9792989f1d30b0ab9
|
[
"MIT"
] | null | null | null |
prometheus/template_backends/jinja2/globals/__init__.py
|
face-digital/prometheus_cms
|
95f8a8f90165cbcac53976d9792989f1d30b0ab9
|
[
"MIT"
] | null | null | null |
prometheus/template_backends/jinja2/globals/__init__.py
|
face-digital/prometheus_cms
|
95f8a8f90165cbcac53976d9792989f1d30b0ab9
|
[
"MIT"
] | null | null | null |
from prometheus.template_backends.jinja2.globals.datetime import date # NOQA
from prometheus.template_backends.jinja2.globals.extensions import CacheExtension # NOQA
from prometheus.template_backends.jinja2.globals.extensions import SpacelessExtension # NOQA
from prometheus.template_backends.jinja2.globals.i18n import get_language_href # NOQA
from prometheus.template_backends.jinja2.globals.i18n import get_languages # NOQA
from prometheus.template_backends.jinja2.globals.i18n import ugettext # NOQA
from prometheus.template_backends.jinja2.globals.images import cropped_thumbnail # NOQA
from prometheus.template_backends.jinja2.globals.images import thumbnail_obj # NOQA
from prometheus.template_backends.jinja2.globals.images import thumbnail # NOQA
from prometheus.template_backends.jinja2.globals.numbers import floatformat # NOQA
from prometheus.template_backends.jinja2.globals.numbers import intcomma # NOQA
from prometheus.template_backends.jinja2.globals.numbers import random_int # NOQA
from prometheus.template_backends.jinja2.globals.settings import site_name # NOQA
from prometheus.template_backends.jinja2.globals.settings import site_url # NOQA
from prometheus.template_backends.jinja2.globals.static import static # NOQA
from prometheus.template_backends.jinja2.globals.text import phone_url # NOQA
from prometheus.template_backends.jinja2.globals.text import strip_whitescapes # NOQA
from prometheus.template_backends.jinja2.globals.text import rjust # NOQA
| 78.789474
| 93
| 0.855711
| 190
| 1,497
| 6.594737
| 0.2
| 0.201117
| 0.316042
| 0.430966
| 0.865124
| 0.865124
| 0.830806
| 0.830806
| 0.781325
| 0.308061
| 0
| 0.017505
| 0.084168
| 1,497
| 18
| 94
| 83.166667
| 0.896426
| 0.059452
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
e7e4febfc8004b0178dcd9fdbb44d411316d8ef6
| 3,372
|
py
|
Python
|
orders_linux.py
|
ThomasMrY/ActivationFunctionDemo
|
1f75c7964e354e7593e1fe54932877feb2961488
|
[
"MIT"
] | 12
|
2018-10-29T02:07:43.000Z
|
2022-03-18T09:56:34.000Z
|
orders_linux.py
|
ThomasMrY/Activation_function_demo
|
1f75c7964e354e7593e1fe54932877feb2961488
|
[
"MIT"
] | null | null | null |
orders_linux.py
|
ThomasMrY/Activation_function_demo
|
1f75c7964e354e7593e1fe54932877feb2961488
|
[
"MIT"
] | 6
|
2019-05-31T08:54:39.000Z
|
2021-08-15T03:09:33.000Z
|
import os
import time
# os.popen("python main.py tanh 0 2 1 4 4",'r')
# time.sleep(20)
# os.popen("python main.py tanh 0 2 1 4 5",'r')
# time.sleep(20)
# os.popen("python main.py tanh 0 2 1 4 6",'r')
# time.sleep(20)
#
# os.popen("python main.py tanh 0 2 1 6 4",'r')
# time.sleep(20)
# os.popen("python main.py tanh 0 2 1 6 5",'r')
# time.sleep(20)
# os.popen("python main.py tanh 0 2 1 6 6",'r')
# time.sleep(60)
#
# os.popen("python main.py tanh 0 2 1 8 4",'r')
# time.sleep(60)
# os.popen("python main.py tanh 0 2 1 8 5",'r')
# time.sleep(60)
# os.popen("python main.py tanh 0 2 1 8 6",'r')
# time.sleep(60)
# os.popen("python main.py selu -3.875 0 1 4 4",'r')
# time.sleep(20)
# os.popen("python main.py selu -3.875 0 1 4 5",'r')
# time.sleep(30)
# os.popen("python main.py selu -3.875 0 1 4 6",'r')
# time.sleep(30)
#
# os.popen("python main.py selu -3.875 0 1 6 4",'r')
# time.sleep(30)
# os.popen("python main.py selu -3.875 0 1 6 5",'r')
# time.sleep(30)
# os.popen("python main.py selu -3.875 0 1 6 6",'r')
# time.sleep(60)
#
# os.popen("python main.py selu -3.875 0 1 8 4",'r')
# time.sleep(60)
# os.popen("python main.py selu -3.875 0 1 8 5",'r')
# time.sleep(60)
# os.popen("python main.py selu -3.875 0 1 8 6",'r')
# time.sleep(60)
# print('tanh')
# os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py tanh 0 2 1 4 4 -Test_on_Datasets=True")
# os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py tanh 0 2 1 4 5 -Test_on_Datasets=True")
# os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py tanh 0 2 1 4 6 -Test_on_Datasets=True")
# os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py tanh 0 2 1 6 4 -Test_on_Datasets=True")
# os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py tanh 0 2 1 6 5 -Test_on_Datasets=True")
# os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py tanh 0 2 1 6 6 -Test_on_Datasets=True")
# os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py tanh 0 2 1 8 4 -Test_on_Datasets=True")
# os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py tanh 0 2 1 8 5 -Test_on_Datasets=True")
# os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py tanh 0 2 1 8 6 -Test_on_Datasets=True")
# Run the selu benchmark sweep sequentially (os.system blocks until each
# run finishes): all 9 combinations of two hyperparameters (4/6/8 x 4/5/6)
# with -3.875 0 1 as the fixed selu arguments, evaluating on the test sets.
# NOTE(review): the variable is spelled CUDA_VISIBLE_DEVICE, while CUDA
# itself reads CUDA_VISIBLE_DEVICES -- confirm whether main.py consumes
# this name or the GPU pinning silently has no effect.
print('selu')
os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py selu -3.875 0 1 4 4 -Test_on_Datasets=True")
os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py selu -3.875 0 1 4 5 -Test_on_Datasets=True")
os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py selu -3.875 0 1 4 6 -Test_on_Datasets=True")
os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py selu -3.875 0 1 6 4 -Test_on_Datasets=True")
os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py selu -3.875 0 1 6 5 -Test_on_Datasets=True")
os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py selu -3.875 0 1 6 6 -Test_on_Datasets=True")
os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py selu -3.875 0 1 8 4 -Test_on_Datasets=True")
os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py selu -3.875 0 1 8 5 -Test_on_Datasets=True")
os.system("CUDA_VISIBLE_DEVICE=0 /usr/local/Anaconda3/bin/python main.py selu -3.875 0 1 8 6 -Test_on_Datasets=True")
| 53.52381
| 117
| 0.715302
| 710
| 3,372
| 3.295775
| 0.050704
| 0.153846
| 0.184615
| 0.130769
| 0.984615
| 0.984615
| 0.984615
| 0.964103
| 0.964103
| 0.963248
| 0
| 0.102892
| 0.118031
| 3,372
| 63
| 118
| 53.52381
| 0.683927
| 0.644425
| 0
| 0
| 0
| 0.75
| 0.818103
| 0.579634
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0.083333
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
f002e19dbca44607c6b165e4c6b046389710fa1b
| 14,517
|
py
|
Python
|
graf/grafJacobson/Graf.py
|
fikrialwi/werula
|
127d57fc2ffbb7a130dd4948b6e3871f1e614f8f
|
[
"Unlicense"
] | null | null | null |
graf/grafJacobson/Graf.py
|
fikrialwi/werula
|
127d57fc2ffbb7a130dd4948b6e3871f1e614f8f
|
[
"Unlicense"
] | null | null | null |
graf/grafJacobson/Graf.py
|
fikrialwi/werula
|
127d57fc2ffbb7a130dd4948b6e3871f1e614f8f
|
[
"Unlicense"
] | null | null | null |
import Bilangan as bil
import abc
class GrafofRing:
    """Base class for graphs defined over the ring Z_n.

    The ring is identified by a string whose last '_'-separated token is
    the modulus n (e.g. "Z_12" -> 12), plus an operation forwarded
    untouched to the Bilangan helpers.
    """

    def __init__(self, ring, operation):
        # ring: string encoding of the ring, e.g. "Z_12".
        self.ring = ring
        self.operation = operation
        # __Modulo = bil.Modulo(self.getNumber(),self.operation)

    def getNumber(self):
        """Return the modulus n parsed from the ring name."""
        return int(self.ring.split('_')[-1])

    def getOperation(self):
        """Return the ring operation as given to the constructor."""
        return self.operation

    def getRing(self):
        """Return a fresh bil.Modulo object for this modulus/operation."""
        return bil.Modulo(self.getNumber(),self.getOperation())

    # NOTE(review): abc.abstractclassmethod is deprecated, and this class
    # does not use ABCMeta, so nothing is actually enforced as abstract;
    # the decorator also wraps these as classmethods on the base class.
    # Subclasses override them with plain instance methods, so changing
    # the decorator here could alter base-class behavior -- confirm
    # before touching.
    @abc.abstractclassmethod
    def edge(self):
        return self

    @abc.abstractclassmethod
    def vertex(self):
        return self

    @abc.abstractclassmethod
    def matrixAdjencey(self):
        return self
class GrafJacobson(GrafofRing):
    """Jacobson graph of Z_n.

    Vertices are the ring elements outside the Jacobson radical;
    distinct vertices i and j are adjacent when 1 - i*j (mod n) is not
    a unit of the ring.

    The redundant overrides of __init__/getRing/getOperation/getNumber
    (which only delegated to super()) are now inherited directly.
    """

    def getJacobson(self):
        """Return the Bilangan Jacobson helper for this modulus/operation."""
        return bil.Jacobson(self.getNumber(), self.getOperation())

    def get_jacobson(self):
        """Return the raw Jacobson radical as computed by the helper."""
        return self.getJacobson().jacobson()

    def vertex(self):
        """Return the vertex set: elements not in the Jacobson radical."""
        jac = self.getJacobson()
        element = jac.element()
        jacobson = jac.jacobson()[0]
        if isinstance(jacobson, list):
            return [i for i in element if i not in jacobson]
        # Non-list radical: the original excluded only 0 here --
        # presumably the radical is {0} in that case; TODO confirm
        # against bil.Jacobson's contract.
        return [i for i in element if i != 0]

    def edge(self):
        """Return the edge set as a deduplicated list of 2-tuples."""
        n = self.getNumber()
        unit = self.getRing().unit()  # hoisted: loop-invariant
        edge = []
        for i in self.vertex():
            for j in self.vertex():
                # Adjacent when 1 - i*j is not invertible mod n.
                if i != j and (1 - (i * j)) % n not in unit:
                    edge.append({i, j})
        edge = self.getRing().uniq(edge)
        return list(map(tuple, edge))

    def orde(self):
        """Return the number of vertices."""
        return len(self.vertex())

    def size(self):
        """Return the number of edges."""
        return len(self.edge())

    def edgeofVertex(self, vertex):
        """Return the neighbours of *vertex*, each listed once.

        BUGFIX: the original nested two loops over the same variable
        ``i``, so every neighbour was appended orde() times and
        degEdgeofVertex() was inflated by the same factor.
        """
        if vertex not in self.vertex():
            return "Bilangan tidak termasuk dalam himpunan titik graf"
        edge = []
        for i in self.vertex():
            if self.isEdgeof(i, vertex):
                edge.append(i)
        return edge

    def degEdgeofVertex(self, vertex):
        """Return the degree of *vertex*."""
        return len(self.edgeofVertex(vertex))

    def isEdgeof(self, vertex1, vertex2):
        """Return True when 1 - vertex1*vertex2 (mod n) is not a unit."""
        return (1 - vertex1 * vertex2) % self.getNumber() not in self.getRing().unit()

    def matrixAdjencey(self):
        """Return the 0/1 adjacency matrix, rows/cols ordered like vertex()."""
        matrix = []
        for i in self.vertex():
            row = []
            for j in self.vertex():
                row.append(1 if i != j and self.isEdgeof(i, j) else 0)
            matrix.append(row)
        return matrix
class GrafUnit(GrafofRing):
    """Unit graph of Z_n.

    Vertices are all ring elements; distinct vertices i and j are
    adjacent when i + j (mod n) is a unit of the ring.

    The redundant overrides of __init__/getNumber/getRing (which only
    delegated to super()) are now inherited directly.
    """

    def getUnit(self):
        """Return the units of the ring."""
        return self.getRing().unit()

    def vertex(self):
        """Return the vertex set: every element of the ring."""
        return self.getRing().element()

    def edge(self):
        """Return the edge set as a deduplicated list of 2-tuples."""
        edge = []
        for i in self.vertex():
            for j in self.vertex():
                # Routed through isEdgeof so edge() and the other
                # adjacency queries always agree.
                if i != j and self.isEdgeof(i, j):
                    edge.append({i, j})
        edge = self.getRing().uniq(edge)
        return list(map(tuple, edge))

    def orde(self):
        """Return the number of vertices."""
        return len(self.vertex())

    def size(self):
        """Return the number of edges."""
        return len(self.edge())

    def edgeofVertex(self, vertex):
        """Return the neighbours of *vertex*, each listed once.

        BUGFIX: the original nested two loops over the same variable
        ``i``, so every neighbour was appended orde() times.
        """
        if vertex not in self.vertex():
            return "Bilangan tidak termasuk dalam himpunan titik graf"
        edge = []
        for i in self.vertex():
            if self.isEdgeof(i, vertex):
                edge.append(i)
        return edge

    def degEdgeofVertex(self, vertex):
        """Return the degree of *vertex*."""
        return len(self.edgeofVertex(vertex))

    def isEdgeof(self, vertex1, vertex2):
        """Return True when vertex1 + vertex2 (mod n) is a unit.

        BUGFIX: this previously tested vertex1*vertex2, contradicting
        edge(), which used the sum; the unit-graph adjacency is defined
        by the sum being a unit.
        """
        return (vertex1 + vertex2) % self.getNumber() in self.getRing().unit()

    def matrixAdjencey(self):
        """Return the 0/1 adjacency matrix, rows/cols ordered like vertex()."""
        matrix = []
        for i in self.vertex():
            row = []
            for j in self.vertex():
                row.append(1 if i != j and self.isEdgeof(i, j) else 0)
            matrix.append(row)
        return matrix
class GrafZeroDivisor(GrafofRing):
    """Zero-divisor graph of Z_n.

    Vertices are the zero divisors of the ring; distinct vertices i and
    j are adjacent when i*j == 0 (mod n).

    The redundant overrides of __init__/getNumber/getRing (which only
    delegated to super()) are now inherited directly.
    """

    def vertex(self):
        """Return the vertex set: the zero divisors of the ring."""
        return self.getRing().zeroDivisor()

    def edge(self):
        """Return the edge set as a deduplicated list of 2-tuples."""
        edge = []
        for i in self.vertex():
            for j in self.vertex():
                # Routed through isEdgeof (same i*j == 0 mod n test as
                # the original inline expression).
                if i != j and self.isEdgeof(i, j):
                    edge.append({i, j})
        edge = self.getRing().uniq(edge)
        return list(map(tuple, edge))

    def orde(self):
        """Return the number of vertices."""
        return len(self.vertex())

    def size(self):
        """Return the number of edges."""
        return len(self.edge())

    def edgeofVertex(self, vertex):
        """Return the neighbours of *vertex*, each listed once.

        BUGFIX: the original nested two loops over the same variable
        ``i``, so every neighbour was appended orde() times.
        """
        if vertex not in self.vertex():
            return "Bilangan tidak termasuk dalam himpunan titik graf"
        edge = []
        for i in self.vertex():
            if self.isEdgeof(i, vertex):
                edge.append(i)
        return edge

    def degEdgeofVertex(self, vertex):
        """Return the degree of *vertex*."""
        return len(self.edgeofVertex(vertex))

    def isEdgeof(self, vertex1, vertex2):
        """Return True when vertex1*vertex2 == 0 (mod n)."""
        return (vertex1 * vertex2) % self.getNumber() == 0

    def matrixAdjencey(self):
        """Return the 0/1 adjacency matrix, rows/cols ordered like vertex()."""
        matrix = []
        for i in self.vertex():
            row = []
            for j in self.vertex():
                row.append(1 if i != j and self.isEdgeof(i, j) else 0)
            matrix.append(row)
        return matrix
class GrafTotal(GrafofRing):
    """Total graph of Z_n.

    Vertices are all ring elements; distinct vertices i and j are
    adjacent when i + j (mod n) is a zero divisor.

    The redundant overrides of __init__/getNumber/getRing (which only
    delegated to super()) are now inherited directly.
    """

    def getZeroDivisor(self):
        """Return the zero divisors of the ring."""
        return self.getRing().zeroDivisor()

    def vertex(self):
        """Return the vertex set: every element of the ring."""
        return self.getRing().element()

    def edge(self):
        """Return the edge set as a deduplicated list of 2-tuples."""
        edge = []
        for i in self.vertex():
            for j in self.vertex():
                if i != j and self.isEdgeof(i, j):
                    edge.append({i, j})
        edge = self.getRing().uniq(edge)
        return list(map(tuple, edge))

    def orde(self):
        """Return the number of vertices."""
        return len(self.vertex())

    def size(self):
        """Return the number of edges."""
        return len(self.edge())

    def edgeofVertex(self, vertex):
        """Return the neighbours of *vertex*, each listed once."""
        if vertex not in self.vertex():
            return "Bilangan tidak termasuk dalam himpunan titik graf"
        edge = []
        for i in self.vertex():
            if self.isEdgeof(i, vertex):
                edge.append(i)
        return edge

    def degEdgeofVertex(self, vertex):
        """Return the degree of *vertex*."""
        return len(self.edgeofVertex(vertex))

    def isEdgeof(self, vertex1, vertex2):
        """Return True when vertex1 + vertex2 (mod n) is a zero divisor."""
        return (vertex1 + vertex2) % self.getNumber() in self.getZeroDivisor()

    def matrixAdjencey(self):
        """Return the 0/1 adjacency matrix, rows/cols ordered like vertex()."""
        matrix = []
        for i in self.vertex():
            row = []
            for j in self.vertex():
                row.append(1 if i != j and self.isEdgeof(i, j) else 0)
            matrix.append(row)
        return matrix
class GrafTotalZeroDivisor(GrafofRing):
    """Graph on the zero divisors of Z_n.

    Distinct vertices i and j are adjacent when i*j (mod n) is a zero
    divisor AND i + j == 0 (mod n), i.e. they are additive inverses.

    The redundant overrides of __init__/getNumber/getRing (which only
    delegated to super()) are now inherited directly.
    """

    def getZeroDivisor(self):
        """Return the zero divisors of the ring."""
        return self.getRing().zeroDivisor()

    def vertex(self):
        """Return the vertex set: the zero divisors of the ring."""
        return self.getZeroDivisor()

    def edge(self):
        """Return the edge set as a deduplicated list of 2-tuples."""
        edge = []
        for i in self.vertex():
            for j in self.vertex():
                if i != j and self.isEdgeof(i, j):
                    edge.append({i, j})
        edge = self.getRing().uniq(edge)
        return list(map(tuple, edge))

    def orde(self):
        """Return the number of vertices."""
        return len(self.vertex())

    def size(self):
        """Return the number of edges."""
        return len(self.edge())

    def edgeofVertex(self, vertex):
        """Return the neighbours of *vertex*, each listed once."""
        if vertex not in self.vertex():
            return "Bilangan tidak termasuk dalam himpunan titik graf"
        edge = []
        for i in self.vertex():
            if self.isEdgeof(i, vertex):
                edge.append(i)
        return edge

    def degEdgeofVertex(self, vertex):
        """Return the degree of *vertex*."""
        return len(self.edgeofVertex(vertex))

    def isEdgeof(self, vertex1, vertex2):
        """Return True when vertex1*vertex2 (mod n) is a zero divisor
        and vertex1 + vertex2 == 0 (mod n)."""
        n = self.getNumber()
        return (vertex1 * vertex2) % n in self.getZeroDivisor() and (vertex1 + vertex2) % n == 0

    def matrixAdjencey(self):
        """Return the 0/1 adjacency matrix, rows/cols ordered like vertex()."""
        matrix = []
        for i in self.vertex():
            row = []
            for j in self.vertex():
                row.append(1 if i != j and self.isEdgeof(i, j) else 0)
            matrix.append(row)
        return matrix
class GrafIdentity(GrafofRing):
    """Identity graph of Z_n.

    Vertices are the units of the ring; distinct vertices i and j are
    adjacent when i*j == 1 (mod n), i.e. they are multiplicative
    inverses of each other.

    The redundant overrides of __init__/getNumber/getRing (which only
    delegated to super()) are now inherited directly.
    """

    def getUnit(self):
        """Return the units of the ring."""
        return self.getRing().unit()

    def vertex(self):
        """Return the vertex set: the units of the ring."""
        return self.getUnit()

    def edge(self):
        """Return the edge set as a deduplicated list of 2-tuples."""
        edge = []
        for i in self.vertex():
            for j in self.vertex():
                if i != j and self.isEdgeof(i, j):
                    edge.append({i, j})
        edge = self.getRing().uniq(edge)
        return list(map(tuple, edge))

    def orde(self):
        """Return the number of vertices."""
        return len(self.vertex())

    def size(self):
        """Return the number of edges."""
        return len(self.edge())

    def edgeofVertex(self, vertex):
        """Return the neighbours of *vertex*, each listed once."""
        if vertex not in self.vertex():
            return "Bilangan tidak termasuk dalam himpunan titik graf"
        edge = []
        for i in self.vertex():
            if self.isEdgeof(i, vertex):
                edge.append(i)
        return edge

    def degEdgeofVertex(self, vertex):
        """Return the degree of *vertex*."""
        return len(self.edgeofVertex(vertex))

    def isEdgeof(self, vertex1, vertex2):
        """Return True when vertex1*vertex2 == 1 (mod n)."""
        return (vertex1 * vertex2) % self.getNumber() == 1

    def matrixAdjencey(self):
        """Return the 0/1 adjacency matrix, rows/cols ordered like vertex()."""
        matrix = []
        for i in self.vertex():
            row = []
            for j in self.vertex():
                row.append(1 if i != j and self.isEdgeof(i, j) else 0)
            matrix.append(row)
        return matrix
class GrafAnnihilator(GrafofRing):
    """Annihilator graph of Z_n.

    Vertices are the zero divisors; distinct vertices i and j are
    adjacent when ann(i) ∪ ann(j) != ann(i*j mod n).

    The redundant overrides of __init__/getNumber/getRing (which only
    delegated to super()) are now inherited directly.
    """

    def getZeroDivisor(self):
        """Return the zero divisors of the ring."""
        return self.getRing().zeroDivisor()

    def vertex(self):
        """Return the vertex set: the zero divisors of the ring."""
        return self.getZeroDivisor()

    def edge(self):
        """Return the edge set as a deduplicated list of 2-tuples.

        CLEANUP: the original recomputed ``ring = self.getRing()`` on
        every inner-loop iteration without ever using it; removed.
        """
        edge = []
        for i in self.vertex():
            for j in self.vertex():
                if i != j and self.isEdgeof(i, j):
                    edge.append({i, j})
        edge = self.getRing().uniq(edge)
        return list(map(tuple, edge))

    def orde(self):
        """Return the number of vertices."""
        return len(self.vertex())

    def size(self):
        """Return the number of edges."""
        return len(self.edge())

    def edgeofVertex(self, vertex):
        """Return the neighbours of *vertex*, each listed once."""
        if vertex not in self.vertex():
            return "Bilangan tidak termasuk dalam himpunan titik graf"
        edge = []
        for i in self.vertex():
            if self.isEdgeof(i, vertex):
                edge.append(i)
        return edge

    def degEdgeofVertex(self, vertex):
        """Return the degree of *vertex*."""
        return len(self.edgeofVertex(vertex))

    def isEdgeof(self, vertex1, vertex2):
        """Return True when the union of the two annihilators differs
        from the annihilator of the product (mod n)."""
        ring = self.getRing()
        product = vertex1 * vertex2 % self.getNumber()
        return ring.union(ring.annihilator(vertex1), ring.annihilator(vertex2)) != ring.annihilator(product)

    def matrixAdjencey(self):
        """Return the 0/1 adjacency matrix, rows/cols ordered like vertex()."""
        matrix = []
        for i in self.vertex():
            row = []
            for j in self.vertex():
                row.append(1 if i != j and self.isEdgeof(i, j) else 0)
            matrix.append(row)
        return matrix
class GrafMaximal(GrafofRing):
    """Maximal-ideal graph of Z_n.

    Vertices are all ring elements; distinct vertices i and j are
    adjacent when some maximal ideal of the ring contains both.

    The redundant overrides of __init__/getNumber/getRing (which only
    delegated to super()) are now inherited directly.
    """

    def getIdealMax(self):
        """Return the maximal ideals of the ring."""
        return self.getRing().idealMax()

    def vertex(self):
        """Return the vertex set: every element of the ring."""
        return self.getRing().element()

    def edge(self):
        """Return the edge set as a deduplicated list of 2-tuples."""
        edge = []
        for i in self.vertex():
            for j in self.vertex():
                if i != j and self.isEdgeof(i, j):
                    edge.append({i, j})
        edge = self.getRing().uniq(edge)
        return list(map(tuple, edge))

    def orde(self):
        """Return the number of vertices."""
        return len(self.vertex())

    def size(self):
        """Return the number of edges."""
        return len(self.edge())

    def edgeofVertex(self, vertex):
        """Return the neighbours of *vertex*, each listed once."""
        if vertex not in self.vertex():
            return "Bilangan tidak termasuk dalam himpunan titik graf"
        edge = []
        for i in self.vertex():
            if self.isEdgeof(i, vertex):
                edge.append(i)
        return edge

    def degEdgeofVertex(self, vertex):
        """Return the degree of *vertex*."""
        return len(self.edgeofVertex(vertex))

    def isEdgeof(self, vertex1, vertex2):
        """Return True when both vertices lie in a common maximal ideal."""
        for ideal in self.getIdealMax():
            if vertex1 in ideal and vertex2 in ideal:
                return True
        return False

    def matrixAdjencey(self):
        """Return the 0/1 adjacency matrix, rows/cols ordered like vertex()."""
        matrix = []
        for i in self.vertex():
            row = []
            for j in self.vertex():
                row.append(1 if i != j and self.isEdgeof(i, j) else 0)
            matrix.append(row)
        return matrix
| 34.077465
| 144
| 0.526831
| 1,609
| 14,517
| 4.708515
| 0.049099
| 0.098997
| 0.058606
| 0.027719
| 0.881336
| 0.862724
| 0.842925
| 0.830913
| 0.828274
| 0.820618
| 0
| 0.006568
| 0.360267
| 14,517
| 425
| 145
| 34.157647
| 0.809196
| 0.00372
| 0
| 0.886747
| 0
| 0
| 0.028001
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.250602
| false
| 0
| 0.004819
| 0.163855
| 0.527711
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
f06c0509a190102d16ce49709248993a69fdf3da
| 927
|
py
|
Python
|
goodVibe/paths.py
|
vitorhugo-guedes/twitter-bot
|
4953d99ee50e43e234551ef7caefdd715a922bd3
|
[
"MIT"
] | 2
|
2021-09-23T21:53:22.000Z
|
2021-10-04T04:00:29.000Z
|
goodVibe/paths.py
|
vitorhugo-guedes/twitter-bot
|
4953d99ee50e43e234551ef7caefdd715a922bd3
|
[
"MIT"
] | null | null | null |
goodVibe/paths.py
|
vitorhugo-guedes/twitter-bot
|
4953d99ee50e43e234551ef7caefdd715a922bd3
|
[
"MIT"
] | null | null | null |
# XPath selectors for Twitter's web UI, consumed by a Selenium bot.
# NOTE(review): these look like absolute paths copied from the browser
# inspector -- extremely brittle; any front-end change breaks them.
# Presumably the login flow uses the first four and the tweet-compose
# flow the last two -- verify against the caller.
passwordInputPath = '//*[@id="layers"]/div[2]/div/div/div/div/div/div[2]/div[2]/div/div/div[2]/div[2]/div[1]/div/div[2]/div/label/div/div[2]/div/input'
loginButtonPath = '//*[@id="layers"]/div[2]/div/div/div/div/div/div[2]/div[2]/div/div/div[2]/div[2]/div[2]/div/div/span/span'
emailInputPath = '//*[@id="layers"]/div[2]/div/div/div/div/div/div[2]/div[2]/div/div/div[2]/div[2]/div[1]/div/div[2]/label/div/div[2]/div/input'
loginPasswordPath = '//*[@id="layers"]/div[2]/div/div/div/div/div/div[2]/div[2]/div/div/div[2]/div[2]/div[2]/div/div/span/span'
inputMessagePath = '//*[@id="react-root"]/div/div/div[2]/main/div/div/div/div/div/div[2]/div/div[2]/div[1]/div/div/div/div[2]/div[1]/div/div/div/div/div/div/div/div/div/label/div[1]/div/div/div/div/div[2]/div/div/div/div'
twitButtonPath = '//*[@id="react-root"]/div/div/div[2]/main/div/div/div/div/div/div[2]/div/div[2]/div[1]/div/div/div/div[2]/div[3]/div/div/div[2]/div[3]'
| 132.428571
| 221
| 0.660194
| 199
| 927
| 3.075377
| 0.090452
| 0.715686
| 0.691176
| 0.568627
| 0.834967
| 0.834967
| 0.735294
| 0.728758
| 0.668301
| 0.647059
| 0
| 0.048405
| 0.019417
| 927
| 6
| 222
| 154.5
| 0.624862
| 0
| 0
| 0
| 0
| 1
| 0.860841
| 0.860841
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 14
|
f0801493be5bbdd89eb2c11603e129e06199a06a
| 67
|
py
|
Python
|
config/utility/__init__.py
|
phirasit/TestcaseGenerator
|
443f320e927a606d9d64933b60591c67c83b6630
|
[
"MIT"
] | null | null | null |
config/utility/__init__.py
|
phirasit/TestcaseGenerator
|
443f320e927a606d9d64933b60591c67c83b6630
|
[
"MIT"
] | null | null | null |
config/utility/__init__.py
|
phirasit/TestcaseGenerator
|
443f320e927a606d9d64933b60591c67c83b6630
|
[
"MIT"
] | null | null | null |
from config.utility.map import *
from config.utility.file import *
| 22.333333
| 33
| 0.791045
| 10
| 67
| 5.3
| 0.6
| 0.377358
| 0.641509
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119403
| 67
| 2
| 34
| 33.5
| 0.898305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b2d0eb60e6c579e6358c23e26b50d0e9cda84bcb
| 266
|
py
|
Python
|
flownmt/modules/posteriors/__init__.py
|
DeNeutoy/flowseq
|
8cb4ae00c26fbeb3e1459e3b3b90e7e9a84c3d2b
|
[
"Apache-2.0"
] | 256
|
2019-08-14T23:26:23.000Z
|
2022-03-03T16:52:14.000Z
|
flownmt/modules/posteriors/__init__.py
|
DeNeutoy/flowseq
|
8cb4ae00c26fbeb3e1459e3b3b90e7e9a84c3d2b
|
[
"Apache-2.0"
] | 8
|
2019-10-18T02:54:10.000Z
|
2021-10-01T21:30:23.000Z
|
flownmt/modules/posteriors/__init__.py
|
DeNeutoy/flowseq
|
8cb4ae00c26fbeb3e1459e3b3b90e7e9a84c3d2b
|
[
"Apache-2.0"
] | 32
|
2019-09-06T10:02:45.000Z
|
2021-12-23T06:44:37.000Z
|
from flownmt.modules.posteriors.posterior import Posterior
from flownmt.modules.posteriors.rnn import RecurrentPosterior
from flownmt.modules.posteriors.shift_rnn import ShiftRecurrentPosterior
from flownmt.modules.posteriors.transformer import TransformerPosterior
| 53.2
| 72
| 0.894737
| 29
| 266
| 8.172414
| 0.413793
| 0.185654
| 0.303797
| 0.472574
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06015
| 266
| 4
| 73
| 66.5
| 0.948
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6504e8753579c53479a7d6db0ca056e2a989537e
| 187
|
py
|
Python
|
astroquery/setup_package.py
|
tcaram/astroquery
|
f6f397c5638c081e9a9cadad42827f8a961ca338
|
[
"BSD-3-Clause"
] | 1
|
2020-08-26T21:11:13.000Z
|
2020-08-26T21:11:13.000Z
|
astroquery/setup_package.py
|
tcaram/astroquery
|
f6f397c5638c081e9a9cadad42827f8a961ca338
|
[
"BSD-3-Clause"
] | 1
|
2018-11-07T21:00:18.000Z
|
2018-11-07T21:00:18.000Z
|
astroquery/setup_package.py
|
tcaram/astroquery
|
f6f397c5638c081e9a9cadad42827f8a961ca338
|
[
"BSD-3-Clause"
] | 1
|
2018-06-22T12:44:24.000Z
|
2018-06-22T12:44:24.000Z
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import
def get_package_data():
    """Return the package-data mapping used by the astropy build machinery.

    Maps the ``astroquery`` package to the non-Python files that must ship
    with it: the configuration template and the citation file.
    """
    bundled_files = ['astroquery.cfg', 'CITATION']
    return {'astroquery': bundled_files}
| 26.714286
| 63
| 0.748663
| 25
| 187
| 5.32
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00625
| 0.144385
| 187
| 6
| 64
| 31.166667
| 0.825
| 0.326203
| 0
| 0
| 0
| 0
| 0.258065
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
e8ee27d2161848bfbc8c1224f2334da50dd46871
| 111,507
|
py
|
Python
|
sdk/python/pulumi_ec/_inputs.py
|
pulumi/pulumi-ec
|
5036647eaa06d7298cae11a593dd22a6ce35a77c
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-11-09T15:35:56.000Z
|
2021-11-09T15:35:56.000Z
|
sdk/python/pulumi_ec/_inputs.py
|
pulumi/pulumi-ec
|
5036647eaa06d7298cae11a593dd22a6ce35a77c
|
[
"ECL-2.0",
"Apache-2.0"
] | 29
|
2021-11-03T12:51:54.000Z
|
2022-03-31T15:25:30.000Z
|
sdk/python/pulumi_ec/_inputs.py
|
pulumi/pulumi-ec
|
5036647eaa06d7298cae11a593dd22a6ce35a77c
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
# Public API of this generated module: one "input" (args) class per nested
# block of the Elastic Cloud provider's deployment resources and data sources.
__all__ = [
    'DeploymentApmArgs',
    'DeploymentApmConfigArgs',
    'DeploymentApmTopologyArgs',
    'DeploymentElasticsearchArgs',
    'DeploymentElasticsearchConfigArgs',
    'DeploymentElasticsearchExtensionArgs',
    'DeploymentElasticsearchRemoteClusterArgs',
    'DeploymentElasticsearchSnapshotSourceArgs',
    'DeploymentElasticsearchTopologyArgs',
    'DeploymentElasticsearchTopologyAutoscalingArgs',
    'DeploymentElasticsearchTopologyConfigArgs',
    'DeploymentElasticsearchTrustAccountArgs',
    'DeploymentElasticsearchTrustExternalArgs',
    'DeploymentEnterpriseSearchArgs',
    'DeploymentEnterpriseSearchConfigArgs',
    'DeploymentEnterpriseSearchTopologyArgs',
    'DeploymentIntegrationsServerArgs',
    'DeploymentIntegrationsServerConfigArgs',
    'DeploymentIntegrationsServerTopologyArgs',
    'DeploymentKibanaArgs',
    'DeploymentKibanaConfigArgs',
    'DeploymentKibanaTopologyArgs',
    'DeploymentObservabilityArgs',
    'DeploymentTrafficFilterRuleArgs',
    'GetDeploymentsApmArgs',
    'GetDeploymentsElasticsearchArgs',
    'GetDeploymentsEnterpriseSearchArgs',
    'GetDeploymentsIntegrationsServerArgs',
    'GetDeploymentsKibanaArgs',
]
@pulumi.input_type
class DeploymentApmArgs:
    def __init__(__self__, *,
                 config: Optional[pulumi.Input['DeploymentApmConfigArgs']] = None,
                 elasticsearch_cluster_ref_id: Optional[pulumi.Input[str]] = None,
                 http_endpoint: Optional[pulumi.Input[str]] = None,
                 https_endpoint: Optional[pulumi.Input[str]] = None,
                 ref_id: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 resource_id: Optional[pulumi.Input[str]] = None,
                 topologies: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentApmTopologyArgs']]]] = None):
        """
        Input arguments for the APM resource of a deployment.

        :param pulumi.Input['DeploymentApmConfigArgs'] config: APM settings applied to every topology unless overridden in the `topology` element.
        :param pulumi.Input[str] elasticsearch_cluster_ref_id: References the `ref_id` of the deployment Elasticsearch cluster; the default value `main-elasticsearch` is recommended.
        :param pulumi.Input[str] ref_id: Can be set on the APM resource; the default value `main-apm` is recommended.
        :param pulumi.Input[str] region: Elasticsearch Service (ESS) region where to create the deployment; for Elastic Cloud Enterprise (ECE) installations, set `"ece-region"`.
        :param pulumi.Input[Sequence[pulumi.Input['DeploymentApmTopologyArgs']]] topologies: May be repeated to compose complex topologies.
        """
        # Register only the arguments that were explicitly supplied; pulumi.set
        # records each value under its snake_case key on the input type.
        for arg_name, arg_value in (
            ("config", config),
            ("elasticsearch_cluster_ref_id", elasticsearch_cluster_ref_id),
            ("http_endpoint", http_endpoint),
            ("https_endpoint", https_endpoint),
            ("ref_id", ref_id),
            ("region", region),
            ("resource_id", resource_id),
            ("topologies", topologies),
        ):
            if arg_value is not None:
                pulumi.set(__self__, arg_name, arg_value)

    @property
    @pulumi.getter
    def config(self) -> Optional[pulumi.Input['DeploymentApmConfigArgs']]:
        """APM settings applied to every topology unless overridden in the `topology` element."""
        return pulumi.get(self, "config")

    @config.setter
    def config(self, value: Optional[pulumi.Input['DeploymentApmConfigArgs']]):
        pulumi.set(self, "config", value)

    @property
    @pulumi.getter(name="elasticsearchClusterRefId")
    def elasticsearch_cluster_ref_id(self) -> Optional[pulumi.Input[str]]:
        """References the `ref_id` of the deployment Elasticsearch cluster; the default value `main-elasticsearch` is recommended."""
        return pulumi.get(self, "elasticsearch_cluster_ref_id")

    @elasticsearch_cluster_ref_id.setter
    def elasticsearch_cluster_ref_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "elasticsearch_cluster_ref_id", value)

    @property
    @pulumi.getter(name="httpEndpoint")
    def http_endpoint(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "http_endpoint")

    @http_endpoint.setter
    def http_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "http_endpoint", value)

    @property
    @pulumi.getter(name="httpsEndpoint")
    def https_endpoint(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "https_endpoint")

    @https_endpoint.setter
    def https_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "https_endpoint", value)

    @property
    @pulumi.getter(name="refId")
    def ref_id(self) -> Optional[pulumi.Input[str]]:
        """Can be set on the APM resource; the default value `main-apm` is recommended."""
        return pulumi.get(self, "ref_id")

    @ref_id.setter
    def ref_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ref_id", value)

    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[str]]:
        """Elasticsearch Service (ESS) region where to create the deployment; for Elastic Cloud Enterprise (ECE) installations, set `"ece-region"`."""
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "region", value)

    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "resource_id")

    @resource_id.setter
    def resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_id", value)

    @property
    @pulumi.getter
    def topologies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentApmTopologyArgs']]]]:
        """May be repeated to compose complex topologies."""
        return pulumi.get(self, "topologies")

    @topologies.setter
    def topologies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentApmTopologyArgs']]]]):
        pulumi.set(self, "topologies", value)
@pulumi.input_type
class DeploymentApmConfigArgs:
    def __init__(__self__, *,
                 debug_enabled: Optional[pulumi.Input[bool]] = None,
                 docker_image: Optional[pulumi.Input[str]] = None,
                 user_settings_json: Optional[pulumi.Input[str]] = None,
                 user_settings_override_json: Optional[pulumi.Input[str]] = None,
                 user_settings_override_yaml: Optional[pulumi.Input[str]] = None,
                 user_settings_yaml: Optional[pulumi.Input[str]] = None):
        """
        Configuration settings for the APM resource.

        :param pulumi.Input[bool] debug_enabled: Enable debug mode for the component; defaults to `false`.
        :param pulumi.Input[str] user_settings_json: JSON-formatted user-level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_override_json: JSON-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_override_yaml: YAML-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_yaml: YAML-formatted user-level `elasticsearch.yml` setting overrides.
        """
        # Only explicitly-provided values are forwarded to the Pulumi runtime.
        for arg_name, arg_value in (
            ("debug_enabled", debug_enabled),
            ("docker_image", docker_image),
            ("user_settings_json", user_settings_json),
            ("user_settings_override_json", user_settings_override_json),
            ("user_settings_override_yaml", user_settings_override_yaml),
            ("user_settings_yaml", user_settings_yaml),
        ):
            if arg_value is not None:
                pulumi.set(__self__, arg_name, arg_value)

    @property
    @pulumi.getter(name="debugEnabled")
    def debug_enabled(self) -> Optional[pulumi.Input[bool]]:
        """Enable debug mode for the component; defaults to `false`."""
        return pulumi.get(self, "debug_enabled")

    @debug_enabled.setter
    def debug_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "debug_enabled", value)

    @property
    @pulumi.getter(name="dockerImage")
    def docker_image(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "docker_image")

    @docker_image.setter
    def docker_image(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "docker_image", value)

    @property
    @pulumi.getter(name="userSettingsJson")
    def user_settings_json(self) -> Optional[pulumi.Input[str]]:
        """JSON-formatted user-level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_json")

    @user_settings_json.setter
    def user_settings_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_json", value)

    @property
    @pulumi.getter(name="userSettingsOverrideJson")
    def user_settings_override_json(self) -> Optional[pulumi.Input[str]]:
        """JSON-formatted admin (ECE) level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_override_json")

    @user_settings_override_json.setter
    def user_settings_override_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_override_json", value)

    @property
    @pulumi.getter(name="userSettingsOverrideYaml")
    def user_settings_override_yaml(self) -> Optional[pulumi.Input[str]]:
        """YAML-formatted admin (ECE) level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_override_yaml")

    @user_settings_override_yaml.setter
    def user_settings_override_yaml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_override_yaml", value)

    @property
    @pulumi.getter(name="userSettingsYaml")
    def user_settings_yaml(self) -> Optional[pulumi.Input[str]]:
        """YAML-formatted user-level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_yaml")

    @user_settings_yaml.setter
    def user_settings_yaml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_yaml", value)
@pulumi.input_type
class DeploymentApmTopologyArgs:
    def __init__(__self__, *,
                 instance_configuration_id: Optional[pulumi.Input[str]] = None,
                 size: Optional[pulumi.Input[str]] = None,
                 size_resource: Optional[pulumi.Input[str]] = None,
                 zone_count: Optional[pulumi.Input[int]] = None):
        """
        Topology element for the APM resource.

        :param pulumi.Input[str] instance_configuration_id: Default instance configuration of the deployment template; no need to change this value since Kibana has only one _instance type_.
        :param pulumi.Input[str] size: Amount in Gigabytes per topology element in the `"<size in GB>g"` notation; when omitted, defaults to the deployment template value.
        :param pulumi.Input[str] size_resource: Type of resource to which the size is assigned; defaults to `"memory"`.
        :param pulumi.Input[int] zone_count: Number of zones the instance type of the Elasticsearch cluster will span, used to set or unset HA on an Elasticsearch node type; when omitted, defaults to the deployment template value.
        """
        # Only explicitly-provided values are forwarded to the Pulumi runtime.
        for arg_name, arg_value in (
            ("instance_configuration_id", instance_configuration_id),
            ("size", size),
            ("size_resource", size_resource),
            ("zone_count", zone_count),
        ):
            if arg_value is not None:
                pulumi.set(__self__, arg_name, arg_value)

    @property
    @pulumi.getter(name="instanceConfigurationId")
    def instance_configuration_id(self) -> Optional[pulumi.Input[str]]:
        """Default instance configuration of the deployment template; no need to change this value since Kibana has only one _instance type_."""
        return pulumi.get(self, "instance_configuration_id")

    @instance_configuration_id.setter
    def instance_configuration_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_configuration_id", value)

    @property
    @pulumi.getter
    def size(self) -> Optional[pulumi.Input[str]]:
        """Amount in Gigabytes per topology element in the `"<size in GB>g"` notation; when omitted, defaults to the deployment template value."""
        return pulumi.get(self, "size")

    @size.setter
    def size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size", value)

    @property
    @pulumi.getter(name="sizeResource")
    def size_resource(self) -> Optional[pulumi.Input[str]]:
        """Type of resource to which the size is assigned; defaults to `"memory"`."""
        return pulumi.get(self, "size_resource")

    @size_resource.setter
    def size_resource(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size_resource", value)

    @property
    @pulumi.getter(name="zoneCount")
    def zone_count(self) -> Optional[pulumi.Input[int]]:
        """Number of zones the instance type of the Elasticsearch cluster will span, used to set or unset HA on an Elasticsearch node type; when omitted, defaults to the deployment template value."""
        return pulumi.get(self, "zone_count")

    @zone_count.setter
    def zone_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "zone_count", value)
@pulumi.input_type
class DeploymentElasticsearchArgs:
    def __init__(__self__, *,
                 autoscale: Optional[pulumi.Input[str]] = None,
                 cloud_id: Optional[pulumi.Input[str]] = None,
                 config: Optional[pulumi.Input['DeploymentElasticsearchConfigArgs']] = None,
                 extensions: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchExtensionArgs']]]] = None,
                 http_endpoint: Optional[pulumi.Input[str]] = None,
                 https_endpoint: Optional[pulumi.Input[str]] = None,
                 ref_id: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 remote_clusters: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchRemoteClusterArgs']]]] = None,
                 resource_id: Optional[pulumi.Input[str]] = None,
                 snapshot_source: Optional[pulumi.Input['DeploymentElasticsearchSnapshotSourceArgs']] = None,
                 topologies: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTopologyArgs']]]] = None,
                 trust_accounts: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTrustAccountArgs']]]] = None,
                 trust_externals: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTrustExternalArgs']]]] = None):
        """
        Input arguments for the Elasticsearch resource of a deployment.

        :param pulumi.Input[str] autoscale: Enable or disable autoscaling; defaults to the setting coming from the deployment template. Accepted values are `"true"` or `"false"`.
        :param pulumi.Input['DeploymentElasticsearchConfigArgs'] config: Elasticsearch settings applied to every topology unless overridden in the `topology` element.
        :param pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchExtensionArgs']]] extensions: Custom Elasticsearch bundles or plugins; may be repeated.
        :param pulumi.Input[str] ref_id: Remote Elasticsearch `ref_id`; the default value `main-elasticsearch` is recommended.
        :param pulumi.Input[str] region: Elasticsearch Service (ESS) region where to create the deployment; for Elastic Cloud Enterprise (ECE) installations, set `"ece-region"`.
        :param pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchRemoteClusterArgs']]] remote_clusters: Elasticsearch remote clusters to configure for the Elasticsearch resource; may be repeated.
        :param pulumi.Input['DeploymentElasticsearchSnapshotSourceArgs'] snapshot_source: Restores data from a snapshot of another deployment.
        :param pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTopologyArgs']]] topologies: May be repeated to compose complex topologies.
        :param pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTrustAccountArgs']]] trust_accounts: The trust relationships with other ESS accounts.
        :param pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTrustExternalArgs']]] trust_externals: The trust relationship with external entities (remote environments, remote accounts...).
        """
        # Register only the arguments that were explicitly supplied; pulumi.set
        # records each value under its snake_case key on the input type.
        for arg_name, arg_value in (
            ("autoscale", autoscale),
            ("cloud_id", cloud_id),
            ("config", config),
            ("extensions", extensions),
            ("http_endpoint", http_endpoint),
            ("https_endpoint", https_endpoint),
            ("ref_id", ref_id),
            ("region", region),
            ("remote_clusters", remote_clusters),
            ("resource_id", resource_id),
            ("snapshot_source", snapshot_source),
            ("topologies", topologies),
            ("trust_accounts", trust_accounts),
            ("trust_externals", trust_externals),
        ):
            if arg_value is not None:
                pulumi.set(__self__, arg_name, arg_value)

    @property
    @pulumi.getter
    def autoscale(self) -> Optional[pulumi.Input[str]]:
        """Enable or disable autoscaling; defaults to the setting coming from the deployment template. Accepted values are `"true"` or `"false"`."""
        return pulumi.get(self, "autoscale")

    @autoscale.setter
    def autoscale(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "autoscale", value)

    @property
    @pulumi.getter(name="cloudId")
    def cloud_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "cloud_id")

    @cloud_id.setter
    def cloud_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cloud_id", value)

    @property
    @pulumi.getter
    def config(self) -> Optional[pulumi.Input['DeploymentElasticsearchConfigArgs']]:
        """Elasticsearch settings applied to every topology unless overridden in the `topology` element."""
        return pulumi.get(self, "config")

    @config.setter
    def config(self, value: Optional[pulumi.Input['DeploymentElasticsearchConfigArgs']]):
        pulumi.set(self, "config", value)

    @property
    @pulumi.getter
    def extensions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchExtensionArgs']]]]:
        """Custom Elasticsearch bundles or plugins; may be repeated."""
        return pulumi.get(self, "extensions")

    @extensions.setter
    def extensions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchExtensionArgs']]]]):
        pulumi.set(self, "extensions", value)

    @property
    @pulumi.getter(name="httpEndpoint")
    def http_endpoint(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "http_endpoint")

    @http_endpoint.setter
    def http_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "http_endpoint", value)

    @property
    @pulumi.getter(name="httpsEndpoint")
    def https_endpoint(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "https_endpoint")

    @https_endpoint.setter
    def https_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "https_endpoint", value)

    @property
    @pulumi.getter(name="refId")
    def ref_id(self) -> Optional[pulumi.Input[str]]:
        """Remote Elasticsearch `ref_id`; the default value `main-elasticsearch` is recommended."""
        return pulumi.get(self, "ref_id")

    @ref_id.setter
    def ref_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ref_id", value)

    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[str]]:
        """Elasticsearch Service (ESS) region where to create the deployment; for Elastic Cloud Enterprise (ECE) installations, set `"ece-region"`."""
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "region", value)

    @property
    @pulumi.getter(name="remoteClusters")
    def remote_clusters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchRemoteClusterArgs']]]]:
        """Elasticsearch remote clusters to configure for the Elasticsearch resource; may be repeated."""
        return pulumi.get(self, "remote_clusters")

    @remote_clusters.setter
    def remote_clusters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchRemoteClusterArgs']]]]):
        pulumi.set(self, "remote_clusters", value)

    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "resource_id")

    @resource_id.setter
    def resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_id", value)

    @property
    @pulumi.getter(name="snapshotSource")
    def snapshot_source(self) -> Optional[pulumi.Input['DeploymentElasticsearchSnapshotSourceArgs']]:
        """Restores data from a snapshot of another deployment."""
        return pulumi.get(self, "snapshot_source")

    @snapshot_source.setter
    def snapshot_source(self, value: Optional[pulumi.Input['DeploymentElasticsearchSnapshotSourceArgs']]):
        pulumi.set(self, "snapshot_source", value)

    @property
    @pulumi.getter
    def topologies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTopologyArgs']]]]:
        """May be repeated to compose complex topologies."""
        return pulumi.get(self, "topologies")

    @topologies.setter
    def topologies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTopologyArgs']]]]):
        pulumi.set(self, "topologies", value)

    @property
    @pulumi.getter(name="trustAccounts")
    def trust_accounts(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTrustAccountArgs']]]]:
        """The trust relationships with other ESS accounts."""
        return pulumi.get(self, "trust_accounts")

    @trust_accounts.setter
    def trust_accounts(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTrustAccountArgs']]]]):
        pulumi.set(self, "trust_accounts", value)

    @property
    @pulumi.getter(name="trustExternals")
    def trust_externals(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTrustExternalArgs']]]]:
        """The trust relationship with external entities (remote environments, remote accounts...)."""
        return pulumi.get(self, "trust_externals")

    @trust_externals.setter
    def trust_externals(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTrustExternalArgs']]]]):
        pulumi.set(self, "trust_externals", value)
@pulumi.input_type
class DeploymentElasticsearchConfigArgs:
    def __init__(__self__, *,
                 docker_image: Optional[pulumi.Input[str]] = None,
                 plugins: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 user_settings_json: Optional[pulumi.Input[str]] = None,
                 user_settings_override_json: Optional[pulumi.Input[str]] = None,
                 user_settings_override_yaml: Optional[pulumi.Input[str]] = None,
                 user_settings_yaml: Optional[pulumi.Input[str]] = None):
        """
        Configuration settings for the Elasticsearch resource.

        :param pulumi.Input[Sequence[pulumi.Input[str]]] plugins: List of Elasticsearch supported plugins. Check the Stack Pack version to see which plugins are supported for each version; this is currently only available from the UI and [ecctl](https://www.elastic.co/guide/en/ecctl/master/ecctl_stack_list.html).
        :param pulumi.Input[str] user_settings_json: JSON-formatted user-level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_override_json: JSON-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_override_yaml: YAML-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_yaml: YAML-formatted user-level `elasticsearch.yml` setting overrides.
        """
        # Only explicitly-provided values are forwarded to the Pulumi runtime.
        for arg_name, arg_value in (
            ("docker_image", docker_image),
            ("plugins", plugins),
            ("user_settings_json", user_settings_json),
            ("user_settings_override_json", user_settings_override_json),
            ("user_settings_override_yaml", user_settings_override_yaml),
            ("user_settings_yaml", user_settings_yaml),
        ):
            if arg_value is not None:
                pulumi.set(__self__, arg_name, arg_value)

    @property
    @pulumi.getter(name="dockerImage")
    def docker_image(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "docker_image")

    @docker_image.setter
    def docker_image(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "docker_image", value)

    @property
    @pulumi.getter
    def plugins(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """List of Elasticsearch supported plugins. Check the Stack Pack version to see which plugins are supported for each version; this is currently only available from the UI and [ecctl](https://www.elastic.co/guide/en/ecctl/master/ecctl_stack_list.html)."""
        return pulumi.get(self, "plugins")

    @plugins.setter
    def plugins(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "plugins", value)

    @property
    @pulumi.getter(name="userSettingsJson")
    def user_settings_json(self) -> Optional[pulumi.Input[str]]:
        """JSON-formatted user-level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_json")

    @user_settings_json.setter
    def user_settings_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_json", value)

    @property
    @pulumi.getter(name="userSettingsOverrideJson")
    def user_settings_override_json(self) -> Optional[pulumi.Input[str]]:
        """JSON-formatted admin (ECE) level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_override_json")

    @user_settings_override_json.setter
    def user_settings_override_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_override_json", value)

    @property
    @pulumi.getter(name="userSettingsOverrideYaml")
    def user_settings_override_yaml(self) -> Optional[pulumi.Input[str]]:
        """YAML-formatted admin (ECE) level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_override_yaml")

    @user_settings_override_yaml.setter
    def user_settings_override_yaml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_override_yaml", value)

    @property
    @pulumi.getter(name="userSettingsYaml")
    def user_settings_yaml(self) -> Optional[pulumi.Input[str]]:
        """YAML-formatted user-level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_yaml")

    @user_settings_yaml.setter
    def user_settings_yaml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_yaml", value)
@pulumi.input_type
class DeploymentElasticsearchExtensionArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 type: pulumi.Input[str],
                 url: pulumi.Input[str],
                 version: pulumi.Input[str]):
        """
        A custom Elasticsearch bundle or plugin. All arguments are required.

        :param pulumi.Input[str] name: Name of the deployment.
        :param pulumi.Input[str] type: Extension type; only `bundle` or `plugin` are supported.
        :param pulumi.Input[str] url: Bundle or plugin URL. The extension URL can be obtained from the `ec_deployment_extension.<name>.url` attribute or the API and cannot be a random HTTP address that is hosted elsewhere.
        :param pulumi.Input[str] version: Elastic Stack version to use for all the deployment resources.
        """
        # Every argument is mandatory, so all four are registered unconditionally.
        for arg_name, arg_value in (
            ("name", name),
            ("type", type),
            ("url", url),
            ("version", version),
        ):
            pulumi.set(__self__, arg_name, arg_value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """Name of the deployment."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Extension type; only `bundle` or `plugin` are supported."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def url(self) -> pulumi.Input[str]:
        """Bundle or plugin URL. The extension URL can be obtained from the `ec_deployment_extension.<name>.url` attribute or the API and cannot be a random HTTP address that is hosted elsewhere."""
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: pulumi.Input[str]):
        pulumi.set(self, "url", value)

    @property
    @pulumi.getter
    def version(self) -> pulumi.Input[str]:
        """Elastic Stack version to use for all the deployment resources."""
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: pulumi.Input[str]):
        pulumi.set(self, "version", value)
@pulumi.input_type
class DeploymentElasticsearchRemoteClusterArgs:
    def __init__(__self__, *,
                 alias: pulumi.Input[str],
                 deployment_id: pulumi.Input[str],
                 ref_id: Optional[pulumi.Input[str]] = None,
                 skip_unavailable: Optional[pulumi.Input[bool]] = None):
        """
        A remote cluster configured on the Elasticsearch resource.

        :param pulumi.Input[str] alias: Deployment alias; affects the format of the resource URLs.
        :param pulumi.Input[str] deployment_id: Remote deployment ID.
        :param pulumi.Input[str] ref_id: Can be set on the Elasticsearch resource; the default value `main-elasticsearch` is recommended.
        :param pulumi.Input[bool] skip_unavailable: If true, skip the cluster during search when disconnected; defaults to `false`.
        """
        # Required arguments are registered unconditionally; optionals only
        # when explicitly supplied.
        pulumi.set(__self__, "alias", alias)
        pulumi.set(__self__, "deployment_id", deployment_id)
        for arg_name, arg_value in (("ref_id", ref_id), ("skip_unavailable", skip_unavailable)):
            if arg_value is not None:
                pulumi.set(__self__, arg_name, arg_value)

    @property
    @pulumi.getter
    def alias(self) -> pulumi.Input[str]:
        """Deployment alias; affects the format of the resource URLs."""
        return pulumi.get(self, "alias")

    @alias.setter
    def alias(self, value: pulumi.Input[str]):
        pulumi.set(self, "alias", value)

    @property
    @pulumi.getter(name="deploymentId")
    def deployment_id(self) -> pulumi.Input[str]:
        """Remote deployment ID."""
        return pulumi.get(self, "deployment_id")

    @deployment_id.setter
    def deployment_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "deployment_id", value)

    @property
    @pulumi.getter(name="refId")
    def ref_id(self) -> Optional[pulumi.Input[str]]:
        """Can be set on the Elasticsearch resource; the default value `main-elasticsearch` is recommended."""
        return pulumi.get(self, "ref_id")

    @ref_id.setter
    def ref_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ref_id", value)

    @property
    @pulumi.getter(name="skipUnavailable")
    def skip_unavailable(self) -> Optional[pulumi.Input[bool]]:
        """If true, skip the cluster during search when disconnected; defaults to `false`."""
        return pulumi.get(self, "skip_unavailable")

    @skip_unavailable.setter
    def skip_unavailable(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "skip_unavailable", value)
@pulumi.input_type
class DeploymentElasticsearchSnapshotSourceArgs:
    def __init__(__self__, *,
                 source_elasticsearch_cluster_id: pulumi.Input[str],
                 snapshot_name: Optional[pulumi.Input[str]] = None):
        """
        Snapshot-restore settings for the Elasticsearch resource.

        :param pulumi.Input[str] source_elasticsearch_cluster_id: ID of the Elasticsearch cluster — not to be confused with the deployment ID — that will be used as the source of the snapshot. The Elasticsearch cluster must be in the same region and must have a compatible version of the Elastic Stack.
        :param pulumi.Input[str] snapshot_name: Name of the snapshot to restore; use `__latest_success__` to get the most recent successful snapshot (defaults to `__latest_success__`).
        """
        # The source cluster is mandatory; the snapshot name is only
        # registered when explicitly supplied.
        pulumi.set(__self__, "source_elasticsearch_cluster_id", source_elasticsearch_cluster_id)
        if snapshot_name is not None:
            pulumi.set(__self__, "snapshot_name", snapshot_name)

    @property
    @pulumi.getter(name="sourceElasticsearchClusterId")
    def source_elasticsearch_cluster_id(self) -> pulumi.Input[str]:
        """ID of the Elasticsearch cluster — not to be confused with the deployment ID — that will be used as the source of the snapshot. The Elasticsearch cluster must be in the same region and must have a compatible version of the Elastic Stack."""
        return pulumi.get(self, "source_elasticsearch_cluster_id")

    @source_elasticsearch_cluster_id.setter
    def source_elasticsearch_cluster_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "source_elasticsearch_cluster_id", value)

    @property
    @pulumi.getter(name="snapshotName")
    def snapshot_name(self) -> Optional[pulumi.Input[str]]:
        """Name of the snapshot to restore; use `__latest_success__` to get the most recent successful snapshot (defaults to `__latest_success__`)."""
        return pulumi.get(self, "snapshot_name")

    @snapshot_name.setter
    def snapshot_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "snapshot_name", value)
@pulumi.input_type
class DeploymentElasticsearchTopologyArgs:
    def __init__(__self__, *,
                 id: pulumi.Input[str],
                 autoscaling: Optional[pulumi.Input['DeploymentElasticsearchTopologyAutoscalingArgs']] = None,
                 configs: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTopologyConfigArgs']]]] = None,
                 instance_configuration_id: Optional[pulumi.Input[str]] = None,
                 node_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 node_type_data: Optional[pulumi.Input[str]] = None,
                 node_type_ingest: Optional[pulumi.Input[str]] = None,
                 node_type_master: Optional[pulumi.Input[str]] = None,
                 node_type_ml: Optional[pulumi.Input[str]] = None,
                 size: Optional[pulumi.Input[str]] = None,
                 size_resource: Optional[pulumi.Input[str]] = None,
                 zone_count: Optional[pulumi.Input[int]] = None):
        """
        One topology element of the deployment's Elasticsearch cluster.

        :param pulumi.Input[str] id: Topology element identifier, typically an Elasticsearch data
               tier such as `hot_content`, `warm`, `cold`, `coordinating`, `frozen`, `ml` or `master`.
        :param pulumi.Input['DeploymentElasticsearchTopologyAutoscalingArgs'] autoscaling: Autoscaling
               policy bounding the minimum and/or maximum total size of this topology element; see the
               `autoscaling` block.
        :param pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTopologyConfigArgs']]] configs:
               Elasticsearch settings applied to all topologies unless overridden in the `topology` element.
        :param pulumi.Input[str] instance_configuration_id: Instance configuration taken from the
               deployment template; rarely needs to be changed.
        :param pulumi.Input[str] node_type_data: The node type for the Elasticsearch cluster (data node).
        :param pulumi.Input[str] node_type_ingest: The node type for the Elasticsearch cluster (ingest node).
        :param pulumi.Input[str] node_type_master: The node type for the Elasticsearch cluster (master node).
        :param pulumi.Input[str] node_type_ml: The node type for the Elasticsearch cluster (machine learning node).
        :param pulumi.Input[str] size: Amount of memory per topology element in `"<size in GB>g"`
               notation; the deployment template default applies when omitted.
        :param pulumi.Input[str] size_resource: Resource kind the size is assigned to; defaults to `"memory"`.
        :param pulumi.Input[int] zone_count: Number of availability zones this element spans (used to
               set or unset HA); the deployment template default applies when omitted.
        """
        pulumi.set(__self__, "id", id)
        # Optional inputs are only recorded when explicitly provided.
        for _name, _value in (
                ("autoscaling", autoscaling),
                ("configs", configs),
                ("instance_configuration_id", instance_configuration_id),
                ("node_roles", node_roles),
                ("node_type_data", node_type_data),
                ("node_type_ingest", node_type_ingest),
                ("node_type_master", node_type_master),
                ("node_type_ml", node_type_ml),
                ("size", size),
                ("size_resource", size_resource),
                ("zone_count", zone_count)):
            if _value is not None:
                pulumi.set(__self__, _name, _value)

    @property
    @pulumi.getter
    def id(self) -> pulumi.Input[str]:
        """
        Topology element identifier, typically an Elasticsearch data tier such as `hot_content`,
        `warm`, `cold`, `coordinating`, `frozen`, `ml` or `master`.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: pulumi.Input[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def autoscaling(self) -> Optional[pulumi.Input['DeploymentElasticsearchTopologyAutoscalingArgs']]:
        """
        Autoscaling policy bounding the minimum and/or maximum total size of this topology element;
        see the `autoscaling` block.
        """
        return pulumi.get(self, "autoscaling")

    @autoscaling.setter
    def autoscaling(self, value: Optional[pulumi.Input['DeploymentElasticsearchTopologyAutoscalingArgs']]):
        pulumi.set(self, "autoscaling", value)

    @property
    @pulumi.getter
    def configs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTopologyConfigArgs']]]]:
        """
        Elasticsearch settings applied to all topologies unless overridden in the `topology` element.
        """
        return pulumi.get(self, "configs")

    @configs.setter
    def configs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentElasticsearchTopologyConfigArgs']]]]):
        pulumi.set(self, "configs", value)

    @property
    @pulumi.getter(name="instanceConfigurationId")
    def instance_configuration_id(self) -> Optional[pulumi.Input[str]]:
        """
        Instance configuration taken from the deployment template; rarely needs to be changed.
        """
        return pulumi.get(self, "instance_configuration_id")

    @instance_configuration_id.setter
    def instance_configuration_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_configuration_id", value)

    @property
    @pulumi.getter(name="nodeRoles")
    def node_roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        # NOTE(review): undocumented in the generated source; presumably the list of node roles
        # for this tier on newer Elastic Stack versions — confirm against provider docs.
        return pulumi.get(self, "node_roles")

    @node_roles.setter
    def node_roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "node_roles", value)

    @property
    @pulumi.getter(name="nodeTypeData")
    def node_type_data(self) -> Optional[pulumi.Input[str]]:
        """
        The node type for the Elasticsearch cluster (data node).
        """
        return pulumi.get(self, "node_type_data")

    @node_type_data.setter
    def node_type_data(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "node_type_data", value)

    @property
    @pulumi.getter(name="nodeTypeIngest")
    def node_type_ingest(self) -> Optional[pulumi.Input[str]]:
        """
        The node type for the Elasticsearch cluster (ingest node).
        """
        return pulumi.get(self, "node_type_ingest")

    @node_type_ingest.setter
    def node_type_ingest(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "node_type_ingest", value)

    @property
    @pulumi.getter(name="nodeTypeMaster")
    def node_type_master(self) -> Optional[pulumi.Input[str]]:
        """
        The node type for the Elasticsearch cluster (master node).
        """
        return pulumi.get(self, "node_type_master")

    @node_type_master.setter
    def node_type_master(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "node_type_master", value)

    @property
    @pulumi.getter(name="nodeTypeMl")
    def node_type_ml(self) -> Optional[pulumi.Input[str]]:
        """
        The node type for the Elasticsearch cluster (machine learning node).
        """
        return pulumi.get(self, "node_type_ml")

    @node_type_ml.setter
    def node_type_ml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "node_type_ml", value)

    @property
    @pulumi.getter
    def size(self) -> Optional[pulumi.Input[str]]:
        """
        Amount of memory per topology element in `"<size in GB>g"` notation; the deployment template
        default applies when omitted.
        """
        return pulumi.get(self, "size")

    @size.setter
    def size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size", value)

    @property
    @pulumi.getter(name="sizeResource")
    def size_resource(self) -> Optional[pulumi.Input[str]]:
        """
        Resource kind the size is assigned to; defaults to `"memory"`.
        """
        return pulumi.get(self, "size_resource")

    @size_resource.setter
    def size_resource(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size_resource", value)

    @property
    @pulumi.getter(name="zoneCount")
    def zone_count(self) -> Optional[pulumi.Input[int]]:
        """
        Number of availability zones this element spans (used to set or unset HA); the deployment
        template default applies when omitted.
        """
        return pulumi.get(self, "zone_count")

    @zone_count.setter
    def zone_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "zone_count", value)
@pulumi.input_type
class DeploymentElasticsearchTopologyAutoscalingArgs:
    def __init__(__self__, *,
                 max_size: Optional[pulumi.Input[str]] = None,
                 max_size_resource: Optional[pulumi.Input[str]] = None,
                 min_size: Optional[pulumi.Input[str]] = None,
                 min_size_resource: Optional[pulumi.Input[str]] = None,
                 policy_override_json: Optional[pulumi.Input[str]] = None):
        """
        Autoscaling limits for a single Elasticsearch topology element.

        :param pulumi.Input[str] max_size: Upper bound the deployment may scale up to; setting it
               enables scale-up. All tiers should support this option.
        :param pulumi.Input[str] max_size_resource: Resource type used when scaling up (defaults to `"memory"`).
        :param pulumi.Input[str] min_size: Lower bound the deployment may scale down to; setting it
               enables scale-down. Note that not all tiers support this option.
        :param pulumi.Input[str] min_size_resource: Resource type used when scaling down (defaults to `"memory"`).
        """
        # Only explicitly supplied values are recorded as inputs.
        for _name, _value in (
                ("max_size", max_size),
                ("max_size_resource", max_size_resource),
                ("min_size", min_size),
                ("min_size_resource", min_size_resource),
                ("policy_override_json", policy_override_json)):
            if _value is not None:
                pulumi.set(__self__, _name, _value)

    @property
    @pulumi.getter(name="maxSize")
    def max_size(self) -> Optional[pulumi.Input[str]]:
        """
        Upper bound the deployment may scale up to; setting it enables scale-up. All tiers should
        support this option.
        """
        return pulumi.get(self, "max_size")

    @max_size.setter
    def max_size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "max_size", value)

    @property
    @pulumi.getter(name="maxSizeResource")
    def max_size_resource(self) -> Optional[pulumi.Input[str]]:
        """
        Resource type used when scaling up (defaults to `"memory"`).
        """
        return pulumi.get(self, "max_size_resource")

    @max_size_resource.setter
    def max_size_resource(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "max_size_resource", value)

    @property
    @pulumi.getter(name="minSize")
    def min_size(self) -> Optional[pulumi.Input[str]]:
        """
        Lower bound the deployment may scale down to; setting it enables scale-down. Note that not
        all tiers support this option.
        """
        return pulumi.get(self, "min_size")

    @min_size.setter
    def min_size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "min_size", value)

    @property
    @pulumi.getter(name="minSizeResource")
    def min_size_resource(self) -> Optional[pulumi.Input[str]]:
        """
        Resource type used when scaling down (defaults to `"memory"`).
        """
        return pulumi.get(self, "min_size_resource")

    @min_size_resource.setter
    def min_size_resource(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "min_size_resource", value)

    @property
    @pulumi.getter(name="policyOverrideJson")
    def policy_override_json(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the generated source; presumably a raw JSON override of the
        # computed autoscaling policy — confirm against provider docs.
        return pulumi.get(self, "policy_override_json")

    @policy_override_json.setter
    def policy_override_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "policy_override_json", value)
@pulumi.input_type
class DeploymentElasticsearchTopologyConfigArgs:
    def __init__(__self__, *,
                 plugins: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 user_settings_json: Optional[pulumi.Input[str]] = None,
                 user_settings_override_json: Optional[pulumi.Input[str]] = None,
                 user_settings_override_yaml: Optional[pulumi.Input[str]] = None,
                 user_settings_yaml: Optional[pulumi.Input[str]] = None):
        """
        Per-topology Elasticsearch configuration overrides.

        :param pulumi.Input[Sequence[pulumi.Input[str]]] plugins: Elasticsearch plugins to enable.
               Check the Stack Pack version for the plugins supported by each version; currently only
               exposed through the UI and [ecctl](https://www.elastic.co/guide/en/ecctl/master/ecctl_stack_list.html).
        :param pulumi.Input[str] user_settings_json: JSON-formatted user level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_override_json: JSON-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_override_yaml: YAML-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_yaml: YAML-formatted user level `elasticsearch.yml` setting overrides.
        """
        # Only explicitly supplied values are recorded as inputs.
        for _name, _value in (
                ("plugins", plugins),
                ("user_settings_json", user_settings_json),
                ("user_settings_override_json", user_settings_override_json),
                ("user_settings_override_yaml", user_settings_override_yaml),
                ("user_settings_yaml", user_settings_yaml)):
            if _value is not None:
                pulumi.set(__self__, _name, _value)

    @property
    @pulumi.getter
    def plugins(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Elasticsearch plugins to enable. Check the Stack Pack version for the plugins supported by
        each version; currently only exposed through the UI and
        [ecctl](https://www.elastic.co/guide/en/ecctl/master/ecctl_stack_list.html).
        """
        return pulumi.get(self, "plugins")

    @plugins.setter
    def plugins(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "plugins", value)

    @property
    @pulumi.getter(name="userSettingsJson")
    def user_settings_json(self) -> Optional[pulumi.Input[str]]:
        """
        JSON-formatted user level `elasticsearch.yml` setting overrides.
        """
        return pulumi.get(self, "user_settings_json")

    @user_settings_json.setter
    def user_settings_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_json", value)

    @property
    @pulumi.getter(name="userSettingsOverrideJson")
    def user_settings_override_json(self) -> Optional[pulumi.Input[str]]:
        """
        JSON-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        """
        return pulumi.get(self, "user_settings_override_json")

    @user_settings_override_json.setter
    def user_settings_override_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_override_json", value)

    @property
    @pulumi.getter(name="userSettingsOverrideYaml")
    def user_settings_override_yaml(self) -> Optional[pulumi.Input[str]]:
        """
        YAML-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        """
        return pulumi.get(self, "user_settings_override_yaml")

    @user_settings_override_yaml.setter
    def user_settings_override_yaml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_override_yaml", value)

    @property
    @pulumi.getter(name="userSettingsYaml")
    def user_settings_yaml(self) -> Optional[pulumi.Input[str]]:
        """
        YAML-formatted user level `elasticsearch.yml` setting overrides.
        """
        return pulumi.get(self, "user_settings_yaml")

    @user_settings_yaml.setter
    def user_settings_yaml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_yaml", value)
@pulumi.input_type
class DeploymentElasticsearchTrustAccountArgs:
    def __init__(__self__, *,
                 account_id: pulumi.Input[str],
                 trust_all: pulumi.Input[bool],
                 trust_allowlists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        Trust relationship with clusters belonging to another account.

        :param pulumi.Input[str] account_id: The account identifier to establish the new trust with.
        :param pulumi.Input[bool] trust_all: When true, every cluster in the account is trusted by
               default and `trust_allowlist` is ignored.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] trust_allowlists: Clusters to trust; only
               consulted when `trust_all` is `false`.
        """
        pulumi.set(__self__, "account_id", account_id)
        pulumi.set(__self__, "trust_all", trust_all)
        if trust_allowlists is None:
            return
        pulumi.set(__self__, "trust_allowlists", trust_allowlists)

    @property
    @pulumi.getter(name="accountId")
    def account_id(self) -> pulumi.Input[str]:
        """
        The account identifier to establish the new trust with.
        """
        return pulumi.get(self, "account_id")

    @account_id.setter
    def account_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "account_id", value)

    @property
    @pulumi.getter(name="trustAll")
    def trust_all(self) -> pulumi.Input[bool]:
        """
        When true, every cluster in the account is trusted by default and `trust_allowlist` is ignored.
        """
        return pulumi.get(self, "trust_all")

    @trust_all.setter
    def trust_all(self, value: pulumi.Input[bool]):
        pulumi.set(self, "trust_all", value)

    @property
    @pulumi.getter(name="trustAllowlists")
    def trust_allowlists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Clusters to trust; only consulted when `trust_all` is `false`.
        """
        return pulumi.get(self, "trust_allowlists")

    @trust_allowlists.setter
    def trust_allowlists(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "trust_allowlists", value)
@pulumi.input_type
class DeploymentElasticsearchTrustExternalArgs:
    def __init__(__self__, *,
                 relationship_id: pulumi.Input[str],
                 trust_all: pulumi.Input[bool],
                 trust_allowlists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        Trust relationship with external entities (remote environments, remote accounts, ...).

        :param pulumi.Input[str] relationship_id: Identifier of the trust relationship with the
               external entities.
        :param pulumi.Input[bool] trust_all: When true, every cluster in the account is trusted by
               default and `trust_allowlist` is ignored.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] trust_allowlists: Clusters to trust; only
               consulted when `trust_all` is `false`.
        """
        pulumi.set(__self__, "relationship_id", relationship_id)
        pulumi.set(__self__, "trust_all", trust_all)
        if trust_allowlists is None:
            return
        pulumi.set(__self__, "trust_allowlists", trust_allowlists)

    @property
    @pulumi.getter(name="relationshipId")
    def relationship_id(self) -> pulumi.Input[str]:
        """
        Identifier of the trust relationship with the external entities.
        """
        return pulumi.get(self, "relationship_id")

    @relationship_id.setter
    def relationship_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "relationship_id", value)

    @property
    @pulumi.getter(name="trustAll")
    def trust_all(self) -> pulumi.Input[bool]:
        """
        When true, every cluster in the account is trusted by default and `trust_allowlist` is ignored.
        """
        return pulumi.get(self, "trust_all")

    @trust_all.setter
    def trust_all(self, value: pulumi.Input[bool]):
        pulumi.set(self, "trust_all", value)

    @property
    @pulumi.getter(name="trustAllowlists")
    def trust_allowlists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Clusters to trust; only consulted when `trust_all` is `false`.
        """
        return pulumi.get(self, "trust_allowlists")

    @trust_allowlists.setter
    def trust_allowlists(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "trust_allowlists", value)
@pulumi.input_type
class DeploymentEnterpriseSearchArgs:
    def __init__(__self__, *,
                 config: Optional[pulumi.Input['DeploymentEnterpriseSearchConfigArgs']] = None,
                 elasticsearch_cluster_ref_id: Optional[pulumi.Input[str]] = None,
                 http_endpoint: Optional[pulumi.Input[str]] = None,
                 https_endpoint: Optional[pulumi.Input[str]] = None,
                 ref_id: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 resource_id: Optional[pulumi.Input[str]] = None,
                 topologies: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentEnterpriseSearchTopologyArgs']]]] = None):
        """
        Enterprise Search resource attached to the deployment.

        :param pulumi.Input['DeploymentEnterpriseSearchConfigArgs'] config: Enterprise Search settings
               applied to all topologies unless overridden in the `topology` element.
        :param pulumi.Input[str] elasticsearch_cluster_ref_id: References the `ref_id` of the
               deployment's Elasticsearch cluster; `main-elasticsearch` (the default) is recommended.
        :param pulumi.Input[str] ref_id: Can be set on the Enterprise Search resource;
               `main-enterprise_search` (the default) is recommended.
        :param pulumi.Input[str] region: Elasticsearch Service (ESS) region for the deployment; use
               `"ece-region"` for Elastic Cloud Enterprise (ECE) installations.
        :param pulumi.Input[Sequence[pulumi.Input['DeploymentEnterpriseSearchTopologyArgs']]] topologies:
               May be given multiple times to compose complex topologies.
        """
        # Only explicitly supplied values are recorded as inputs.
        for _name, _value in (
                ("config", config),
                ("elasticsearch_cluster_ref_id", elasticsearch_cluster_ref_id),
                ("http_endpoint", http_endpoint),
                ("https_endpoint", https_endpoint),
                ("ref_id", ref_id),
                ("region", region),
                ("resource_id", resource_id),
                ("topologies", topologies)):
            if _value is not None:
                pulumi.set(__self__, _name, _value)

    @property
    @pulumi.getter
    def config(self) -> Optional[pulumi.Input['DeploymentEnterpriseSearchConfigArgs']]:
        """
        Enterprise Search settings applied to all topologies unless overridden in the `topology` element.
        """
        return pulumi.get(self, "config")

    @config.setter
    def config(self, value: Optional[pulumi.Input['DeploymentEnterpriseSearchConfigArgs']]):
        pulumi.set(self, "config", value)

    @property
    @pulumi.getter(name="elasticsearchClusterRefId")
    def elasticsearch_cluster_ref_id(self) -> Optional[pulumi.Input[str]]:
        """
        References the `ref_id` of the deployment's Elasticsearch cluster; `main-elasticsearch`
        (the default) is recommended.
        """
        return pulumi.get(self, "elasticsearch_cluster_ref_id")

    @elasticsearch_cluster_ref_id.setter
    def elasticsearch_cluster_ref_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "elasticsearch_cluster_ref_id", value)

    @property
    @pulumi.getter(name="httpEndpoint")
    def http_endpoint(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the generated source; presumably the provider-computed HTTP
        # endpoint rather than a user input — confirm against provider docs.
        return pulumi.get(self, "http_endpoint")

    @http_endpoint.setter
    def http_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "http_endpoint", value)

    @property
    @pulumi.getter(name="httpsEndpoint")
    def https_endpoint(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the generated source; presumably the provider-computed HTTPS
        # endpoint rather than a user input — confirm against provider docs.
        return pulumi.get(self, "https_endpoint")

    @https_endpoint.setter
    def https_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "https_endpoint", value)

    @property
    @pulumi.getter(name="refId")
    def ref_id(self) -> Optional[pulumi.Input[str]]:
        """
        Can be set on the Enterprise Search resource; `main-enterprise_search` (the default) is recommended.
        """
        return pulumi.get(self, "ref_id")

    @ref_id.setter
    def ref_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ref_id", value)

    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[str]]:
        """
        Elasticsearch Service (ESS) region for the deployment; use `"ece-region"` for Elastic Cloud
        Enterprise (ECE) installations.
        """
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "region", value)

    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the generated source; presumably the provider-computed
        # resource identifier — confirm against provider docs.
        return pulumi.get(self, "resource_id")

    @resource_id.setter
    def resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_id", value)

    @property
    @pulumi.getter
    def topologies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentEnterpriseSearchTopologyArgs']]]]:
        """
        May be given multiple times to compose complex topologies.
        """
        return pulumi.get(self, "topologies")

    @topologies.setter
    def topologies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentEnterpriseSearchTopologyArgs']]]]):
        pulumi.set(self, "topologies", value)
@pulumi.input_type
class DeploymentEnterpriseSearchConfigArgs:
    def __init__(__self__, *,
                 docker_image: Optional[pulumi.Input[str]] = None,
                 user_settings_json: Optional[pulumi.Input[str]] = None,
                 user_settings_override_json: Optional[pulumi.Input[str]] = None,
                 user_settings_override_yaml: Optional[pulumi.Input[str]] = None,
                 user_settings_yaml: Optional[pulumi.Input[str]] = None):
        """
        Enterprise Search configuration overrides.

        :param pulumi.Input[str] user_settings_json: JSON-formatted user level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_override_json: JSON-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_override_yaml: YAML-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_yaml: YAML-formatted user level `elasticsearch.yml` setting overrides.
        """
        # Only explicitly supplied values are recorded as inputs.
        for _name, _value in (
                ("docker_image", docker_image),
                ("user_settings_json", user_settings_json),
                ("user_settings_override_json", user_settings_override_json),
                ("user_settings_override_yaml", user_settings_override_yaml),
                ("user_settings_yaml", user_settings_yaml)):
            if _value is not None:
                pulumi.set(__self__, _name, _value)

    @property
    @pulumi.getter(name="dockerImage")
    def docker_image(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented in the generated source; presumably an override for the
        # Enterprise Search Docker image — confirm against provider docs.
        return pulumi.get(self, "docker_image")

    @docker_image.setter
    def docker_image(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "docker_image", value)

    @property
    @pulumi.getter(name="userSettingsJson")
    def user_settings_json(self) -> Optional[pulumi.Input[str]]:
        """
        JSON-formatted user level `elasticsearch.yml` setting overrides.
        """
        return pulumi.get(self, "user_settings_json")

    @user_settings_json.setter
    def user_settings_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_json", value)

    @property
    @pulumi.getter(name="userSettingsOverrideJson")
    def user_settings_override_json(self) -> Optional[pulumi.Input[str]]:
        """
        JSON-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        """
        return pulumi.get(self, "user_settings_override_json")

    @user_settings_override_json.setter
    def user_settings_override_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_override_json", value)

    @property
    @pulumi.getter(name="userSettingsOverrideYaml")
    def user_settings_override_yaml(self) -> Optional[pulumi.Input[str]]:
        """
        YAML-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        """
        return pulumi.get(self, "user_settings_override_yaml")

    @user_settings_override_yaml.setter
    def user_settings_override_yaml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_override_yaml", value)

    @property
    @pulumi.getter(name="userSettingsYaml")
    def user_settings_yaml(self) -> Optional[pulumi.Input[str]]:
        """
        YAML-formatted user level `elasticsearch.yml` setting overrides.
        """
        return pulumi.get(self, "user_settings_yaml")

    @user_settings_yaml.setter
    def user_settings_yaml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_yaml", value)
@pulumi.input_type
class DeploymentEnterpriseSearchTopologyArgs:
    def __init__(__self__, *,
                 instance_configuration_id: Optional[pulumi.Input[str]] = None,
                 node_type_appserver: Optional[pulumi.Input[bool]] = None,
                 node_type_connector: Optional[pulumi.Input[bool]] = None,
                 node_type_worker: Optional[pulumi.Input[bool]] = None,
                 size: Optional[pulumi.Input[str]] = None,
                 size_resource: Optional[pulumi.Input[str]] = None,
                 zone_count: Optional[pulumi.Input[int]] = None):
        """
        One topology element of the deployment's Enterprise Search resource.

        :param pulumi.Input[str] instance_configuration_id: Instance configuration taken from the
               deployment template; rarely needs to be changed.
        :param pulumi.Input[str] size: Amount of memory per topology element in `"<size in GB>g"`
               notation; the deployment template default applies when omitted.
        :param pulumi.Input[str] size_resource: Resource kind the size is assigned to; defaults to `"memory"`.
        :param pulumi.Input[int] zone_count: Number of availability zones this element spans (used to
               set or unset HA); the deployment template default applies when omitted.
        """
        # Only explicitly supplied values are recorded as inputs.
        for _name, _value in (
                ("instance_configuration_id", instance_configuration_id),
                ("node_type_appserver", node_type_appserver),
                ("node_type_connector", node_type_connector),
                ("node_type_worker", node_type_worker),
                ("size", size),
                ("size_resource", size_resource),
                ("zone_count", zone_count)):
            if _value is not None:
                pulumi.set(__self__, _name, _value)

    @property
    @pulumi.getter(name="instanceConfigurationId")
    def instance_configuration_id(self) -> Optional[pulumi.Input[str]]:
        """
        Instance configuration taken from the deployment template; rarely needs to be changed.
        """
        return pulumi.get(self, "instance_configuration_id")

    @instance_configuration_id.setter
    def instance_configuration_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_configuration_id", value)

    @property
    @pulumi.getter(name="nodeTypeAppserver")
    def node_type_appserver(self) -> Optional[pulumi.Input[bool]]:
        # NOTE(review): undocumented in the generated source; presumably toggles the Enterprise
        # Search application-server role — confirm against provider docs.
        return pulumi.get(self, "node_type_appserver")

    @node_type_appserver.setter
    def node_type_appserver(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "node_type_appserver", value)

    @property
    @pulumi.getter(name="nodeTypeConnector")
    def node_type_connector(self) -> Optional[pulumi.Input[bool]]:
        # NOTE(review): undocumented in the generated source; presumably toggles the Enterprise
        # Search connector role — confirm against provider docs.
        return pulumi.get(self, "node_type_connector")

    @node_type_connector.setter
    def node_type_connector(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "node_type_connector", value)

    @property
    @pulumi.getter(name="nodeTypeWorker")
    def node_type_worker(self) -> Optional[pulumi.Input[bool]]:
        # NOTE(review): undocumented in the generated source; presumably toggles the Enterprise
        # Search worker role — confirm against provider docs.
        return pulumi.get(self, "node_type_worker")

    @node_type_worker.setter
    def node_type_worker(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "node_type_worker", value)

    @property
    @pulumi.getter
    def size(self) -> Optional[pulumi.Input[str]]:
        """
        Amount of memory per topology element in `"<size in GB>g"` notation; the deployment template
        default applies when omitted.
        """
        return pulumi.get(self, "size")

    @size.setter
    def size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size", value)

    @property
    @pulumi.getter(name="sizeResource")
    def size_resource(self) -> Optional[pulumi.Input[str]]:
        """
        Resource kind the size is assigned to; defaults to `"memory"`.
        """
        return pulumi.get(self, "size_resource")

    @size_resource.setter
    def size_resource(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size_resource", value)

    @property
    @pulumi.getter(name="zoneCount")
    def zone_count(self) -> Optional[pulumi.Input[int]]:
        """
        Number of availability zones this element spans (used to set or unset HA); the deployment
        template default applies when omitted.
        """
        return pulumi.get(self, "zone_count")

    @zone_count.setter
    def zone_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "zone_count", value)
@pulumi.input_type
class DeploymentIntegrationsServerArgs:
    def __init__(__self__, *,
                 config: Optional[pulumi.Input['DeploymentIntegrationsServerConfigArgs']] = None,
                 elasticsearch_cluster_ref_id: Optional[pulumi.Input[str]] = None,
                 http_endpoint: Optional[pulumi.Input[str]] = None,
                 https_endpoint: Optional[pulumi.Input[str]] = None,
                 ref_id: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 resource_id: Optional[pulumi.Input[str]] = None,
                 topologies: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentIntegrationsServerTopologyArgs']]]] = None):
        """
        Input arguments for a deployment's Integrations Server resource.

        :param pulumi.Input['DeploymentIntegrationsServerConfigArgs'] config: Integrations Server settings applied to all topologies unless overridden in the `topology` element.
        :param pulumi.Input[str] elasticsearch_cluster_ref_id: This field references the `ref_id` of the deployment Elasticsearch cluster. The default value `main-elasticsearch` is recommended.
        :param pulumi.Input[str] ref_id: Can be set on the Integrations Server resource. The default value `main-integrations_server` is recommended.
        :param pulumi.Input[str] region: Elasticsearch Service (ESS) region where to create the deployment. For Elastic Cloud Enterprise (ECE) installations, set `"ece-region"`.
        :param pulumi.Input[Sequence[pulumi.Input['DeploymentIntegrationsServerTopologyArgs']]] topologies: Can be set multiple times to compose complex topologies.
        """
        # Record only the values the caller explicitly supplied; a None argument
        # is treated as "not provided" and left unset.
        for attr, supplied in (
            ("config", config),
            ("elasticsearch_cluster_ref_id", elasticsearch_cluster_ref_id),
            ("http_endpoint", http_endpoint),
            ("https_endpoint", https_endpoint),
            ("ref_id", ref_id),
            ("region", region),
            ("resource_id", resource_id),
            ("topologies", topologies),
        ):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter
    def config(self) -> Optional[pulumi.Input['DeploymentIntegrationsServerConfigArgs']]:
        """Integrations Server settings applied to all topologies unless overridden in the `topology` element."""
        return pulumi.get(self, "config")

    @config.setter
    def config(self, value: Optional[pulumi.Input['DeploymentIntegrationsServerConfigArgs']]):
        pulumi.set(self, "config", value)

    @property
    @pulumi.getter(name="elasticsearchClusterRefId")
    def elasticsearch_cluster_ref_id(self) -> Optional[pulumi.Input[str]]:
        """This field references the `ref_id` of the deployment Elasticsearch cluster. The default value `main-elasticsearch` is recommended."""
        return pulumi.get(self, "elasticsearch_cluster_ref_id")

    @elasticsearch_cluster_ref_id.setter
    def elasticsearch_cluster_ref_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "elasticsearch_cluster_ref_id", value)

    @property
    @pulumi.getter(name="httpEndpoint")
    def http_endpoint(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "http_endpoint")

    @http_endpoint.setter
    def http_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "http_endpoint", value)

    @property
    @pulumi.getter(name="httpsEndpoint")
    def https_endpoint(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "https_endpoint")

    @https_endpoint.setter
    def https_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "https_endpoint", value)

    @property
    @pulumi.getter(name="refId")
    def ref_id(self) -> Optional[pulumi.Input[str]]:
        """Can be set on the Integrations Server resource. The default value `main-integrations_server` is recommended."""
        return pulumi.get(self, "ref_id")

    @ref_id.setter
    def ref_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ref_id", value)

    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[str]]:
        """Elasticsearch Service (ESS) region where to create the deployment. For Elastic Cloud Enterprise (ECE) installations, set `"ece-region"`."""
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "region", value)

    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "resource_id")

    @resource_id.setter
    def resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_id", value)

    @property
    @pulumi.getter
    def topologies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentIntegrationsServerTopologyArgs']]]]:
        """Can be set multiple times to compose complex topologies."""
        return pulumi.get(self, "topologies")

    @topologies.setter
    def topologies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentIntegrationsServerTopologyArgs']]]]):
        pulumi.set(self, "topologies", value)
@pulumi.input_type
class DeploymentIntegrationsServerConfigArgs:
    def __init__(__self__, *,
                 debug_enabled: Optional[pulumi.Input[bool]] = None,
                 docker_image: Optional[pulumi.Input[str]] = None,
                 user_settings_json: Optional[pulumi.Input[str]] = None,
                 user_settings_override_json: Optional[pulumi.Input[str]] = None,
                 user_settings_override_yaml: Optional[pulumi.Input[str]] = None,
                 user_settings_yaml: Optional[pulumi.Input[str]] = None):
        """
        Configuration settings for the Integrations Server resource.

        :param pulumi.Input[bool] debug_enabled: Enable debug mode for the component. Defaults to `false`.
        :param pulumi.Input[str] user_settings_json: JSON-formatted user level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_override_json: JSON-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_override_yaml: YAML-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_yaml: YAML-formatted user level `elasticsearch.yml` setting overrides.
        """
        # Only explicitly supplied values are stored; None means "leave unset".
        for attr, supplied in (
            ("debug_enabled", debug_enabled),
            ("docker_image", docker_image),
            ("user_settings_json", user_settings_json),
            ("user_settings_override_json", user_settings_override_json),
            ("user_settings_override_yaml", user_settings_override_yaml),
            ("user_settings_yaml", user_settings_yaml),
        ):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter(name="debugEnabled")
    def debug_enabled(self) -> Optional[pulumi.Input[bool]]:
        """Enable debug mode for the component. Defaults to `false`."""
        return pulumi.get(self, "debug_enabled")

    @debug_enabled.setter
    def debug_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "debug_enabled", value)

    @property
    @pulumi.getter(name="dockerImage")
    def docker_image(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "docker_image")

    @docker_image.setter
    def docker_image(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "docker_image", value)

    @property
    @pulumi.getter(name="userSettingsJson")
    def user_settings_json(self) -> Optional[pulumi.Input[str]]:
        """JSON-formatted user level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_json")

    @user_settings_json.setter
    def user_settings_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_json", value)

    @property
    @pulumi.getter(name="userSettingsOverrideJson")
    def user_settings_override_json(self) -> Optional[pulumi.Input[str]]:
        """JSON-formatted admin (ECE) level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_override_json")

    @user_settings_override_json.setter
    def user_settings_override_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_override_json", value)

    @property
    @pulumi.getter(name="userSettingsOverrideYaml")
    def user_settings_override_yaml(self) -> Optional[pulumi.Input[str]]:
        """YAML-formatted admin (ECE) level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_override_yaml")

    @user_settings_override_yaml.setter
    def user_settings_override_yaml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_override_yaml", value)

    @property
    @pulumi.getter(name="userSettingsYaml")
    def user_settings_yaml(self) -> Optional[pulumi.Input[str]]:
        """YAML-formatted user level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_yaml")

    @user_settings_yaml.setter
    def user_settings_yaml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_yaml", value)
@pulumi.input_type
class DeploymentIntegrationsServerTopologyArgs:
    def __init__(__self__, *,
                 instance_configuration_id: Optional[pulumi.Input[str]] = None,
                 size: Optional[pulumi.Input[str]] = None,
                 size_resource: Optional[pulumi.Input[str]] = None,
                 zone_count: Optional[pulumi.Input[int]] = None):
        """
        Topology element for the Integrations Server resource.

        :param pulumi.Input[str] instance_configuration_id: Default instance configuration of the deployment template. No need to change this value since Kibana has only one _instance type_.
        :param pulumi.Input[str] size: Amount in Gigabytes per topology element in the `"<size in GB>g"` notation. When omitted, it defaults to the deployment template value.
        :param pulumi.Input[str] size_resource: Type of resource to which the size is assigned. Defaults to `"memory"`.
        :param pulumi.Input[int] zone_count: Number of zones the instance type of the Elasticsearch cluster will span. This is used to set or unset HA on an Elasticsearch node type. When omitted, it defaults to the deployment template value.
        """
        # None means "not provided": the deployment template default applies.
        for attr, supplied in (
            ("instance_configuration_id", instance_configuration_id),
            ("size", size),
            ("size_resource", size_resource),
            ("zone_count", zone_count),
        ):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter(name="instanceConfigurationId")
    def instance_configuration_id(self) -> Optional[pulumi.Input[str]]:
        """Default instance configuration of the deployment template. No need to change this value since Kibana has only one _instance type_."""
        return pulumi.get(self, "instance_configuration_id")

    @instance_configuration_id.setter
    def instance_configuration_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_configuration_id", value)

    @property
    @pulumi.getter
    def size(self) -> Optional[pulumi.Input[str]]:
        """Amount in Gigabytes per topology element in the `"<size in GB>g"` notation. When omitted, it defaults to the deployment template value."""
        return pulumi.get(self, "size")

    @size.setter
    def size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size", value)

    @property
    @pulumi.getter(name="sizeResource")
    def size_resource(self) -> Optional[pulumi.Input[str]]:
        """Type of resource to which the size is assigned. Defaults to `"memory"`."""
        return pulumi.get(self, "size_resource")

    @size_resource.setter
    def size_resource(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size_resource", value)

    @property
    @pulumi.getter(name="zoneCount")
    def zone_count(self) -> Optional[pulumi.Input[int]]:
        """Number of zones the instance type of the Elasticsearch cluster will span. This is used to set or unset HA on an Elasticsearch node type. When omitted, it defaults to the deployment template value."""
        return pulumi.get(self, "zone_count")

    @zone_count.setter
    def zone_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "zone_count", value)
@pulumi.input_type
class DeploymentKibanaArgs:
    def __init__(__self__, *,
                 config: Optional[pulumi.Input['DeploymentKibanaConfigArgs']] = None,
                 elasticsearch_cluster_ref_id: Optional[pulumi.Input[str]] = None,
                 http_endpoint: Optional[pulumi.Input[str]] = None,
                 https_endpoint: Optional[pulumi.Input[str]] = None,
                 ref_id: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 resource_id: Optional[pulumi.Input[str]] = None,
                 topologies: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentKibanaTopologyArgs']]]] = None):
        """
        Input arguments for a deployment's Kibana resource.

        :param pulumi.Input['DeploymentKibanaConfigArgs'] config: Kibana settings applied to all topologies unless overridden in the `topology` element.
        :param pulumi.Input[str] elasticsearch_cluster_ref_id: This field references the `ref_id` of the deployment Elasticsearch cluster. The default value `main-elasticsearch` is recommended.
        :param pulumi.Input[str] ref_id: Can be set on the Kibana resource. The default value `main-kibana` is recommended.
        :param pulumi.Input[str] region: Elasticsearch Service (ESS) region where to create the deployment. For Elastic Cloud Enterprise (ECE) installations, set `"ece-region"`.
        :param pulumi.Input[Sequence[pulumi.Input['DeploymentKibanaTopologyArgs']]] topologies: Can be set multiple times to compose complex topologies.
        """
        # Record only the values the caller explicitly supplied; a None argument
        # is treated as "not provided" and left unset.
        for attr, supplied in (
            ("config", config),
            ("elasticsearch_cluster_ref_id", elasticsearch_cluster_ref_id),
            ("http_endpoint", http_endpoint),
            ("https_endpoint", https_endpoint),
            ("ref_id", ref_id),
            ("region", region),
            ("resource_id", resource_id),
            ("topologies", topologies),
        ):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter
    def config(self) -> Optional[pulumi.Input['DeploymentKibanaConfigArgs']]:
        """Kibana settings applied to all topologies unless overridden in the `topology` element."""
        return pulumi.get(self, "config")

    @config.setter
    def config(self, value: Optional[pulumi.Input['DeploymentKibanaConfigArgs']]):
        pulumi.set(self, "config", value)

    @property
    @pulumi.getter(name="elasticsearchClusterRefId")
    def elasticsearch_cluster_ref_id(self) -> Optional[pulumi.Input[str]]:
        """This field references the `ref_id` of the deployment Elasticsearch cluster. The default value `main-elasticsearch` is recommended."""
        return pulumi.get(self, "elasticsearch_cluster_ref_id")

    @elasticsearch_cluster_ref_id.setter
    def elasticsearch_cluster_ref_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "elasticsearch_cluster_ref_id", value)

    @property
    @pulumi.getter(name="httpEndpoint")
    def http_endpoint(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "http_endpoint")

    @http_endpoint.setter
    def http_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "http_endpoint", value)

    @property
    @pulumi.getter(name="httpsEndpoint")
    def https_endpoint(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "https_endpoint")

    @https_endpoint.setter
    def https_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "https_endpoint", value)

    @property
    @pulumi.getter(name="refId")
    def ref_id(self) -> Optional[pulumi.Input[str]]:
        """Can be set on the Kibana resource. The default value `main-kibana` is recommended."""
        return pulumi.get(self, "ref_id")

    @ref_id.setter
    def ref_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ref_id", value)

    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[str]]:
        """Elasticsearch Service (ESS) region where to create the deployment. For Elastic Cloud Enterprise (ECE) installations, set `"ece-region"`."""
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "region", value)

    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "resource_id")

    @resource_id.setter
    def resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_id", value)

    @property
    @pulumi.getter
    def topologies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentKibanaTopologyArgs']]]]:
        """Can be set multiple times to compose complex topologies."""
        return pulumi.get(self, "topologies")

    @topologies.setter
    def topologies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DeploymentKibanaTopologyArgs']]]]):
        pulumi.set(self, "topologies", value)
@pulumi.input_type
class DeploymentKibanaConfigArgs:
    def __init__(__self__, *,
                 docker_image: Optional[pulumi.Input[str]] = None,
                 user_settings_json: Optional[pulumi.Input[str]] = None,
                 user_settings_override_json: Optional[pulumi.Input[str]] = None,
                 user_settings_override_yaml: Optional[pulumi.Input[str]] = None,
                 user_settings_yaml: Optional[pulumi.Input[str]] = None):
        """
        Configuration settings for the Kibana resource.

        :param pulumi.Input[str] user_settings_json: JSON-formatted user level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_override_json: JSON-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_override_yaml: YAML-formatted admin (ECE) level `elasticsearch.yml` setting overrides.
        :param pulumi.Input[str] user_settings_yaml: YAML-formatted user level `elasticsearch.yml` setting overrides.
        """
        # Only explicitly supplied values are stored; None means "leave unset".
        for attr, supplied in (
            ("docker_image", docker_image),
            ("user_settings_json", user_settings_json),
            ("user_settings_override_json", user_settings_override_json),
            ("user_settings_override_yaml", user_settings_override_yaml),
            ("user_settings_yaml", user_settings_yaml),
        ):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter(name="dockerImage")
    def docker_image(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "docker_image")

    @docker_image.setter
    def docker_image(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "docker_image", value)

    @property
    @pulumi.getter(name="userSettingsJson")
    def user_settings_json(self) -> Optional[pulumi.Input[str]]:
        """JSON-formatted user level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_json")

    @user_settings_json.setter
    def user_settings_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_json", value)

    @property
    @pulumi.getter(name="userSettingsOverrideJson")
    def user_settings_override_json(self) -> Optional[pulumi.Input[str]]:
        """JSON-formatted admin (ECE) level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_override_json")

    @user_settings_override_json.setter
    def user_settings_override_json(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_override_json", value)

    @property
    @pulumi.getter(name="userSettingsOverrideYaml")
    def user_settings_override_yaml(self) -> Optional[pulumi.Input[str]]:
        """YAML-formatted admin (ECE) level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_override_yaml")

    @user_settings_override_yaml.setter
    def user_settings_override_yaml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_override_yaml", value)

    @property
    @pulumi.getter(name="userSettingsYaml")
    def user_settings_yaml(self) -> Optional[pulumi.Input[str]]:
        """YAML-formatted user level `elasticsearch.yml` setting overrides."""
        return pulumi.get(self, "user_settings_yaml")

    @user_settings_yaml.setter
    def user_settings_yaml(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_settings_yaml", value)
@pulumi.input_type
class DeploymentKibanaTopologyArgs:
    def __init__(__self__, *,
                 instance_configuration_id: Optional[pulumi.Input[str]] = None,
                 size: Optional[pulumi.Input[str]] = None,
                 size_resource: Optional[pulumi.Input[str]] = None,
                 zone_count: Optional[pulumi.Input[int]] = None):
        """
        Topology element for the Kibana resource.

        :param pulumi.Input[str] instance_configuration_id: Default instance configuration of the deployment template. No need to change this value since Kibana has only one _instance type_.
        :param pulumi.Input[str] size: Amount in Gigabytes per topology element in the `"<size in GB>g"` notation. When omitted, it defaults to the deployment template value.
        :param pulumi.Input[str] size_resource: Type of resource to which the size is assigned. Defaults to `"memory"`.
        :param pulumi.Input[int] zone_count: Number of zones the instance type of the Elasticsearch cluster will span. This is used to set or unset HA on an Elasticsearch node type. When omitted, it defaults to the deployment template value.
        """
        # None means "not provided": the deployment template default applies.
        for attr, supplied in (
            ("instance_configuration_id", instance_configuration_id),
            ("size", size),
            ("size_resource", size_resource),
            ("zone_count", zone_count),
        ):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter(name="instanceConfigurationId")
    def instance_configuration_id(self) -> Optional[pulumi.Input[str]]:
        """Default instance configuration of the deployment template. No need to change this value since Kibana has only one _instance type_."""
        return pulumi.get(self, "instance_configuration_id")

    @instance_configuration_id.setter
    def instance_configuration_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_configuration_id", value)

    @property
    @pulumi.getter
    def size(self) -> Optional[pulumi.Input[str]]:
        """Amount in Gigabytes per topology element in the `"<size in GB>g"` notation. When omitted, it defaults to the deployment template value."""
        return pulumi.get(self, "size")

    @size.setter
    def size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size", value)

    @property
    @pulumi.getter(name="sizeResource")
    def size_resource(self) -> Optional[pulumi.Input[str]]:
        """Type of resource to which the size is assigned. Defaults to `"memory"`."""
        return pulumi.get(self, "size_resource")

    @size_resource.setter
    def size_resource(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size_resource", value)

    @property
    @pulumi.getter(name="zoneCount")
    def zone_count(self) -> Optional[pulumi.Input[int]]:
        """Number of zones the instance type of the Elasticsearch cluster will span. This is used to set or unset HA on an Elasticsearch node type. When omitted, it defaults to the deployment template value."""
        return pulumi.get(self, "zone_count")

    @zone_count.setter
    def zone_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "zone_count", value)
@pulumi.input_type
class DeploymentObservabilityArgs:
    def __init__(__self__, *,
                 deployment_id: pulumi.Input[str],
                 logs: Optional[pulumi.Input[bool]] = None,
                 metrics: Optional[pulumi.Input[bool]] = None,
                 ref_id: Optional[pulumi.Input[str]] = None):
        """
        Observability settings shipping logs/metrics to a remote deployment.

        :param pulumi.Input[str] deployment_id: Remote deployment ID.
        :param pulumi.Input[str] ref_id: Can be set on the Elasticsearch resource. The default value `main-elasticsearch` is recommended.
        """
        # deployment_id is mandatory; the optional fields are forwarded only
        # when the caller actually supplied them.
        pulumi.set(__self__, "deployment_id", deployment_id)
        for attr, supplied in (("logs", logs), ("metrics", metrics), ("ref_id", ref_id)):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter(name="deploymentId")
    def deployment_id(self) -> pulumi.Input[str]:
        """Remote deployment ID."""
        return pulumi.get(self, "deployment_id")

    @deployment_id.setter
    def deployment_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "deployment_id", value)

    @property
    @pulumi.getter
    def logs(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "logs")

    @logs.setter
    def logs(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "logs", value)

    @property
    @pulumi.getter
    def metrics(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "metrics")

    @metrics.setter
    def metrics(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "metrics", value)

    @property
    @pulumi.getter(name="refId")
    def ref_id(self) -> Optional[pulumi.Input[str]]:
        """Can be set on the Elasticsearch resource. The default value `main-elasticsearch` is recommended."""
        return pulumi.get(self, "ref_id")

    @ref_id.setter
    def ref_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ref_id", value)
@pulumi.input_type
class DeploymentTrafficFilterRuleArgs:
    def __init__(__self__, *,
                 azure_endpoint_guid: Optional[pulumi.Input[str]] = None,
                 azure_endpoint_name: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 source: Optional[pulumi.Input[str]] = None):
        """
        A single rule inside a deployment traffic filter ruleset.

        :param pulumi.Input[str] azure_endpoint_guid: Azure endpoint GUID. Only applicable when the ruleset type is set to `"azure_private_endpoint"`.
        :param pulumi.Input[str] azure_endpoint_name: Azure endpoint name. Only applicable when the ruleset type is set to `"azure_private_endpoint"`.
        :param pulumi.Input[str] description: Description of this individual rule.
        :param pulumi.Input[str] id: The ruleset ID.
        :param pulumi.Input[str] source: traffic filter source: IP address, CIDR mask, or VPC endpoint ID, **only required** when the type is not `"azure_private_endpoint"`.
        """
        # Only explicitly supplied values are stored; None means "leave unset".
        for attr, supplied in (
            ("azure_endpoint_guid", azure_endpoint_guid),
            ("azure_endpoint_name", azure_endpoint_name),
            ("description", description),
            ("id", id),
            ("source", source),
        ):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter(name="azureEndpointGuid")
    def azure_endpoint_guid(self) -> Optional[pulumi.Input[str]]:
        """Azure endpoint GUID. Only applicable when the ruleset type is set to `"azure_private_endpoint"`."""
        return pulumi.get(self, "azure_endpoint_guid")

    @azure_endpoint_guid.setter
    def azure_endpoint_guid(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "azure_endpoint_guid", value)

    @property
    @pulumi.getter(name="azureEndpointName")
    def azure_endpoint_name(self) -> Optional[pulumi.Input[str]]:
        """Azure endpoint name. Only applicable when the ruleset type is set to `"azure_private_endpoint"`."""
        return pulumi.get(self, "azure_endpoint_name")

    @azure_endpoint_name.setter
    def azure_endpoint_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "azure_endpoint_name", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Description of this individual rule."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """The ruleset ID."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def source(self) -> Optional[pulumi.Input[str]]:
        """traffic filter source: IP address, CIDR mask, or VPC endpoint ID, **only required** when the type is not `"azure_private_endpoint"`."""
        return pulumi.get(self, "source")

    @source.setter
    def source(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source", value)
@pulumi.input_type
class GetDeploymentsApmArgs:
    def __init__(__self__, *,
                 healthy: Optional[str] = None,
                 status: Optional[str] = None,
                 version: Optional[str] = None):
        """
        Filter criteria applied to APM resources when searching deployments.

        :param str healthy: Overall health status of the deployment.
        """
        # Only explicitly supplied filters are stored; None means "no filter".
        for attr, supplied in (("healthy", healthy), ("status", status), ("version", version)):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter
    def healthy(self) -> Optional[str]:
        """Overall health status of the deployment."""
        return pulumi.get(self, "healthy")

    @healthy.setter
    def healthy(self, value: Optional[str]):
        pulumi.set(self, "healthy", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[str]:
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[str]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[str]:
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[str]):
        pulumi.set(self, "version", value)
@pulumi.input_type
class GetDeploymentsElasticsearchArgs:
    def __init__(__self__, *,
                 healthy: Optional[str] = None,
                 status: Optional[str] = None,
                 version: Optional[str] = None):
        """
        Filter criteria applied to Elasticsearch resources when searching deployments.

        :param str healthy: Overall health status of the deployment.
        """
        # Only explicitly supplied filters are stored; None means "no filter".
        for attr, supplied in (("healthy", healthy), ("status", status), ("version", version)):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter
    def healthy(self) -> Optional[str]:
        """Overall health status of the deployment."""
        return pulumi.get(self, "healthy")

    @healthy.setter
    def healthy(self, value: Optional[str]):
        pulumi.set(self, "healthy", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[str]:
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[str]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[str]:
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[str]):
        pulumi.set(self, "version", value)
@pulumi.input_type
class GetDeploymentsEnterpriseSearchArgs:
    def __init__(__self__, *,
                 healthy: Optional[str] = None,
                 status: Optional[str] = None,
                 version: Optional[str] = None):
        """
        Filter criteria applied to Enterprise Search resources when searching deployments.

        :param str healthy: Overall health status of the deployment.
        """
        # Only explicitly supplied filters are stored; None means "no filter".
        for attr, supplied in (("healthy", healthy), ("status", status), ("version", version)):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter
    def healthy(self) -> Optional[str]:
        """Overall health status of the deployment."""
        return pulumi.get(self, "healthy")

    @healthy.setter
    def healthy(self, value: Optional[str]):
        pulumi.set(self, "healthy", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[str]:
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[str]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[str]:
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[str]):
        pulumi.set(self, "version", value)
@pulumi.input_type
class GetDeploymentsIntegrationsServerArgs:
    def __init__(__self__, *,
                 healthy: Optional[str] = None,
                 status: Optional[str] = None,
                 version: Optional[str] = None):
        """
        Filter criteria applied to Integrations Server resources when searching deployments.

        :param str healthy: Overall health status of the deployment.
        """
        # Only explicitly supplied filters are stored; None means "no filter".
        for attr, supplied in (("healthy", healthy), ("status", status), ("version", version)):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter
    def healthy(self) -> Optional[str]:
        """Overall health status of the deployment."""
        return pulumi.get(self, "healthy")

    @healthy.setter
    def healthy(self, value: Optional[str]):
        pulumi.set(self, "healthy", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[str]:
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[str]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[str]:
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[str]):
        pulumi.set(self, "version", value)
@pulumi.input_type
class GetDeploymentsKibanaArgs:
    def __init__(__self__, *,
                 healthy: Optional[str] = None,
                 status: Optional[str] = None,
                 version: Optional[str] = None):
        """
        Filter criteria applied to Kibana resources when searching deployments.

        :param str healthy: Overall health status of the deployment.
        """
        # Only explicitly supplied filters are stored; None means "no filter".
        for attr, supplied in (("healthy", healthy), ("status", status), ("version", version)):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter
    def healthy(self) -> Optional[str]:
        """Overall health status of the deployment."""
        return pulumi.get(self, "healthy")

    @healthy.setter
    def healthy(self, value: Optional[str]):
        pulumi.set(self, "healthy", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[str]:
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[str]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[str]:
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[str]):
        pulumi.set(self, "version", value)
| 43.540414
| 314
| 0.672738
| 13,139
| 111,507
| 5.511835
| 0.029607
| 0.094173
| 0.079067
| 0.083844
| 0.913063
| 0.880655
| 0.860605
| 0.81356
| 0.793607
| 0.771776
| 0
| 0.000011
| 0.21856
| 111,507
| 2,560
| 315
| 43.557422
| 0.831103
| 0.251141
| 0
| 0.746988
| 1
| 0
| 0.129211
| 0.063196
| 0
| 0
| 0
| 0
| 0
| 1
| 0.204217
| false
| 0
| 0.003012
| 0.022892
| 0.318072
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
332c61989e80deeea9aecd4ad7d38910a4944319
| 35
|
py
|
Python
|
src/text_cleaning/__init__.py
|
ku-nlp/text-cleaning
|
0fd762bbebb2195f24332f5a52f80c308b73ea97
|
[
"MIT"
] | 2
|
2020-02-15T03:47:22.000Z
|
2020-02-17T04:47:43.000Z
|
src/text_cleaning/__init__.py
|
ku-nlp/text-cleaning
|
0fd762bbebb2195f24332f5a52f80c308b73ea97
|
[
"MIT"
] | 4
|
2020-02-17T11:27:16.000Z
|
2021-06-21T11:08:01.000Z
|
src/text_cleaning/__init__.py
|
ku-nlp/text-cleaning
|
0fd762bbebb2195f24332f5a52f80c308b73ea97
|
[
"MIT"
] | 1
|
2020-03-17T08:44:13.000Z
|
2020-03-17T08:44:13.000Z
|
from .clean_text import clean_text
| 17.5
| 34
| 0.857143
| 6
| 35
| 4.666667
| 0.666667
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
33544a612e2916bb78e0074a1bc276f6ba43ebe8
| 99,028
|
py
|
Python
|
deltalm/src/fairseq/models/xlmt_decoder_variant.py
|
mbrner/unilm
|
5988aa468a8b86e9a6689a601ca3b0263c74f0c5
|
[
"MIT"
] | 4
|
2022-03-31T02:16:34.000Z
|
2022-03-31T15:38:50.000Z
|
deltalm/src/fairseq/models/xlmt_decoder_variant.py
|
mbrner/unilm
|
5988aa468a8b86e9a6689a601ca3b0263c74f0c5
|
[
"MIT"
] | null | null | null |
deltalm/src/fairseq/models/xlmt_decoder_variant.py
|
mbrner/unilm
|
5988aa468a8b86e9a6689a601ca3b0263c74f0c5
|
[
"MIT"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
from typing import Any, Dict, List, Optional, Tuple
import torch
import torch.nn as nn
from torch import Tensor
from fairseq import checkpoint_utils
from fairseq.data.legacy.masked_lm_dictionary import MaskedLMDictionary
from fairseq.models import register_model, register_model_architecture
from fairseq.models.transformer import (
TransformerDecoder,
TransformerEncoder,
)
from fairseq.models.transformer_from_pretrained_infoxlm import (
TransformerFromPretrainedInfoXLMModel,
upgrade_state_dict_with_infoxlm_weights
)
from fairseq.modules.transformer_layer import (
TransformerDecoderLayer
)
from fairseq.modules.multihead_attention import MultiheadAttention
from fairseq.modules import LayerNorm
from fairseq.modules.fairseq_dropout import FairseqDropout
from fairseq.modules.quant_noise import quant_noise
from fairseq import utils
from fairseq.file_io import PathManager
from fairseq.checkpoint_utils import expand_embedding_matrix_v2
import logging
logger = logging.getLogger(__name__)
def upgrade_state_dict_for_two_ffn(
    state_dict: Dict[str, Any], pretrained_infoxlm_checkpoint: str, num_layers: int
) -> Dict[str, Any]:
    """Fill an "addffn"-variant decoder ``state_dict`` from a pretrained
    InfoXLM checkpoint whose keys start with ``decoder.sentence_encoder.``.

    Pretrained layers are consumed in pairs: layers ``2i`` and ``2i+1`` both
    map onto decoder layer ``i`` (the ``f'.{i_layer}' -> f'.{i_layer // 2}'``
    rewrite below).  Within a pair, as the branches show:

    * even layers keep their self-attention q/k/v/out_proj names and donate
      their FFN as the extra ``fc3``/``fc4`` (``final_layer_norm`` becomes
      ``ffn_layer_norm``);
    * odd layers have ``self_attn*`` renamed to ``encoder_attn*`` (i.e. they
      become the cross-attention) and keep their FFN as ``fc1``/``fc2``.

    Fused ``in_proj_weight``/``in_proj_bias`` tensors are split into separate
    q/k/v projections.  Positional embeddings are truncated to the target
    size; token embeddings are copied into the (possibly larger) target
    matrix.  ``lm_head`` parameters and non-``decoder.`` keys are skipped.

    :param state_dict: freshly initialised decoder state dict to update in place.
    :param pretrained_infoxlm_checkpoint: path to the pretrained checkpoint.
    :param num_layers: decoder layer count; pretrained layers with index
        ``> 2*num_layers - 1`` are ignored.
    :returns: the updated ``state_dict``.
    :raises IOError: if the checkpoint file does not exist.
    """
    if not os.path.exists(pretrained_infoxlm_checkpoint):
        raise IOError("Model file not found: {}".format(pretrained_infoxlm_checkpoint))
    # state = checkpoint_utils.load_checkpoint_to_cpu(pretrained_infoxlm_checkpoint)
    with open(PathManager.get_local_path(pretrained_infoxlm_checkpoint), "rb") as f:
        state = torch.load(f, map_location=torch.device("cpu"))
    infoxlm_state_dict = state["model"]
    for key in infoxlm_state_dict.keys():
        # Keys look like "decoder.sentence_encoder.layers.<i>...."; dot-component
        # 3 is the pretrained layer index.
        if 'layers' in key and int(key.split('.')[3]) > 2*num_layers-1:
            continue
        if not key.startswith('decoder.'):
            continue
        if 'lm_head' not in key:
            if 'in_proj_weight' in key:
                # Split the fused QKV projection into separate q/k/v weights.
                q, k ,v = infoxlm_state_dict[key].chunk(3, dim=0)
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_weight', 'q_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}')] = q
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_weight', 'k_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}')] = k
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_weight', 'v_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}')] = v
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_weight', 'q_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = q
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_weight', 'k_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = k
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_weight', 'v_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = v
            elif 'in_proj_bias' in key:
                # Same QKV split as above, for the fused bias vector.
                q, k ,v = infoxlm_state_dict[key].chunk(3, dim=0)
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_bias', 'q_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}')] = q
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_bias', 'k_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}')] = k
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_bias', 'v_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}')] = v
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_bias', 'q_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = q
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_bias', 'k_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = k
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_bias', 'v_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = v
            elif 'fc1' in key:
                # Even layers donate fc1 as the extra FFN's fc3.
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('fc1', 'fc3').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'fc2' in key:
                # Even layers donate fc2 as the extra FFN's fc4.
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('fc2', 'fc4').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'final_layer_norm' in key:
                # Even layers' final_layer_norm becomes the extra FFN's layer norm.
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('final_layer_norm', 'ffn_layer_norm').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'self_attn.out_proj' in key:
                # Odd layers' self-attention becomes the cross-attention.
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 1:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('self_attn.out_proj', 'encoder_attn.out_proj').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'self_attn.k_proj' in key or 'self_attn.v_proj' in key or 'self_attn.q_proj' in key:
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 1:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('self_attn', 'encoder_attn').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'self_attn_layer_norm' in key:
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 1:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('self_attn_layer_norm', 'encoder_attn_layer_norm').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'emb_layer_norm' in key:
                state_dict[key.replace('decoder.sentence_encoder.emb_layer_norm', 'layernorm_embedding')] = infoxlm_state_dict[key]
            elif 'embed_positions' in key:
                # Truncate pretrained positional embeddings to the target length.
                state_dict[key.replace('decoder.sentence_encoder.', '')] = infoxlm_state_dict[key][:state_dict[key.replace('decoder.sentence_encoder.', '')].size(0)]
            elif 'embed_tokens' in key:
                # Copy pretrained token embeddings into the (possibly larger) target matrix.
                state_dict[key.replace('decoder.sentence_encoder.', '')][:infoxlm_state_dict[key].size(0)] = infoxlm_state_dict[key]
            else:
                state_dict[key.replace('decoder.sentence_encoder.', '')] = infoxlm_state_dict[key]
    return state_dict
def upgrade_gpt_state_dict_for_two_ffn(
    state_dict: Dict[str, Any], pretrained_infoxlm_checkpoint: str, num_layers: int, use_adapter=False
) -> Dict[str, Any]:
    """GPT-checkpoint variant of :func:`upgrade_state_dict_for_two_ffn`.

    Identical pairing scheme (pretrained layers ``2i``/``2i+1`` fold into
    decoder layer ``i``; even layers keep self-attention and donate
    ``fc3``/``fc4``, odd layers become the cross-attention and keep
    ``fc1``/``fc2``), but for checkpoints whose keys are prefixed with plain
    ``decoder.`` (layer index at dot-component 2 instead of 3).  When
    ``use_adapter`` is True, adapter parameters are copied through verbatim.

    :param state_dict: freshly initialised decoder state dict to update in place.
    :param pretrained_infoxlm_checkpoint: path to the pretrained checkpoint.
    :param num_layers: decoder layer count; pretrained layers with index
        ``> 2*num_layers - 1`` are ignored.
    :param use_adapter: also copy ``adapter`` parameters when True.
    :returns: the updated ``state_dict``.
    :raises IOError: if the checkpoint file does not exist.
    """
    if not os.path.exists(pretrained_infoxlm_checkpoint):
        raise IOError("Model file not found: {}".format(pretrained_infoxlm_checkpoint))
    # state = checkpoint_utils.load_checkpoint_to_cpu(pretrained_infoxlm_checkpoint)
    with open(PathManager.get_local_path(pretrained_infoxlm_checkpoint), "rb") as f:
        state = torch.load(f, map_location=torch.device("cpu"))
    infoxlm_state_dict = state["model"]
    for key in infoxlm_state_dict.keys():
        # Keys look like "decoder.layers.<i>...."; dot-component 2 is the
        # pretrained layer index.
        if 'layers' in key and int(key.split('.')[2]) > 2 * num_layers - 1:
            continue
        if not key.startswith('decoder.'):
            continue
        if 'lm_head' not in key:
            if "adapter" in key and use_adapter:
                # Adapter weights keep their names, minus the "decoder." prefix.
                state_dict[key.replace('decoder.', '')] = infoxlm_state_dict[key]
            elif 'in_proj_weight' in key:
                # Split the fused QKV projection into separate q/k/v weights.
                q, k, v = infoxlm_state_dict[key].chunk(3, dim=0)
                i_layer = int(key.split('.')[2])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.', '').replace('in_proj_weight', 'q_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}')] = q
                    state_dict[key.replace('decoder.', '').replace('in_proj_weight', 'k_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}')] = k
                    state_dict[key.replace('decoder.', '').replace('in_proj_weight', 'v_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}')] = v
                else:
                    state_dict[key.replace('decoder.', '').replace('in_proj_weight', 'q_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = q
                    state_dict[key.replace('decoder.', '').replace('in_proj_weight', 'k_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = k
                    state_dict[key.replace('decoder.', '').replace('in_proj_weight', 'v_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = v
            elif 'in_proj_bias' in key:
                # Same QKV split as above, for the fused bias vector.
                q, k, v = infoxlm_state_dict[key].chunk(3, dim=0)
                i_layer = int(key.split('.')[2])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.', '').replace('in_proj_bias', 'q_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}')] = q
                    state_dict[key.replace('decoder.', '').replace('in_proj_bias', 'k_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}')] = k
                    state_dict[key.replace('decoder.', '').replace('in_proj_bias', 'v_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}')] = v
                else:
                    state_dict[key.replace('decoder.', '').replace('in_proj_bias', 'q_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = q
                    state_dict[key.replace('decoder.', '').replace('in_proj_bias', 'k_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = k
                    state_dict[key.replace('decoder.', '').replace('in_proj_bias', 'v_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = v
            elif 'fc1' in key:
                # Even layers donate fc1 as the extra FFN's fc3.
                i_layer = int(key.split('.')[2])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.', '').replace('fc1', 'fc3').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'fc2' in key:
                # Even layers donate fc2 as the extra FFN's fc4.
                i_layer = int(key.split('.')[2])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.', '').replace('fc2', 'fc4').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'final_layer_norm' in key:
                # Even layers' final_layer_norm becomes the extra FFN's layer norm.
                i_layer = int(key.split('.')[2])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.', '').replace('final_layer_norm', 'ffn_layer_norm').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'self_attn.out_proj' in key:
                # Odd layers' self-attention becomes the cross-attention.
                i_layer = int(key.split('.')[2])
                if i_layer % 2 == 1:
                    state_dict[key.replace('decoder.', '').replace('self_attn.out_proj', 'encoder_attn.out_proj').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'self_attn.k_proj' in key or 'self_attn.v_proj' in key or 'self_attn.q_proj' in key:
                i_layer = int(key.split('.')[2])
                if i_layer % 2 == 1:
                    state_dict[key.replace('decoder.', '').replace('self_attn', 'encoder_attn').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'self_attn_layer_norm' in key:
                i_layer = int(key.split('.')[2])
                if i_layer % 2 == 1:
                    state_dict[key.replace('decoder.', '').replace('self_attn_layer_norm', 'encoder_attn_layer_norm').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'emb_layer_norm' in key:
                state_dict[key.replace('decoder.emb_layer_norm', 'layernorm_embedding')] = infoxlm_state_dict[key]
            elif 'embed_positions' in key:
                # Truncate pretrained positional embeddings to the target length.
                state_dict[key.replace('decoder.', '')] = infoxlm_state_dict[key][:state_dict[key.replace('decoder.', '')].size(0)]
            elif 'embed_tokens' in key:
                # Copy pretrained token embeddings into the (possibly larger) target matrix.
                state_dict[key.replace('decoder.', '')][:infoxlm_state_dict[key].size(0)] = infoxlm_state_dict[key]
            else:
                state_dict[key.replace('decoder.', '')] = infoxlm_state_dict[key]
    return state_dict
def upgrade_gpt_state_dict(
    state_dict: Dict[str, Any], pretrained_infoxlm_checkpoint: str, num_layers: int
) -> Dict[str, Any]:
    """Copy every ``decoder.*`` tensor of a pretrained checkpoint into
    *state_dict*, stripping the ``decoder.`` prefix.

    Layer entries whose index (dot-component 2 of the key) exceeds
    ``num_layers - 1`` are skipped.  Returns the updated *state_dict*;
    raises ``IOError`` if the checkpoint file does not exist.
    """
    if not os.path.exists(pretrained_infoxlm_checkpoint):
        raise IOError("Model file not found: {}".format(pretrained_infoxlm_checkpoint))
    # state = checkpoint_utils.load_checkpoint_to_cpu(pretrained_infoxlm_checkpoint)
    local_path = PathManager.get_local_path(pretrained_infoxlm_checkpoint)
    with open(local_path, "rb") as checkpoint_file:
        checkpoint = torch.load(checkpoint_file, map_location=torch.device("cpu"))
    for name, tensor in checkpoint["model"].items():
        is_excess_layer = 'layers' in name and int(name.split('.')[2]) > num_layers - 1
        if is_excess_layer or not name.startswith('decoder.'):
            continue
        state_dict[name.replace('decoder.', '')] = tensor
    return state_dict
def upgrade_state_dict_for_ca_first_two_ffn(
    state_dict: Dict[str, Any], pretrained_infoxlm_checkpoint: str, num_layers: int
) -> Dict[str, Any]:
    """Cross-attention-first variant of :func:`upgrade_state_dict_for_two_ffn`.

    Same pairing (pretrained layers ``2i``/``2i+1`` fold into decoder layer
    ``i``) and the same FFN scheme (even layers donate ``fc3``/``fc4`` and
    ``ffn_layer_norm``; odd layers keep ``fc1``/``fc2``), but the attention
    parity is swapped relative to the plain "addffn" mapping: here EVEN layers
    have ``self_attn*`` renamed to ``encoder_attn*`` (cross-attention comes
    first) and ODD layers keep their self-attention names.

    :param state_dict: freshly initialised decoder state dict to update in place.
    :param pretrained_infoxlm_checkpoint: path to the pretrained checkpoint.
    :param num_layers: decoder layer count; pretrained layers with index
        ``> 2*num_layers - 1`` are ignored.
    :returns: the updated ``state_dict``.
    :raises IOError: if the checkpoint file does not exist.
    """
    if not os.path.exists(pretrained_infoxlm_checkpoint):
        raise IOError("Model file not found: {}".format(pretrained_infoxlm_checkpoint))
    # state = checkpoint_utils.load_checkpoint_to_cpu(pretrained_infoxlm_checkpoint)
    with open(PathManager.get_local_path(pretrained_infoxlm_checkpoint), "rb") as f:
        state = torch.load(f, map_location=torch.device("cpu"))
    infoxlm_state_dict = state["model"]
    for key in infoxlm_state_dict.keys():
        # Keys look like "decoder.sentence_encoder.layers.<i>...."; dot-component
        # 3 is the pretrained layer index.
        if 'layers' in key and int(key.split('.')[3]) > 2*num_layers-1:
            continue
        if not key.startswith('decoder.'):
            continue
        if 'lm_head' not in key:
            if 'in_proj_weight' in key:
                # Split the fused QKV projection; odd layers keep self_attn
                # names, even layers become the cross-attention.
                q, k ,v = infoxlm_state_dict[key].chunk(3, dim=0)
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 1:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_weight', 'q_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}')] = q
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_weight', 'k_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}')] = k
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_weight', 'v_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}')] = v
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_weight', 'q_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = q
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_weight', 'k_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = k
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_weight', 'v_proj.weight').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = v
            elif 'in_proj_bias' in key:
                # Same QKV split as above, for the fused bias vector.
                q, k ,v = infoxlm_state_dict[key].chunk(3, dim=0)
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 1:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_bias', 'q_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}')] = q
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_bias', 'k_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}')] = k
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_bias', 'v_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}')] = v
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_bias', 'q_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = q
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_bias', 'k_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = k
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('in_proj_bias', 'v_proj.bias').replace(f'.{i_layer}', f'.{i_layer // 2}').replace('self_attn', 'encoder_attn')] = v
            elif 'fc1' in key:
                # Even layers donate fc1 as the extra FFN's fc3.
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('fc1', 'fc3').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'fc2' in key:
                # Even layers donate fc2 as the extra FFN's fc4.
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('fc2', 'fc4').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'final_layer_norm' in key:
                # Even layers' final_layer_norm becomes the extra FFN's layer norm.
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('final_layer_norm', 'ffn_layer_norm').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'self_attn.out_proj' in key:
                # EVEN layers' self-attention becomes the cross-attention here.
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('self_attn.out_proj', 'encoder_attn.out_proj').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'self_attn.k_proj' in key or 'self_attn.v_proj' in key or 'self_attn.q_proj' in key:
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('self_attn', 'encoder_attn').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'self_attn_layer_norm' in key:
                i_layer = int(key.split('.')[3])
                if i_layer % 2 == 0:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace('self_attn_layer_norm', 'encoder_attn_layer_norm').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
                else:
                    state_dict[key.replace('decoder.sentence_encoder.', '').replace(f'.{i_layer}', f'.{i_layer // 2}')] = infoxlm_state_dict[key]
            elif 'emb_layer_norm' in key:
                state_dict[key.replace('decoder.sentence_encoder.emb_layer_norm', 'layernorm_embedding')] = infoxlm_state_dict[key]
            elif 'embed_positions' in key:
                # Truncate pretrained positional embeddings to the target length.
                state_dict[key.replace('decoder.sentence_encoder.', '')] = infoxlm_state_dict[key][:state_dict[key.replace('decoder.sentence_encoder.', '')].size(0)]
            elif 'embed_tokens' in key:
                # Copy pretrained token embeddings into the (possibly larger) target matrix.
                state_dict[key.replace('decoder.sentence_encoder.', '')][:infoxlm_state_dict[key].size(0)] = infoxlm_state_dict[key]
            else:
                state_dict[key.replace('decoder.sentence_encoder.', '')] = infoxlm_state_dict[key]
    return state_dict
def upgrade_deltalm_state_for_xlmt_model(
    state_dict: Dict[str, Any], pretrained_deltalm_checkpoint: str, encoder: TransformerEncoder
) -> Dict[str, Any]:
    """Initialise *state_dict* from a pretrained Delta-LM checkpoint.

    Mapping performed (driven by the checkpoint's key names):

    * ``src_embedding.*`` -> ``encoder.*`` and ``tgt_embedding.*`` ->
      ``decoder.*`` (embedding tables truncated to the target size; the
      decoder output projection is tied to the decoder token embeddings);
    * Delta-LM's two sub-FFNs map onto this model's four FC layers:
      ``ffn_1.fc1/fc2`` -> ``fc1/fc2`` and ``ffn_2.fc1/fc2`` -> ``fc3/fc4``;
      a plain ``ffn.fc*`` just drops the ``ffn.`` prefix;
    * any other key is copied when present in *state_dict*, otherwise logged
      and skipped.

    Finally the embedding matrix is expanded via
    ``expand_embedding_matrix_v2(..., 'random')``.

    :param state_dict: target model state dict (updated in place).
    :param pretrained_deltalm_checkpoint: path to the Delta-LM checkpoint.
    :param encoder: encoder instance passed through to the embedding expansion.
    :returns: the updated ``state_dict``.
    :raises IOError: if the checkpoint file does not exist.
    """
    if not os.path.exists(pretrained_deltalm_checkpoint):
        raise IOError("Model file not found: {}".format(pretrained_deltalm_checkpoint))
    with open(PathManager.get_local_path(pretrained_deltalm_checkpoint), "rb") as f:
        state = torch.load(f, map_location=torch.device("cpu"))
    mt_state_dict = state["weights"]
    # Ordered FFN rename table; the specific "ffn_1"/"ffn_2" patterns must be
    # tried before the generic "ffn.fc" one (first match wins).
    ffn_renames = (
        ('ffn_1.fc1', 'fc1'),
        ('ffn_1.fc2', 'fc2'),
        ('ffn_2.fc1', 'fc3'),
        ('ffn_2.fc2', 'fc4'),
        ('ffn.fc', 'fc'),
    )
    for key in mt_state_dict.keys():
        if 'src_embedding' in key:
            new_key = key.replace('src_embedding', 'encoder')
            # Assert before use (was previously asserted after the assignment,
            # inconsistent with the tgt_embedding branch below).
            assert new_key in state_dict.keys()
            state_dict[new_key] = mt_state_dict[key][:state_dict[new_key].size(0)]
        elif 'tgt_embedding' in key:
            new_key = key.replace('tgt_embedding', 'decoder')
            assert new_key in state_dict.keys()
            state_dict[new_key] = mt_state_dict[key][:state_dict[new_key].size(0)]
            # Tie the output projection to the (possibly truncated) embeddings.
            state_dict['decoder.output_projection.weight'] = state_dict['decoder.embed_tokens.weight']
        else:
            for old, new in ffn_renames:
                if old in key:
                    new_key = key.replace(old, new)
                    break
            else:
                new_key = key
            if new_key in state_dict.keys():
                state_dict[new_key] = mt_state_dict[key]
            else:
                logger.info("Skipping {}".format(new_key))
    state_dict = expand_embedding_matrix_v2(state_dict, encoder, 'random')
    return state_dict
@register_model("xlmt_decoder_variant")
class XLMTDecoderVariantModel(TransformerFromPretrainedInfoXLMModel):
    """Transformer MT model whose decoder layer type is selected by
    ``--variant`` and which can be initialised from a pretrained Delta-LM
    checkpoint (``--pretrained-deltalm-checkpoint``)."""

    @staticmethod
    def add_args(parser):
        """Add model-specific arguments to the parser."""
        TransformerFromPretrainedInfoXLMModel.add_args(parser)
        parser.add_argument(
            "--variant",
            type=str,
            metavar="STR",
        )
        parser.add_argument(
            "--pretrained-deltalm-checkpoint",
            type=str,
            metavar="STR",
        )

    @classmethod
    def build_encoder(cls, args, tgt_dict, embed_tokens):
        return XLMTEncoder(args, tgt_dict, embed_tokens)

    @classmethod
    def build_decoder(cls, args, tgt_dict, embed_tokens):
        return XLMTDecoder(args, tgt_dict, embed_tokens)

    def __init__(self, args, encoder, decoder):
        super().__init__(args, encoder, decoder)
        # Loading from Delta-LM Pretrained Model.
        # BUGFIX: argparse leaves the attribute as None when the flag is not
        # given, and hasattr() is then True while os.path.exists(None) raises
        # TypeError.  Guard with getattr + truthiness instead.
        deltalm_ckpt = getattr(args, "pretrained_deltalm_checkpoint", None)
        if deltalm_ckpt and os.path.exists(deltalm_ckpt):
            deltalm_loaded_state_dict = upgrade_deltalm_state_for_xlmt_model(
                state_dict=self.state_dict(),
                pretrained_deltalm_checkpoint=deltalm_ckpt,
                encoder=encoder,
            )
            logger.info("Loading pretrained_deltalm_checkpoint from {0}".format(deltalm_ckpt))
            self.load_state_dict(deltalm_loaded_state_dict, strict=True)
        else:
            logger.info("Can not Load pretrained_deltalm_checkpoint !")
        # End #

    def forward(
        self,
        src_tokens,
        src_lengths,
        prev_output_tokens,
        return_all_hiddens: bool = True,
        features_only: bool = False,
        alignment_layer: Optional[int] = None,
        alignment_heads: Optional[int] = None,
        **extra_args
    ):
        """Encode *src_tokens* and decode *prev_output_tokens* against the
        encoder output; optional ``src_lang_id``/``tgt_lang_id`` entries of
        ``extra_args`` are forwarded to the decoder."""
        encoder_out = self.encoder(
            src_tokens, src_lengths=src_lengths, return_all_hiddens=return_all_hiddens
        )
        decoder_out = self.decoder(
            prev_output_tokens,
            encoder_out=encoder_out,
            features_only=features_only,
            alignment_layer=alignment_layer,
            alignment_heads=alignment_heads,
            src_lengths=src_lengths,
            return_all_hiddens=return_all_hiddens,
            src_lang_id=extra_args.get("src_lang_id"),
            tgt_lang_id=extra_args.get("tgt_lang_id"),
        )
        return decoder_out
class XLMTEncoder(TransformerEncoder):
    """TransformerEncoder that, when ``--init-encoder-only`` is set, loads its
    weights from a pretrained InfoXLM checkpoint and can optionally freeze its
    layers (``--freeze-encoder``)."""

    def __init__(self, args, dictionary, embed_tokens):
        super().__init__(args, dictionary, embed_tokens)
        if not getattr(args, "init_encoder_only", False):
            return
        # BUGFIX: the attribute may exist with value None (argparse default),
        # and os.path.exists(None) raises TypeError; guard with getattr +
        # truthiness rather than hasattr.
        infoxlm_ckpt = getattr(args, "pretrained_infoxlm_checkpoint", None)
        if infoxlm_ckpt and os.path.exists(infoxlm_ckpt):
            infoxlm_loaded_state_dict = upgrade_state_dict_with_infoxlm_weights(
                state_dict=self.state_dict(),
                pretrained_infoxlm_checkpoint=infoxlm_ckpt,
                num_layers=args.encoder_layers,
            )
            self.load_state_dict(infoxlm_loaded_state_dict, strict=False)
            # Use the module logger instead of print, consistent with the
            # rest of this file.
            logger.info("Loading encoder from {0}".format(infoxlm_ckpt))
        if getattr(args, 'freeze_encoder', False):
            for param in self.layers.parameters():
                param.requires_grad = False
class XLMTDecoder(TransformerDecoder):
    def __init__(self, args, dictionary, embed_tokens, no_encoder_attn=False):
        """Build the decoder and, when ``--init-decoder-only`` is set,
        initialise it from a pretrained checkpoint selected by ``args.variant``.

        NOTE(review): the ``gpt-addffn`` branch reads ``self.use_adapter`` and
        the ``gpt-*`` branches read ``args.pretrained_gpt_checkpoint``; neither
        is defined in this file's visible code — presumably supplied by the
        base class / argument parser.  Also note the ``gpt-*`` branches are
        gated on the existence of ``pretrained_infoxlm_checkpoint`` but load
        from ``pretrained_gpt_checkpoint`` — confirm this is intended.
        """
        super().__init__(args, dictionary, embed_tokens, no_encoder_attn)
        if not getattr(args, "init_decoder_only", False):
            return
        # Normalise the attribute so os.path.exists() below gets a string.
        args.pretrained_infoxlm_checkpoint = getattr(args, "pretrained_infoxlm_checkpoint", "")
        if os.path.exists(args.pretrained_infoxlm_checkpoint):
            if args.variant == 'addffn':
                # Interleaved self/cross-attention pairs with an extra FFN.
                infoxlm_loaded_state_dict = upgrade_state_dict_for_two_ffn(
                    state_dict=self.state_dict(),
                    pretrained_infoxlm_checkpoint=args.pretrained_infoxlm_checkpoint,
                    num_layers=args.decoder_layers,
                )
                print("Loading decoder from {0}".format(args.pretrained_infoxlm_checkpoint))
            elif args.variant == 'gpt-addffn':
                # Same mapping, for a GPT-style checkpoint ("decoder." prefix).
                infoxlm_loaded_state_dict = upgrade_gpt_state_dict_for_two_ffn(
                    state_dict=self.state_dict(),
                    pretrained_infoxlm_checkpoint=args.pretrained_gpt_checkpoint,
                    num_layers=args.decoder_layers,
                    use_adapter=self.use_adapter
                )
                print("Loading decoder from {0}".format(args.pretrained_gpt_checkpoint))
            elif args.variant == 'gpt-two-attn':
                # Plain key copy from a GPT-style checkpoint.
                infoxlm_loaded_state_dict = upgrade_gpt_state_dict(
                    state_dict=self.state_dict(),
                    pretrained_infoxlm_checkpoint=args.pretrained_gpt_checkpoint,
                    num_layers=args.decoder_layers,
                )
                print("Loading decoder from {0}".format(args.pretrained_gpt_checkpoint))
            elif args.variant == 'cafirst_addffn':
                # As 'addffn' but with cross-attention first (swapped parity).
                infoxlm_loaded_state_dict = upgrade_state_dict_for_ca_first_two_ffn(
                    state_dict=self.state_dict(),
                    pretrained_infoxlm_checkpoint=args.pretrained_infoxlm_checkpoint,
                    num_layers=args.decoder_layers,
                )
                print("Loading decoder from {0}".format(args.pretrained_infoxlm_checkpoint))
            else:
                # Default: direct InfoXLM weight upgrade (shared helper).
                infoxlm_loaded_state_dict = upgrade_state_dict_with_infoxlm_weights(
                    state_dict=self.state_dict(),
                    pretrained_infoxlm_checkpoint=args.pretrained_infoxlm_checkpoint,
                    num_layers=args.decoder_layers,
                )
                print("Loading decoder from {0}".format(args.pretrained_infoxlm_checkpoint))
            self.load_state_dict(infoxlm_loaded_state_dict, strict=False)
def build_decoder_layer(self, args, no_encoder_attn=False):
if args.variant == 'first':
layer = XLMTCrossAttnFirstLayer(args, no_encoder_attn)
elif args.variant == 'large':
layer = XLMTCrossAttnLargeLayer(args, no_encoder_attn)
elif args.variant == 'halfffn':
layer = XLMTTwoHalfFFN(args, no_encoder_attn)
elif args.variant == 'addffn' or args.variant == 'gpt-addffn':
layer = XLMTAddFFN(args, no_encoder_attn)
elif args.variant == 'gpt-two-attn':
layer = TransformerDecoderLayer(args, no_encoder_attn)
elif args.variant == 'first_large_halfffn':
layer = XLMTCaFirstQKLargeTwoHalfFFN(args, no_encoder_attn)
elif args.variant == 'ca_sa_large':
layer = XLMTCrossAttnSelfAttnLargeLayer(args, no_encoder_attn)
elif args.variant == 'cafirst_addffn':
layer = XLMTCaFirstAddFFN(args, no_encoder_attn)
else:
raise NotImplementedError
if getattr(args, "checkpoint_activations", False):
layer = checkpoint_wrapper(layer)
return layer
def forward(
self,
prev_output_tokens,
encoder_out: Optional[Dict[str, List[Tensor]]] = None,
incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
features_only: bool = False,
full_context_alignment: bool = False,
alignment_layer: Optional[int] = None,
alignment_heads: Optional[int] = None,
src_lengths: Optional[Any] = None,
return_all_hiddens: bool = False,
src_lang_id = None,
tgt_lang_id = None
):
"""
Args:
prev_output_tokens (LongTensor): previous decoder outputs of shape
`(batch, tgt_len)`, for teacher forcing
encoder_out (optional): output from the encoder, used for
encoder-side attention
incremental_state (dict): dictionary used for storing state during
:ref:`Incremental decoding`
features_only (bool, optional): only return features without
applying output layer (default: False).
full_context_alignment (bool, optional): don't apply
auto-regressive mask to self-attention (default: False).
Returns:
tuple:
- the decoder's output of shape `(batch, tgt_len, vocab)`
- a dictionary with any model-specific outputs
"""
x, extra = self.extract_features(
prev_output_tokens,
encoder_out=encoder_out,
incremental_state=incremental_state,
full_context_alignment=full_context_alignment,
alignment_layer=alignment_layer,
alignment_heads=alignment_heads,
src_lang_id=src_lang_id,
tgt_lang_id=tgt_lang_id,
)
if not features_only:
x = self.output_layer(x)
return x, extra
    def extract_features(
        self,
        prev_output_tokens,
        encoder_out: Optional[Dict[str, List[Tensor]]],
        incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
        full_context_alignment: bool = False,
        alignment_layer: Optional[int] = None,
        alignment_heads: Optional[int] = None,
        src_lang_id = None,
        tgt_lang_id = None
    ):
        """Thin wrapper delegating to :meth:`extract_features_scriptable`,
        passing the language ids through as keyword arguments."""
        return self.extract_features_scriptable(
            prev_output_tokens,
            encoder_out,
            incremental_state,
            full_context_alignment,
            alignment_layer,
            alignment_heads,
            src_lang_id=src_lang_id,
            tgt_lang_id=tgt_lang_id
        )
    def extract_features_scriptable(
        self,
        prev_output_tokens,
        encoder_out: Optional[Dict[str, List[Tensor]]],
        incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
        full_context_alignment: bool = False,
        alignment_layer: Optional[int] = None,
        alignment_heads: Optional[int] = None,
        src_lang_id = None,
        tgt_lang_id = None
    ):
        """
        Similar to *forward* but only return features.

        Includes several features from "Jointly Learning to Align and
        Translate with Transformer Models" (Garg et al., EMNLP 2019).

        Args:
            full_context_alignment (bool, optional): don't apply
                auto-regressive mask to self-attention (default: False).
            alignment_layer (int, optional): return mean alignment over
                heads at this layer (default: last layer).
            alignment_heads (int, optional): only average alignment over
                this many heads (default: all heads).

        Returns:
            tuple:
                - the decoder's features of shape `(batch, tgt_len, embed_dim)`
                - a dictionary with any model-specific outputs

        Note:
            ``src_lang_id`` / ``tgt_lang_id`` are accepted for interface
            compatibility with the non-scriptable wrapper but are not used
            anywhere in this body.
        """
        # Default to extracting alignment from the last decoder layer.
        if alignment_layer is None:
            alignment_layer = self.num_layers - 1
        # embed positions
        positions = (
            self.embed_positions(
                prev_output_tokens, incremental_state=incremental_state
            )
            if self.embed_positions is not None
            else None
        )
        # During incremental decoding only the newest token is processed;
        # earlier steps are cached inside `incremental_state`.
        if incremental_state is not None:
            prev_output_tokens = prev_output_tokens[:, -1:]
            if positions is not None:
                positions = positions[:, -1:]
        # embed tokens and positions
        x = self.embed_scale * self.embed_tokens(prev_output_tokens)
        if self.quant_noise is not None:
            x = self.quant_noise(x)
        if self.project_in_dim is not None:
            x = self.project_in_dim(x)
        if positions is not None:
            x += positions
        if self.layernorm_embedding is not None:
            x = self.layernorm_embedding(x)
        x = self.dropout_module(x)
        # B x T x C -> T x B x C
        x = x.transpose(0, 1)
        # Padding mask is only materialized when some position is actually
        # padded (or when cross-self-attention always needs it).
        self_attn_padding_mask: Optional[Tensor] = None
        if self.cross_self_attention or prev_output_tokens.eq(self.padding_idx).any():
            self_attn_padding_mask = prev_output_tokens.eq(self.padding_idx)
        # decoder layers
        attn: Optional[Tensor] = None
        inner_states: List[Optional[Tensor]] = [x]
        for idx, layer in enumerate(self.layers):
            # Causal mask is skipped when decoding incrementally (single
            # query step) or when full-context alignment is requested.
            if incremental_state is None and not full_context_alignment:
                self_attn_mask = self.buffered_future_mask(x)
            else:
                self_attn_mask = None
            x, layer_attn, _ = layer(
                x,
                encoder_out["encoder_out"][0]
                if (encoder_out is not None and len(encoder_out["encoder_out"]) > 0)
                else None,
                encoder_out["encoder_padding_mask"][0]
                if (
                    encoder_out is not None
                    and len(encoder_out["encoder_padding_mask"]) > 0
                )
                else None,
                incremental_state,
                self_attn_mask=self_attn_mask,
                self_attn_padding_mask=self_attn_padding_mask,
                # Attention weights are only requested at the alignment layer.
                need_attn=bool((idx == alignment_layer)),
                need_head_weights=bool((idx == alignment_layer)),
            )
            inner_states.append(x)
            if layer_attn is not None and idx == alignment_layer:
                # Cast to x's dtype/device after computing in float.
                attn = layer_attn.float().to(x)
        if attn is not None:
            if alignment_heads is not None:
                attn = attn[:alignment_heads]
            # average probabilities over heads
            attn = attn.mean(dim=0)
        if self.layer_norm is not None:
            x = self.layer_norm(x)
        # T x B x C -> B x T x C
        x = x.transpose(0, 1)
        if self.project_out_dim is not None:
            x = self.project_out_dim(x)
        return x, {"attn": [attn], "inner_states": inner_states}
class XLMTCaFirstQKLargeTwoHalfFFN(TransformerDecoderLayer):
    """Decoder layer variant that runs *cross-attention first*, then a
    half-width FFN (fc3/fc4), then self-attention, then a second half-width
    FFN (fc1/fc2).  The encoder-attention projects Q/K into a fixed
    1152-dim space (``qkprojdim``).
    """

    def __init__(
        self, args, no_encoder_attn=False, add_bias_kv=False, add_zero_attn=False
    ):
        # NOTE: deliberately bypasses TransformerDecoderLayer.__init__ (calls
        # its parent, nn.Module, instead) so that this class can build its
        # own sub-modules — in particular the two half-width FFN blocks.
        super(TransformerDecoderLayer, self).__init__()
        self.embed_dim = args.decoder_embed_dim
        self.dropout_module = FairseqDropout(
            args.dropout, module_name=self.__class__.__name__
        )
        self.quant_noise = getattr(args, "quant_noise_pq", 0)
        self.quant_noise_block_size = getattr(args, "quant_noise_pq_block_size", 8)
        self.cross_self_attention = getattr(args, "cross_self_attention", False)
        self.self_attn = self.build_self_attention(
            self.embed_dim,
            args,
            add_bias_kv=add_bias_kv,
            add_zero_attn=add_zero_attn,
        )
        self.activation_fn = utils.get_activation_fn(
            activation=str(args.activation_fn)
            if getattr(args, "activation_fn", None) is not None
            else "relu"
        )
        activation_dropout_p = getattr(args, "activation_dropout", 0) or 0
        if activation_dropout_p == 0:
            # for backwards compatibility with models that use args.relu_dropout
            activation_dropout_p = getattr(args, "relu_dropout", 0) or 0
        self.activation_dropout_module = FairseqDropout(
            float(activation_dropout_p), module_name=self.__class__.__name__
        )
        self.normalize_before = args.decoder_normalize_before
        # use layerNorm rather than FusedLayerNorm for exporting.
        # char_inputs can be used to determine this.
        # TODO remove this once we update apex with the fix
        export = getattr(args, "char_inputs", False)
        self.self_attn_layer_norm = LayerNorm(self.embed_dim, export=export)
        if no_encoder_attn:
            self.encoder_attn = None
            self.encoder_attn_layer_norm = None
        else:
            self.encoder_attn = self.build_encoder_attention(self.embed_dim, args)
            self.encoder_attn_layer_norm = LayerNorm(self.embed_dim, export=export)
        # First half-width FFN (applied after self-attention): fc1 -> fc2.
        self.fc1 = self.build_fc1(
            self.embed_dim,
            args.decoder_ffn_embed_dim // 2,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        self.fc2 = self.build_fc2(
            args.decoder_ffn_embed_dim // 2,
            self.embed_dim,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        # Second half-width FFN (applied right after cross-attention): fc3 -> fc4.
        self.fc3 = self.build_fc1(
            self.embed_dim,
            args.decoder_ffn_embed_dim // 2,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        self.fc4 = self.build_fc2(
            args.decoder_ffn_embed_dim // 2,
            self.embed_dim,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        self.ffn_layer_norm = LayerNorm(self.embed_dim, export=export)
        self.final_layer_norm = LayerNorm(self.embed_dim, export=export)
        self.need_attn = True
        self.onnx_trace = False

    def build_encoder_attention(self, embed_dim, args):
        """Encoder-decoder attention with an enlarged (1152-dim) Q/K projection."""
        return MultiheadAttention(
            embed_dim,
            args.decoder_attention_heads,
            kdim=embed_dim,
            vdim=embed_dim,
            qdim=embed_dim,
            outdim=embed_dim,
            qkprojdim=1152,
            dropout=args.attention_dropout,
            encoder_decoder_attention=True,
            q_noise=self.quant_noise,
            qn_block_size=self.quant_noise_block_size,
        )

    def forward(
        self,
        x,
        encoder_out: Optional[torch.Tensor] = None,
        encoder_padding_mask: Optional[torch.Tensor] = None,
        incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
        prev_self_attn_state: Optional[List[torch.Tensor]] = None,
        prev_attn_state: Optional[List[torch.Tensor]] = None,
        self_attn_mask: Optional[torch.Tensor] = None,
        self_attn_padding_mask: Optional[torch.Tensor] = None,
        need_attn: bool = False,
        need_head_weights: bool = False,
    ):
        """
        Args:
            x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)`
            encoder_padding_mask (ByteTensor, optional): binary
                ByteTensor of shape `(batch, src_len)` where padding
                elements are indicated by ``1``.
            need_attn (bool, optional): return attention weights
            need_head_weights (bool, optional): return attention weights
                for each head (default: return average over heads).

        Returns:
            tuple of (output `(seq_len, batch, embed_dim)`, cross-attention
            weights or None, optional cached self-attention state)
        """
        if need_head_weights:
            need_attn = True
        # Cross-attention weights to return; stays None if the
        # encoder-attention block is skipped.
        attn: Optional[Tensor] = None
        ############### cross-attention (runs first in this variant) ########
        if self.encoder_attn is not None and encoder_out is not None:
            residual = x
            if self.normalize_before:
                x = self.encoder_attn_layer_norm(x)
            if prev_attn_state is not None:
                prev_key, prev_value = prev_attn_state[:2]
                saved_state: Dict[str, Optional[Tensor]] = {
                    "prev_key": prev_key,
                    "prev_value": prev_value,
                }
                if len(prev_attn_state) >= 3:
                    saved_state["prev_key_padding_mask"] = prev_attn_state[2]
                assert incremental_state is not None
                self.encoder_attn._set_input_buffer(incremental_state, saved_state)
            x, attn = self.encoder_attn(
                query=x,
                key=encoder_out,
                value=encoder_out,
                key_padding_mask=encoder_padding_mask,
                incremental_state=incremental_state,
                static_kv=True,
                need_weights=need_attn or (not self.training and self.need_attn),
                need_head_weights=need_head_weights,
            )
            x = self.dropout_module(x)
            x = self.residual_connection(x, residual)
            if not self.normalize_before:
                x = self.encoder_attn_layer_norm(x)
        ############### first half-width FFN (fc3 -> fc4) ###################
        residual = x
        if self.normalize_before:
            x = self.ffn_layer_norm(x)
        x = self.activation_fn(self.fc3(x))
        x = self.activation_dropout_module(x)
        x = self.fc4(x)
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.ffn_layer_norm(x)
        ############### self-attention ######################################
        residual = x
        if self.normalize_before:
            x = self.self_attn_layer_norm(x)
        if prev_self_attn_state is not None:
            prev_key, prev_value = prev_self_attn_state[:2]
            saved_state: Dict[str, Optional[Tensor]] = {
                "prev_key": prev_key,
                "prev_value": prev_value,
            }
            if len(prev_self_attn_state) >= 3:
                saved_state["prev_key_padding_mask"] = prev_self_attn_state[2]
            assert incremental_state is not None
            self.self_attn._set_input_buffer(incremental_state, saved_state)
        _self_attn_input_buffer = self.self_attn._get_input_buffer(incremental_state)
        if self.cross_self_attention and not (
            incremental_state is not None
            and _self_attn_input_buffer is not None
            and "prev_key" in _self_attn_input_buffer
        ):
            if self_attn_mask is not None:
                assert encoder_out is not None
                self_attn_mask = torch.cat(
                    (x.new_zeros(x.size(0), encoder_out.size(0)), self_attn_mask), dim=1
                )
            if self_attn_padding_mask is not None:
                if encoder_padding_mask is None:
                    assert encoder_out is not None
                    encoder_padding_mask = self_attn_padding_mask.new_zeros(
                        encoder_out.size(1), encoder_out.size(0)
                    )
                self_attn_padding_mask = torch.cat(
                    (encoder_padding_mask, self_attn_padding_mask), dim=1
                )
            assert encoder_out is not None
            y = torch.cat((encoder_out, x), dim=0)
        else:
            y = x
        # BUG FIX: this call previously rebound `attn`, clobbering the
        # cross-attention weights with None (need_weights=False), so callers
        # asking for alignment (need_attn / need_head_weights) always got
        # None.  The self-attention weights are intentionally discarded.
        x, _self_attn_weights = self.self_attn(
            query=x,
            key=y,
            value=y,
            key_padding_mask=self_attn_padding_mask,
            incremental_state=incremental_state,
            need_weights=False,
            attn_mask=self_attn_mask,
        )
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.self_attn_layer_norm(x)
        ############### second half-width FFN (fc1 -> fc2) ##################
        residual = x
        if self.normalize_before:
            x = self.final_layer_norm(x)
        x = self.activation_fn(self.fc1(x))
        x = self.activation_dropout_module(x)
        x = self.fc2(x)
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.final_layer_norm(x)
        ############### ONNX export: expose cached self-attn state ##########
        if self.onnx_trace and incremental_state is not None:
            saved_state = self.self_attn._get_input_buffer(incremental_state)
            assert saved_state is not None
            if self_attn_padding_mask is not None:
                self_attn_state = [
                    saved_state["prev_key"],
                    saved_state["prev_value"],
                    saved_state["prev_key_padding_mask"],
                ]
            else:
                self_attn_state = [saved_state["prev_key"], saved_state["prev_value"]]
            return x, attn, self_attn_state
        return x, attn, None
class XLMTCaFirstAddFFN(TransformerDecoderLayer):
    """Decoder layer variant that runs *cross-attention first*, then a
    full-width FFN (fc3/fc4), then self-attention, then a second full-width
    FFN (fc1/fc2).
    """

    def __init__(
        self, args, no_encoder_attn=False, add_bias_kv=False, add_zero_attn=False
    ):
        # NOTE: deliberately bypasses TransformerDecoderLayer.__init__ (calls
        # its parent, nn.Module, instead) so that this class can build its
        # own sub-modules — in particular the extra fc3/fc4 FFN block.
        super(TransformerDecoderLayer, self).__init__()
        self.embed_dim = args.decoder_embed_dim
        self.dropout_module = FairseqDropout(
            args.dropout, module_name=self.__class__.__name__
        )
        self.quant_noise = getattr(args, "quant_noise_pq", 0)
        self.quant_noise_block_size = getattr(args, "quant_noise_pq_block_size", 8)
        self.cross_self_attention = getattr(args, "cross_self_attention", False)
        self.self_attn = self.build_self_attention(
            self.embed_dim,
            args,
            add_bias_kv=add_bias_kv,
            add_zero_attn=add_zero_attn,
        )
        self.activation_fn = utils.get_activation_fn(
            activation=str(args.activation_fn)
            if getattr(args, "activation_fn", None) is not None
            else "relu"
        )
        activation_dropout_p = getattr(args, "activation_dropout", 0) or 0
        if activation_dropout_p == 0:
            # for backwards compatibility with models that use args.relu_dropout
            activation_dropout_p = getattr(args, "relu_dropout", 0) or 0
        self.activation_dropout_module = FairseqDropout(
            float(activation_dropout_p), module_name=self.__class__.__name__
        )
        self.normalize_before = args.decoder_normalize_before
        # use layerNorm rather than FusedLayerNorm for exporting.
        # char_inputs can be used to determine this.
        # TODO remove this once we update apex with the fix
        export = getattr(args, "char_inputs", False)
        self.self_attn_layer_norm = LayerNorm(self.embed_dim, export=export)
        if no_encoder_attn:
            self.encoder_attn = None
            self.encoder_attn_layer_norm = None
        else:
            self.encoder_attn = self.build_encoder_attention(self.embed_dim, args)
            self.encoder_attn_layer_norm = LayerNorm(self.embed_dim, export=export)
        # FFN applied after self-attention: fc1 -> fc2.
        self.fc1 = self.build_fc1(
            self.embed_dim,
            args.decoder_ffn_embed_dim,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        self.fc2 = self.build_fc2(
            args.decoder_ffn_embed_dim,
            self.embed_dim,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        # Additional FFN applied right after cross-attention: fc3 -> fc4.
        self.fc3 = self.build_fc1(
            self.embed_dim,
            args.decoder_ffn_embed_dim,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        self.fc4 = self.build_fc2(
            args.decoder_ffn_embed_dim,
            self.embed_dim,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        self.ffn_layer_norm = LayerNorm(self.embed_dim, export=export)
        self.final_layer_norm = LayerNorm(self.embed_dim, export=export)
        self.need_attn = True
        self.onnx_trace = False

    def forward(
        self,
        x,
        encoder_out: Optional[torch.Tensor] = None,
        encoder_padding_mask: Optional[torch.Tensor] = None,
        incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
        prev_self_attn_state: Optional[List[torch.Tensor]] = None,
        prev_attn_state: Optional[List[torch.Tensor]] = None,
        self_attn_mask: Optional[torch.Tensor] = None,
        self_attn_padding_mask: Optional[torch.Tensor] = None,
        need_attn: bool = False,
        need_head_weights: bool = False,
        src_lang_id = None,
        tgt_lang_id = None
    ):
        """
        Args:
            x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)`
            encoder_padding_mask (ByteTensor, optional): binary
                ByteTensor of shape `(batch, src_len)` where padding
                elements are indicated by ``1``.
            need_attn (bool, optional): return attention weights
            need_head_weights (bool, optional): return attention weights
                for each head (default: return average over heads).

        Returns:
            tuple of (output `(seq_len, batch, embed_dim)`, cross-attention
            weights or None, optional cached self-attention state)

        Note:
            ``src_lang_id`` / ``tgt_lang_id`` are accepted for interface
            compatibility but unused in this layer.
        """
        if need_head_weights:
            need_attn = True
        # Cross-attention weights to return; stays None if the
        # encoder-attention block is skipped.
        attn: Optional[Tensor] = None
        ############### cross-attention (runs first in this variant) ########
        if self.encoder_attn is not None and encoder_out is not None:
            residual = x
            if self.normalize_before:
                x = self.encoder_attn_layer_norm(x)
            if prev_attn_state is not None:
                prev_key, prev_value = prev_attn_state[:2]
                saved_state: Dict[str, Optional[Tensor]] = {
                    "prev_key": prev_key,
                    "prev_value": prev_value,
                }
                if len(prev_attn_state) >= 3:
                    saved_state["prev_key_padding_mask"] = prev_attn_state[2]
                assert incremental_state is not None
                self.encoder_attn._set_input_buffer(incremental_state, saved_state)
            x, attn = self.encoder_attn(
                query=x,
                key=encoder_out,
                value=encoder_out,
                key_padding_mask=encoder_padding_mask,
                incremental_state=incremental_state,
                static_kv=True,
                need_weights=need_attn or (not self.training and self.need_attn),
                need_head_weights=need_head_weights,
            )
            x = self.dropout_module(x)
            x = self.residual_connection(x, residual)
            if not self.normalize_before:
                x = self.encoder_attn_layer_norm(x)
        ############### first FFN (fc3 -> fc4) ##############################
        residual = x
        if self.normalize_before:
            x = self.ffn_layer_norm(x)
        x = self.activation_fn(self.fc3(x))
        x = self.activation_dropout_module(x)
        x = self.fc4(x)
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.ffn_layer_norm(x)
        ############### self-attention ######################################
        residual = x
        if self.normalize_before:
            x = self.self_attn_layer_norm(x)
        if prev_self_attn_state is not None:
            prev_key, prev_value = prev_self_attn_state[:2]
            saved_state: Dict[str, Optional[Tensor]] = {
                "prev_key": prev_key,
                "prev_value": prev_value,
            }
            if len(prev_self_attn_state) >= 3:
                saved_state["prev_key_padding_mask"] = prev_self_attn_state[2]
            assert incremental_state is not None
            self.self_attn._set_input_buffer(incremental_state, saved_state)
        _self_attn_input_buffer = self.self_attn._get_input_buffer(incremental_state)
        if self.cross_self_attention and not (
            incremental_state is not None
            and _self_attn_input_buffer is not None
            and "prev_key" in _self_attn_input_buffer
        ):
            if self_attn_mask is not None:
                assert encoder_out is not None
                self_attn_mask = torch.cat(
                    (x.new_zeros(x.size(0), encoder_out.size(0)), self_attn_mask), dim=1
                )
            if self_attn_padding_mask is not None:
                if encoder_padding_mask is None:
                    assert encoder_out is not None
                    encoder_padding_mask = self_attn_padding_mask.new_zeros(
                        encoder_out.size(1), encoder_out.size(0)
                    )
                self_attn_padding_mask = torch.cat(
                    (encoder_padding_mask, self_attn_padding_mask), dim=1
                )
            assert encoder_out is not None
            y = torch.cat((encoder_out, x), dim=0)
        else:
            y = x
        # BUG FIX: this call previously rebound `attn`, clobbering the
        # cross-attention weights with None (need_weights=False), so callers
        # asking for alignment (need_attn / need_head_weights) always got
        # None.  The self-attention weights are intentionally discarded.
        x, _self_attn_weights = self.self_attn(
            query=x,
            key=y,
            value=y,
            key_padding_mask=self_attn_padding_mask,
            incremental_state=incremental_state,
            need_weights=False,
            attn_mask=self_attn_mask,
        )
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.self_attn_layer_norm(x)
        ############### second FFN (fc1 -> fc2) #############################
        residual = x
        if self.normalize_before:
            x = self.final_layer_norm(x)
        x = self.activation_fn(self.fc1(x))
        x = self.activation_dropout_module(x)
        x = self.fc2(x)
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.final_layer_norm(x)
        # ONNX export: expose cached self-attn state.
        if self.onnx_trace and incremental_state is not None:
            saved_state = self.self_attn._get_input_buffer(incremental_state)
            assert saved_state is not None
            if self_attn_padding_mask is not None:
                self_attn_state = [
                    saved_state["prev_key"],
                    saved_state["prev_value"],
                    saved_state["prev_key_padding_mask"],
                ]
            else:
                self_attn_state = [saved_state["prev_key"], saved_state["prev_value"]]
            return x, attn, self_attn_state
        return x, attn, None
class XLMTAddFFN(TransformerDecoderLayer):
    """Decoder layer with the standard sub-layer order (self-attention,
    FFN, cross-attention, FFN) extended with an extra full-width FFN block
    (fc3/fc4) and optional per-language *parallel adapters* selected by
    ``tgt_lang_id``.
    """

    def __init__(
        self, args, no_encoder_attn=False, add_bias_kv=False, add_zero_attn=False
    ):
        # NOTE: deliberately bypasses TransformerDecoderLayer.__init__ (calls
        # its parent, nn.Module, instead) so this class builds its own
        # sub-modules, including the extra FFN and the adapters.
        super(TransformerDecoderLayer, self).__init__()
        self.embed_dim = args.decoder_embed_dim
        self.dropout_module = FairseqDropout(
            args.dropout, module_name=self.__class__.__name__
        )
        self.quant_noise = getattr(args, "quant_noise_pq", 0)
        self.quant_noise_block_size = getattr(args, "quant_noise_pq_block_size", 8)
        self.cross_self_attention = getattr(args, "cross_self_attention", False)
        self.self_attn = self.build_self_attention(
            self.embed_dim,
            args,
            add_bias_kv=add_bias_kv,
            add_zero_attn=add_zero_attn,
        )
        self.activation_fn = utils.get_activation_fn(
            activation=str(args.activation_fn)
            if getattr(args, "activation_fn", None) is not None
            else "relu"
        )
        activation_dropout_p = getattr(args, "activation_dropout", 0) or 0
        if activation_dropout_p == 0:
            # for backwards compatibility with models that use args.relu_dropout
            activation_dropout_p = getattr(args, "relu_dropout", 0) or 0
        self.activation_dropout_module = FairseqDropout(
            float(activation_dropout_p), module_name=self.__class__.__name__
        )
        self.normalize_before = args.decoder_normalize_before
        # use layerNorm rather than FusedLayerNorm for exporting.
        # char_inputs can be used to determine this.
        # TODO remove this once we update apex with the fix
        export = getattr(args, "char_inputs", False)
        self.self_attn_layer_norm = LayerNorm(self.embed_dim, export=export)
        if no_encoder_attn:
            self.encoder_attn = None
            self.encoder_attn_layer_norm = None
        else:
            self.encoder_attn = self.build_encoder_attention(self.embed_dim, args)
            self.encoder_attn_layer_norm = LayerNorm(self.embed_dim, export=export)
        # FFN applied after cross-attention: fc1 -> fc2.
        self.fc1 = self.build_fc1(
            self.embed_dim,
            args.decoder_ffn_embed_dim,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        self.fc2 = self.build_fc2(
            args.decoder_ffn_embed_dim,
            self.embed_dim,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        # Extra FFN applied right after self-attention: fc3 -> fc4.
        self.fc3 = self.build_fc1(
            self.embed_dim,
            args.decoder_ffn_embed_dim,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        self.fc4 = self.build_fc2(
            args.decoder_ffn_embed_dim,
            self.embed_dim,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        self.ffn_layer_norm = LayerNorm(self.embed_dim, export=export)
        self.final_layer_norm = LayerNorm(self.embed_dim, export=export)
        self.need_attn = True
        self.onnx_trace = False
        # Language adapter (Added By JianYang).  One bottleneck
        # (down-proj -> up-proj) adapter per language; disabled when
        # adapter_dim == 0.
        self.adapter_dim = getattr(args, "adapter_dim", 0)
        if self.adapter_dim > 0:
            self.adapter_down_proj = nn.ModuleList([])
            self.adapter_up_proj = nn.ModuleList([])
            self.adapter_layer_norm = nn.ModuleList([])
            # One adapter stack per language; indexed later by tgt_lang_id.
            for i in range(len(args.langs)):
                self.adapter_down_proj.append(
                    self.build_fc1(
                            self.embed_dim,
                            self.adapter_dim,
                            self.quant_noise,
                            self.quant_noise_block_size,
                    ))
                self.adapter_up_proj.append(
                    self.build_fc2(
                            self.adapter_dim,
                            self.embed_dim,
                            self.quant_noise,
                            self.quant_noise_block_size,
                    ))
                self.adapter_layer_norm.append(LayerNorm(self.embed_dim))
        # End #

    def adapter_forward(self, x, lang_id):
        """Bottleneck adapter for language ``lang_id``: LN -> down-proj ->
        activation -> up-proj, with dropout and a residual connection.
        """
        residual = x
        if self.normalize_before:
            x = self.adapter_layer_norm[lang_id](x)
        x = self.activation_fn(self.adapter_down_proj[lang_id](x))
        x = self.activation_dropout_module(x)
        x = self.adapter_up_proj[lang_id](x)
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.adapter_layer_norm[lang_id](x)
        return x

    def forward(
        self,
        x,
        encoder_out: Optional[torch.Tensor] = None,
        encoder_padding_mask: Optional[torch.Tensor] = None,
        incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
        prev_self_attn_state: Optional[List[torch.Tensor]] = None,
        prev_attn_state: Optional[List[torch.Tensor]] = None,
        self_attn_mask: Optional[torch.Tensor] = None,
        self_attn_padding_mask: Optional[torch.Tensor] = None,
        need_attn: bool = False,
        need_head_weights: bool = False,
        src_lang_id = None,
        tgt_lang_id = None
    ):
        """
        Args:
            x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)`
            encoder_padding_mask (ByteTensor, optional): binary
                ByteTensor of shape `(batch, src_len)` where padding
                elements are indicated by ``1``.
            need_attn (bool, optional): return attention weights
            need_head_weights (bool, optional): return attention weights
                for each head (default: return average over heads).
            tgt_lang_id: index into the per-language adapter ModuleLists;
                required (non-None) when ``adapter_dim > 0``.
            src_lang_id: accepted for interface compatibility; unused here.

        Returns:
            encoded output of shape `(seq_len, batch, embed_dim)`
        """
        if need_head_weights:
            need_attn = True
        ############### self-attention ######################################
        residual = x
        if self.normalize_before:
            x = self.self_attn_layer_norm(x)
        if prev_self_attn_state is not None:
            prev_key, prev_value = prev_self_attn_state[:2]
            saved_state: Dict[str, Optional[Tensor]] = {
                "prev_key": prev_key,
                "prev_value": prev_value,
            }
            if len(prev_self_attn_state) >= 3:
                saved_state["prev_key_padding_mask"] = prev_self_attn_state[2]
            assert incremental_state is not None
            self.self_attn._set_input_buffer(incremental_state, saved_state)
        _self_attn_input_buffer = self.self_attn._get_input_buffer(incremental_state)
        if self.cross_self_attention and not (
            incremental_state is not None
            and _self_attn_input_buffer is not None
            and "prev_key" in _self_attn_input_buffer
        ):
            # Cross-self-attention: let the decoder also attend over the
            # encoder output by prepending it to keys/values.
            if self_attn_mask is not None:
                assert encoder_out is not None
                self_attn_mask = torch.cat(
                    (x.new_zeros(x.size(0), encoder_out.size(0)), self_attn_mask), dim=1
                )
            if self_attn_padding_mask is not None:
                if encoder_padding_mask is None:
                    assert encoder_out is not None
                    encoder_padding_mask = self_attn_padding_mask.new_zeros(
                        encoder_out.size(1), encoder_out.size(0)
                    )
                self_attn_padding_mask = torch.cat(
                    (encoder_padding_mask, self_attn_padding_mask), dim=1
                )
            assert encoder_out is not None
            y = torch.cat((encoder_out, x), dim=0)
        else:
            y = x
        x, attn = self.self_attn(
            query=x,
            key=y,
            value=y,
            key_padding_mask=self_attn_padding_mask,
            incremental_state=incremental_state,
            need_weights=False,
            attn_mask=self_attn_mask,
        )
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.self_attn_layer_norm(x)
        ############### extra FFN (fc3 -> fc4) ##############################
        residual = x
        if self.normalize_before:
            x = self.ffn_layer_norm(x)
        x = self.activation_fn(self.fc3(x))
        x = self.activation_dropout_module(x)
        x = self.fc4(x)
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.ffn_layer_norm(x)
        # Parallel Adapter (Added By Jian Yang): the adapter runs on the
        # FFN *input* (residual) in parallel with the FFN, and its output
        # is added to the FFN output.
        if self.adapter_dim > 0:
            x = self.residual_connection(x, self.adapter_forward(residual, tgt_lang_id))
        # End #
        ############### cross-attention #####################################
        if self.encoder_attn is not None and encoder_out is not None:
            residual = x
            if self.normalize_before:
                x = self.encoder_attn_layer_norm(x)
            if prev_attn_state is not None:
                prev_key, prev_value = prev_attn_state[:2]
                saved_state: Dict[str, Optional[Tensor]] = {
                    "prev_key": prev_key,
                    "prev_value": prev_value,
                }
                if len(prev_attn_state) >= 3:
                    saved_state["prev_key_padding_mask"] = prev_attn_state[2]
                assert incremental_state is not None
                self.encoder_attn._set_input_buffer(incremental_state, saved_state)
            x, attn = self.encoder_attn(
                query=x,
                key=encoder_out,
                value=encoder_out,
                key_padding_mask=encoder_padding_mask,
                incremental_state=incremental_state,
                static_kv=True,
                need_weights=need_attn or (not self.training and self.need_attn),
                need_head_weights=need_head_weights,
            )
            x = self.dropout_module(x)
            x = self.residual_connection(x, residual)
            if not self.normalize_before:
                x = self.encoder_attn_layer_norm(x)
        ############### main FFN (fc1 -> fc2) ###############################
        residual = x
        if self.normalize_before:
            x = self.final_layer_norm(x)
        x = self.activation_fn(self.fc1(x))
        x = self.activation_dropout_module(x)
        x = self.fc2(x)
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.final_layer_norm(x)
        # Parallel Adapter (Added By Jian Yang) — same parallel scheme as above.
        if self.adapter_dim > 0:
            x = self.residual_connection(x, self.adapter_forward(residual, tgt_lang_id))
        # End #
        # ONNX export: expose cached self-attn state.
        if self.onnx_trace and incremental_state is not None:
            saved_state = self.self_attn._get_input_buffer(incremental_state)
            assert saved_state is not None
            if self_attn_padding_mask is not None:
                self_attn_state = [
                    saved_state["prev_key"],
                    saved_state["prev_value"],
                    saved_state["prev_key_padding_mask"],
                ]
            else:
                self_attn_state = [saved_state["prev_key"], saved_state["prev_value"]]
            return x, attn, self_attn_state
        return x, attn, None
class XLMTTwoHalfFFN(TransformerDecoderLayer):
    """Decoder layer with the standard sub-layer order (self-attention,
    FFN, cross-attention, FFN) but with the FFN split into two half-width
    blocks: fc3/fc4 after self-attention and fc1/fc2 after cross-attention.
    """

    def __init__(
        self, args, no_encoder_attn=False, add_bias_kv=False, add_zero_attn=False
    ):
        # NOTE: deliberately bypasses TransformerDecoderLayer.__init__ (calls
        # its parent, nn.Module, instead) so this class builds its own
        # sub-modules — in particular the two half-width FFN blocks.
        super(TransformerDecoderLayer, self).__init__()
        self.embed_dim = args.decoder_embed_dim
        self.dropout_module = FairseqDropout(
            args.dropout, module_name=self.__class__.__name__
        )
        self.quant_noise = getattr(args, "quant_noise_pq", 0)
        self.quant_noise_block_size = getattr(args, "quant_noise_pq_block_size", 8)
        self.cross_self_attention = getattr(args, "cross_self_attention", False)
        self.self_attn = self.build_self_attention(
            self.embed_dim,
            args,
            add_bias_kv=add_bias_kv,
            add_zero_attn=add_zero_attn,
        )
        self.activation_fn = utils.get_activation_fn(
            activation=str(args.activation_fn)
            if getattr(args, "activation_fn", None) is not None
            else "relu"
        )
        activation_dropout_p = getattr(args, "activation_dropout", 0) or 0
        if activation_dropout_p == 0:
            # for backwards compatibility with models that use args.relu_dropout
            activation_dropout_p = getattr(args, "relu_dropout", 0) or 0
        self.activation_dropout_module = FairseqDropout(
            float(activation_dropout_p), module_name=self.__class__.__name__
        )
        self.normalize_before = args.decoder_normalize_before
        # use layerNorm rather than FusedLayerNorm for exporting.
        # char_inputs can be used to determine this.
        # TODO remove this once we update apex with the fix
        export = getattr(args, "char_inputs", False)
        self.self_attn_layer_norm = LayerNorm(self.embed_dim, export=export)
        if no_encoder_attn:
            self.encoder_attn = None
            self.encoder_attn_layer_norm = None
        else:
            self.encoder_attn = self.build_encoder_attention(self.embed_dim, args)
            self.encoder_attn_layer_norm = LayerNorm(self.embed_dim, export=export)
        # Half-width FFN applied after cross-attention: fc1 -> fc2.
        self.fc1 = self.build_fc1(
            self.embed_dim,
            args.decoder_ffn_embed_dim // 2,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        self.fc2 = self.build_fc2(
            args.decoder_ffn_embed_dim // 2,
            self.embed_dim,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        # Half-width FFN applied after self-attention: fc3 -> fc4.
        self.fc3 = self.build_fc1(
            self.embed_dim,
            args.decoder_ffn_embed_dim // 2,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        self.fc4 = self.build_fc2(
            args.decoder_ffn_embed_dim // 2,
            self.embed_dim,
            self.quant_noise,
            self.quant_noise_block_size,
        )
        self.ffn_layer_norm = LayerNorm(self.embed_dim, export=export)
        self.final_layer_norm = LayerNorm(self.embed_dim, export=export)
        self.need_attn = True
        self.onnx_trace = False

    def forward(
        self,
        x,
        encoder_out: Optional[torch.Tensor] = None,
        encoder_padding_mask: Optional[torch.Tensor] = None,
        incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
        prev_self_attn_state: Optional[List[torch.Tensor]] = None,
        prev_attn_state: Optional[List[torch.Tensor]] = None,
        self_attn_mask: Optional[torch.Tensor] = None,
        self_attn_padding_mask: Optional[torch.Tensor] = None,
        need_attn: bool = False,
        need_head_weights: bool = False,
    ):
        """
        Args:
            x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)`
            encoder_padding_mask (ByteTensor, optional): binary
                ByteTensor of shape `(batch, src_len)` where padding
                elements are indicated by ``1``.
            need_attn (bool, optional): return attention weights
            need_head_weights (bool, optional): return attention weights
                for each head (default: return average over heads).

        Returns:
            encoded output of shape `(seq_len, batch, embed_dim)`
        """
        if need_head_weights:
            need_attn = True
        # --- self-attention sub-layer ---
        residual = x
        if self.normalize_before:
            x = self.self_attn_layer_norm(x)
        if prev_self_attn_state is not None:
            prev_key, prev_value = prev_self_attn_state[:2]
            saved_state: Dict[str, Optional[Tensor]] = {
                "prev_key": prev_key,
                "prev_value": prev_value,
            }
            if len(prev_self_attn_state) >= 3:
                saved_state["prev_key_padding_mask"] = prev_self_attn_state[2]
            assert incremental_state is not None
            self.self_attn._set_input_buffer(incremental_state, saved_state)
        _self_attn_input_buffer = self.self_attn._get_input_buffer(incremental_state)
        if self.cross_self_attention and not (
            incremental_state is not None
            and _self_attn_input_buffer is not None
            and "prev_key" in _self_attn_input_buffer
        ):
            # Cross-self-attention: prepend encoder output to keys/values
            # and extend masks accordingly.
            if self_attn_mask is not None:
                assert encoder_out is not None
                self_attn_mask = torch.cat(
                    (x.new_zeros(x.size(0), encoder_out.size(0)), self_attn_mask), dim=1
                )
            if self_attn_padding_mask is not None:
                if encoder_padding_mask is None:
                    assert encoder_out is not None
                    encoder_padding_mask = self_attn_padding_mask.new_zeros(
                        encoder_out.size(1), encoder_out.size(0)
                    )
                self_attn_padding_mask = torch.cat(
                    (encoder_padding_mask, self_attn_padding_mask), dim=1
                )
            assert encoder_out is not None
            y = torch.cat((encoder_out, x), dim=0)
        else:
            y = x
        x, attn = self.self_attn(
            query=x,
            key=y,
            value=y,
            key_padding_mask=self_attn_padding_mask,
            incremental_state=incremental_state,
            need_weights=False,
            attn_mask=self_attn_mask,
        )
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.self_attn_layer_norm(x)
        # --- first half-width FFN (fc3 -> fc4) ---
        residual = x
        if self.normalize_before:
            x = self.ffn_layer_norm(x)
        x = self.activation_fn(self.fc3(x))
        x = self.activation_dropout_module(x)
        x = self.fc4(x)
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.ffn_layer_norm(x)
        # --- cross-attention sub-layer (sets the returned `attn`) ---
        if self.encoder_attn is not None and encoder_out is not None:
            residual = x
            if self.normalize_before:
                x = self.encoder_attn_layer_norm(x)
            if prev_attn_state is not None:
                prev_key, prev_value = prev_attn_state[:2]
                saved_state: Dict[str, Optional[Tensor]] = {
                    "prev_key": prev_key,
                    "prev_value": prev_value,
                }
                if len(prev_attn_state) >= 3:
                    saved_state["prev_key_padding_mask"] = prev_attn_state[2]
                assert incremental_state is not None
                self.encoder_attn._set_input_buffer(incremental_state, saved_state)
            x, attn = self.encoder_attn(
                query=x,
                key=encoder_out,
                value=encoder_out,
                key_padding_mask=encoder_padding_mask,
                incremental_state=incremental_state,
                static_kv=True,
                need_weights=need_attn or (not self.training and self.need_attn),
                need_head_weights=need_head_weights,
            )
            x = self.dropout_module(x)
            x = self.residual_connection(x, residual)
            if not self.normalize_before:
                x = self.encoder_attn_layer_norm(x)
        # --- second half-width FFN (fc1 -> fc2) ---
        residual = x
        if self.normalize_before:
            x = self.final_layer_norm(x)
        x = self.activation_fn(self.fc1(x))
        x = self.activation_dropout_module(x)
        x = self.fc2(x)
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.final_layer_norm(x)
        # ONNX export: expose cached self-attn state.
        if self.onnx_trace and incremental_state is not None:
            saved_state = self.self_attn._get_input_buffer(incremental_state)
            assert saved_state is not None
            if self_attn_padding_mask is not None:
                self_attn_state = [
                    saved_state["prev_key"],
                    saved_state["prev_value"],
                    saved_state["prev_key_padding_mask"],
                ]
            else:
                self_attn_state = [saved_state["prev_key"], saved_state["prev_value"]]
            return x, attn, self_attn_state
        return x, attn, None
class XLMTCrossAttnSelfAttnLargeLayer(TransformerDecoderLayer):
    """Decoder layer whose self- and encoder-attention both use a widened
    1152-dim query/key projection (``qkprojdim``) while every other
    projection stays at the model embedding size."""

    def build_self_attention(
        self, embed_dim, args, add_bias_kv=False, add_zero_attn=False
    ):
        """Build the self-attention module with the widened q/k projection."""
        shared_kwargs = dict(
            kdim=embed_dim,
            vdim=embed_dim,
            qdim=embed_dim,
            outdim=embed_dim,
            qkprojdim=1152,  # widened query/key projection dimension
            dropout=args.attention_dropout,
            q_noise=self.quant_noise,
            qn_block_size=self.quant_noise_block_size,
        )
        return MultiheadAttention(
            embed_dim,
            args.decoder_attention_heads,
            add_bias_kv=add_bias_kv,
            add_zero_attn=add_zero_attn,
            # Plain self-attention unless cross-self-attention is requested.
            self_attention=not getattr(args, "cross_self_attention", False),
            **shared_kwargs,
        )

    def build_encoder_attention(self, embed_dim, args):
        """Build the encoder-decoder attention module with the widened q/k projection."""
        return MultiheadAttention(
            embed_dim,
            args.decoder_attention_heads,
            encoder_decoder_attention=True,
            kdim=embed_dim,
            vdim=embed_dim,
            qdim=embed_dim,
            outdim=embed_dim,
            qkprojdim=1152,  # widened query/key projection dimension
            dropout=args.attention_dropout,
            q_noise=self.quant_noise,
            qn_block_size=self.quant_noise_block_size,
        )
class XLMTCrossAttnLargeLayer(TransformerDecoderLayer):
    """Decoder layer whose encoder-decoder attention uses a widened 1152-dim
    query/key projection; self-attention is inherited unchanged."""

    def build_encoder_attention(self, embed_dim, args):
        """Build the cross-attention module with the widened q/k projection."""
        attention_kwargs = dict(
            kdim=embed_dim,
            vdim=embed_dim,
            qdim=embed_dim,
            outdim=embed_dim,
            qkprojdim=1152,  # widened query/key projection dimension
            dropout=args.attention_dropout,
            encoder_decoder_attention=True,
            q_noise=self.quant_noise,
            qn_block_size=self.quant_noise_block_size,
        )
        return MultiheadAttention(
            embed_dim, args.decoder_attention_heads, **attention_kwargs
        )
class XLMTCrossAttnFirstLayer(TransformerDecoderLayer):
    # Decoder layer variant that runs encoder-decoder (cross) attention BEFORE
    # self-attention, i.e. cross-attn -> self-attn -> FFN instead of the
    # standard self-attn -> cross-attn -> FFN ordering.
    def forward(
        self,
        x,
        encoder_out: Optional[torch.Tensor] = None,
        encoder_padding_mask: Optional[torch.Tensor] = None,
        incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
        prev_self_attn_state: Optional[List[torch.Tensor]] = None,
        prev_attn_state: Optional[List[torch.Tensor]] = None,
        self_attn_mask: Optional[torch.Tensor] = None,
        self_attn_padding_mask: Optional[torch.Tensor] = None,
        need_attn: bool = False,
        need_head_weights: bool = False,
    ):
        """
        Args:
            x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)`
            encoder_padding_mask (ByteTensor, optional): binary
                ByteTensor of shape `(batch, src_len)` where padding
                elements are indicated by ``1``.
            need_attn (bool, optional): return attention weights
            need_head_weights (bool, optional): return attention weights
                for each head (default: return average over heads).

        Returns:
            encoded output of shape `(seq_len, batch, embed_dim)`, plus the
            cross-attention weights (or None) and, when exporting to ONNX with
            incremental decoding, the cached self-attention state.
        """
        # Per-head attention weights imply attention weights are needed at all.
        if need_head_weights:
            need_attn = True

        # --- Encoder-decoder (cross) attention: runs FIRST in this variant.
        if self.encoder_attn is not None and encoder_out is not None:
            residual = x
            if self.normalize_before:
                x = self.encoder_attn_layer_norm(x)
            if prev_attn_state is not None:
                # Restore externally supplied cached cross-attention key/value
                # (and, if present, the key padding mask) into the incremental
                # decoding buffer.
                prev_key, prev_value = prev_attn_state[:2]
                saved_state: Dict[str, Optional[Tensor]] = {
                    "prev_key": prev_key,
                    "prev_value": prev_value,
                }
                if len(prev_attn_state) >= 3:
                    saved_state["prev_key_padding_mask"] = prev_attn_state[2]
                assert incremental_state is not None
                self.encoder_attn._set_input_buffer(incremental_state, saved_state)

            x, attn = self.encoder_attn(
                query=x,
                key=encoder_out,
                value=encoder_out,
                key_padding_mask=encoder_padding_mask,
                incremental_state=incremental_state,
                static_kv=True,  # encoder keys/values do not change across decode steps
                need_weights=need_attn or (not self.training and self.need_attn),
                need_head_weights=need_head_weights,
            )
            x = self.dropout_module(x)
            x = self.residual_connection(x, residual)
            if not self.normalize_before:
                x = self.encoder_attn_layer_norm(x)

        # --- Self-attention: runs SECOND in this variant.
        residual = x
        if self.normalize_before:
            x = self.self_attn_layer_norm(x)
        if prev_self_attn_state is not None:
            # Restore externally supplied cached self-attention state into the
            # incremental decoding buffer.
            prev_key, prev_value = prev_self_attn_state[:2]
            saved_state: Dict[str, Optional[Tensor]] = {
                "prev_key": prev_key,
                "prev_value": prev_value,
            }
            if len(prev_self_attn_state) >= 3:
                saved_state["prev_key_padding_mask"] = prev_self_attn_state[2]
            assert incremental_state is not None
            self.self_attn._set_input_buffer(incremental_state, saved_state)
        _self_attn_input_buffer = self.self_attn._get_input_buffer(incremental_state)
        if self.cross_self_attention and not (
            incremental_state is not None
            and _self_attn_input_buffer is not None
            and "prev_key" in _self_attn_input_buffer
        ):
            # Cross-self-attention: let the decoder attend over the encoder
            # output concatenated with its own states; extend masks to cover
            # the prepended encoder positions (zeros = attend / not padding).
            if self_attn_mask is not None:
                assert encoder_out is not None
                self_attn_mask = torch.cat(
                    (x.new_zeros(x.size(0), encoder_out.size(0)), self_attn_mask), dim=1
                )
            if self_attn_padding_mask is not None:
                if encoder_padding_mask is None:
                    assert encoder_out is not None
                    encoder_padding_mask = self_attn_padding_mask.new_zeros(
                        encoder_out.size(1), encoder_out.size(0)
                    )
                self_attn_padding_mask = torch.cat(
                    (encoder_padding_mask, self_attn_padding_mask), dim=1
                )
            assert encoder_out is not None
            y = torch.cat((encoder_out, x), dim=0)
        else:
            y = x

        x, attn = self.self_attn(
            query=x,
            key=y,
            value=y,
            key_padding_mask=self_attn_padding_mask,
            incremental_state=incremental_state,
            need_weights=False,
            attn_mask=self_attn_mask,
        )
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.self_attn_layer_norm(x)

        # --- Position-wise feed-forward network.
        residual = x
        if self.normalize_before:
            x = self.final_layer_norm(x)
        x = self.activation_fn(self.fc1(x))
        x = self.activation_dropout_module(x)
        x = self.fc2(x)
        x = self.dropout_module(x)
        x = self.residual_connection(x, residual)
        if not self.normalize_before:
            x = self.final_layer_norm(x)

        # --- ONNX export path: surface the cached self-attention state so the
        # exported graph can thread it through explicitly.
        if self.onnx_trace and incremental_state is not None:
            saved_state = self.self_attn._get_input_buffer(incremental_state)
            assert saved_state is not None
            if self_attn_padding_mask is not None:
                self_attn_state = [
                    saved_state["prev_key"],
                    saved_state["prev_value"],
                    saved_state["prev_key_padding_mask"],
                ]
            else:
                self_attn_state = [saved_state["prev_key"], saved_state["prev_value"]]
            return x, attn, self_attn_state
        return x, attn, None
@register_model_architecture(
    "xlmt_decoder_variant", "xlmt_decoder_variant"
)
def base_architecture(args):
    """Fill in default hyperparameters for the base ``xlmt_decoder_variant``.

    Each attribute is only assigned its default when the user did not set it;
    defaults that mirror another attribute (e.g. decoder dims following the
    encoder's) are resolved after their source attribute has been installed.
    """
    def _default(name, value):
        # Keep an explicitly provided value, otherwise install the default.
        setattr(args, name, getattr(args, name, value))

    _default("encoder_embed_path", None)
    _default("encoder_embed_dim", 768)
    _default("encoder_ffn_embed_dim", 3072)
    _default("encoder_layers", 12)
    _default("encoder_attention_heads", 12)
    _default("encoder_normalize_before", False)
    _default("encoder_learned_pos", True)
    _default("decoder_embed_path", None)
    _default("decoder_embed_dim", args.encoder_embed_dim)
    _default("decoder_ffn_embed_dim", args.encoder_ffn_embed_dim)
    _default("decoder_layers", 6)
    _default("decoder_attention_heads", 12)
    _default("decoder_normalize_before", False)
    _default("decoder_learned_pos", True)
    _default("attention_dropout", 0.0)
    _default("activation_dropout", 0.0)
    _default("activation_fn", "gelu")
    _default("dropout", 0.1)
    _default("adaptive_softmax_cutoff", None)
    _default("adaptive_softmax_dropout", 0)
    _default("share_decoder_input_output_embed", False)
    _default("share_all_embeddings", False)
    _default("no_token_positional_embeddings", False)
    _default("adaptive_input", False)
    _default("no_cross_attention", False)
    _default("cross_self_attention", False)
    _default("layer_wise_attention", False)
    _default("decoder_output_dim", args.decoder_embed_dim)
    _default("decoder_input_dim", args.decoder_embed_dim)
    _default("no_scale_embedding", True)
    _default("layernorm_embedding", True)
    _default("init_encoder_only", False)
    _default("init_decoder_only", False)
    _default("max_positions", 512)
    # Adapter-specific defaults.
    _default("adapter_dim", args.decoder_ffn_embed_dim)
    _default("adapter_method", "all")
    _default("drop_adapter", -1)
@register_model_architecture(
    "xlmt_decoder_variant", "xlmt_decoder_variant_large"
)
def base_architecture(args):
    """Fill in default hyperparameters for ``xlmt_decoder_variant_large``
    (24-layer, 1024-dim encoder with a 12-layer decoder).

    Defaults are applied only when the attribute is absent on ``args``;
    attribute-derived defaults are resolved after their source attribute.
    """
    def _default(name, value):
        # Keep an explicitly provided value, otherwise install the default.
        setattr(args, name, getattr(args, name, value))

    _default("encoder_embed_path", None)
    _default("encoder_embed_dim", 1024)
    _default("encoder_ffn_embed_dim", 4096)
    _default("encoder_layers", 24)
    _default("encoder_attention_heads", 16)
    _default("encoder_normalize_before", False)
    _default("encoder_learned_pos", True)
    _default("decoder_embed_path", None)
    _default("decoder_embed_dim", args.encoder_embed_dim)
    _default("decoder_ffn_embed_dim", args.encoder_ffn_embed_dim)
    _default("decoder_layers", 12)
    _default("decoder_attention_heads", 16)
    _default("decoder_normalize_before", False)
    _default("decoder_learned_pos", True)
    _default("attention_dropout", 0.0)
    _default("activation_dropout", 0.0)
    _default("activation_fn", "gelu")
    _default("dropout", 0.1)
    _default("adaptive_softmax_cutoff", None)
    _default("adaptive_softmax_dropout", 0)
    _default("share_decoder_input_output_embed", False)
    _default("share_all_embeddings", False)
    _default("no_token_positional_embeddings", False)
    _default("adaptive_input", False)
    _default("no_cross_attention", False)
    _default("cross_self_attention", False)
    _default("layer_wise_attention", False)
    _default("decoder_output_dim", args.decoder_embed_dim)
    _default("decoder_input_dim", args.decoder_embed_dim)
    _default("no_scale_embedding", True)
    _default("layernorm_embedding", True)
    _default("init_encoder_only", False)
    _default("init_decoder_only", False)
    _default("max_positions", 512)
    # Adapter-specific defaults.
    _default("use_adapter", False)
    _default("adapter_dropout", args.dropout)
    _default("freeze_adapter", False)
@register_model_architecture(
    "xlmt_decoder_variant", "xlmt_decoder_variant_large_from_deltalm_postnorm"
)
def base_architecture(args):
    """Fill in default hyperparameters for the large post-norm DeltaLM-derived
    variant (post-LN: ``*_normalize_before`` and ``layernorm_embedding`` off).

    Defaults are applied only when the attribute is absent on ``args``;
    attribute-derived defaults are resolved after their source attribute.
    """
    def _default(name, value):
        # Keep an explicitly provided value, otherwise install the default.
        setattr(args, name, getattr(args, name, value))

    _default("encoder_embed_path", None)
    _default("encoder_embed_dim", 1024)
    _default("encoder_ffn_embed_dim", 4096)
    _default("encoder_layers", 24)
    _default("encoder_attention_heads", 16)
    _default("encoder_normalize_before", False)
    _default("encoder_learned_pos", True)
    _default("decoder_embed_path", None)
    _default("decoder_embed_dim", args.encoder_embed_dim)
    _default("decoder_ffn_embed_dim", args.encoder_ffn_embed_dim)
    _default("decoder_layers", 12)
    _default("decoder_attention_heads", 16)
    _default("decoder_normalize_before", False)
    _default("decoder_learned_pos", True)
    _default("attention_dropout", 0.0)
    _default("activation_dropout", 0.0)
    _default("activation_fn", "gelu")
    _default("dropout", 0.1)
    _default("adaptive_softmax_cutoff", None)
    _default("adaptive_softmax_dropout", 0)
    _default("share_decoder_input_output_embed", False)
    _default("share_all_embeddings", False)
    _default("no_token_positional_embeddings", False)
    _default("adaptive_input", False)
    _default("no_cross_attention", False)
    _default("cross_self_attention", False)
    _default("layer_wise_attention", False)
    _default("decoder_output_dim", args.decoder_embed_dim)
    _default("decoder_input_dim", args.decoder_embed_dim)
    _default("no_scale_embedding", True)
    _default("layernorm_embedding", False)
    _default("init_encoder_only", False)
    _default("init_decoder_only", False)
    _default("max_positions", 512)
    # Adapter-specific defaults.
    _default("use_adapter", False)
    _default("adapter_dropout", args.dropout)
    _default("freeze_adapter", False)
@register_model_architecture(
    "xlmt_decoder_variant", "xlmt_decoder_variant_large_from_deltalm_prenorm"
)
def base_architecture(args):
    """Fill in default hyperparameters for the large pre-norm DeltaLM-derived
    variant (pre-LN: both ``*_normalize_before`` flags default to True).

    Defaults are applied only when the attribute is absent on ``args``;
    attribute-derived defaults are resolved after their source attribute.
    """
    def _default(name, value):
        # Keep an explicitly provided value, otherwise install the default.
        setattr(args, name, getattr(args, name, value))

    _default("encoder_embed_path", None)
    _default("encoder_embed_dim", 1024)
    _default("encoder_ffn_embed_dim", 4096)
    _default("encoder_layers", 24)
    _default("encoder_attention_heads", 16)
    _default("encoder_normalize_before", True)
    _default("encoder_learned_pos", True)
    _default("decoder_embed_path", None)
    _default("decoder_embed_dim", args.encoder_embed_dim)
    _default("decoder_ffn_embed_dim", args.encoder_ffn_embed_dim)
    _default("decoder_layers", 12)
    _default("decoder_attention_heads", 16)
    _default("decoder_normalize_before", True)
    _default("decoder_learned_pos", True)
    _default("attention_dropout", 0.0)
    _default("activation_dropout", 0.0)
    _default("activation_fn", "gelu")
    _default("dropout", 0.1)
    _default("adaptive_softmax_cutoff", None)
    _default("adaptive_softmax_dropout", 0)
    _default("share_decoder_input_output_embed", False)
    _default("share_all_embeddings", False)
    _default("no_token_positional_embeddings", False)
    _default("adaptive_input", False)
    _default("no_cross_attention", False)
    _default("cross_self_attention", False)
    _default("layer_wise_attention", False)
    _default("decoder_output_dim", args.decoder_embed_dim)
    _default("decoder_input_dim", args.decoder_embed_dim)
    _default("no_scale_embedding", True)
    _default("layernorm_embedding", False)
    _default("init_encoder_only", False)
    _default("init_decoder_only", False)
    _default("max_positions", 512)
    # Adapter-specific defaults.
    _default("use_adapter", False)
    _default("adapter_dropout", args.dropout)
    _default("freeze_adapter", False)
| 45.888786
| 204
| 0.611231
| 12,148
| 99,028
| 4.650477
| 0.032845
| 0.039349
| 0.033349
| 0.029932
| 0.902379
| 0.890041
| 0.88004
| 0.860321
| 0.855241
| 0.845612
| 0
| 0.00727
| 0.279062
| 99,028
| 2,158
| 205
| 45.888786
| 0.78404
| 0.05656
| 0
| 0.788827
| 0
| 0
| 0.123035
| 0.036998
| 0
| 0
| 0
| 0.001854
| 0.017877
| 1
| 0.018994
| false
| 0
| 0.010615
| 0.003911
| 0.051397
| 0.003352
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
682be5babb3ff09bf360f4aa1f9c45113e6bc394
| 36,631
|
py
|
Python
|
build/PureCloudPlatformClientV2/apis/response_management_api.py
|
cjohnson-ctl/platform-client-sdk-python
|
38ce53bb8012b66e8a43cc8bd6ff00cf6cc99100
|
[
"MIT"
] | 10
|
2019-02-22T00:27:08.000Z
|
2021-09-12T23:23:44.000Z
|
libs/PureCloudPlatformClientV2/apis/response_management_api.py
|
rocketbot-cl/genesysCloud
|
dd9d9b5ebb90a82bab98c0d88b9585c22c91f333
|
[
"MIT"
] | 5
|
2018-06-07T08:32:00.000Z
|
2021-07-28T17:37:26.000Z
|
libs/PureCloudPlatformClientV2/apis/response_management_api.py
|
rocketbot-cl/genesysCloud
|
dd9d9b5ebb90a82bab98c0d88b9585c22c91f333
|
[
"MIT"
] | 6
|
2020-04-09T17:43:07.000Z
|
2022-02-17T08:48:05.000Z
|
# coding: utf-8
"""
ResponseManagementApi.py
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ResponseManagementApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def delete_responsemanagement_library(self, library_id, **kwargs):
"""
Delete an existing response library.
This will remove any responses associated with the library.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_responsemanagement_library(library_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str library_id: Library ID (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['library_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_responsemanagement_library" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'library_id' is set
if ('library_id' not in params) or (params['library_id'] is None):
raise ValueError("Missing the required parameter `library_id` when calling `delete_responsemanagement_library`")
resource_path = '/api/v2/responsemanagement/libraries/{libraryId}'.replace('{format}', 'json')
path_params = {}
if 'library_id' in params:
path_params['libraryId'] = params['library_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def delete_responsemanagement_response(self, response_id, **kwargs):
"""
Delete an existing response.
This will remove the response from any libraries associated with it.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_responsemanagement_response(response_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str response_id: Response ID (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['response_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_responsemanagement_response" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'response_id' is set
if ('response_id' not in params) or (params['response_id'] is None):
raise ValueError("Missing the required parameter `response_id` when calling `delete_responsemanagement_response`")
resource_path = '/api/v2/responsemanagement/responses/{responseId}'.replace('{format}', 'json')
path_params = {}
if 'response_id' in params:
path_params['responseId'] = params['response_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_responsemanagement_libraries(self, **kwargs):
"""
Gets a list of existing response libraries.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_responsemanagement_libraries(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page_number: Page number
:param int page_size: Page size
:param str messaging_template_filter: Returns a list of libraries that contain responses with at least one messaging template defined for a specific message channel
:return: LibraryEntityListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_number', 'page_size', 'messaging_template_filter']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_responsemanagement_libraries" % key
)
params[key] = val
del params['kwargs']
resource_path = '/api/v2/responsemanagement/libraries'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'page_number' in params:
query_params['pageNumber'] = params['page_number']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'messaging_template_filter' in params:
query_params['messagingTemplateFilter'] = params['messaging_template_filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='LibraryEntityListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_responsemanagement_library(self, library_id, **kwargs):
"""
Get details about an existing response library.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_responsemanagement_library(library_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str library_id: Library ID (required)
:return: Library
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['library_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_responsemanagement_library" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'library_id' is set
if ('library_id' not in params) or (params['library_id'] is None):
raise ValueError("Missing the required parameter `library_id` when calling `get_responsemanagement_library`")
resource_path = '/api/v2/responsemanagement/libraries/{libraryId}'.replace('{format}', 'json')
path_params = {}
if 'library_id' in params:
path_params['libraryId'] = params['library_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Library',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_responsemanagement_response(self, response_id, **kwargs):
"""
Get details about an existing response.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_responsemanagement_response(response_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str response_id: Response ID (required)
:param str expand: Expand instructions for the return value.
:return: Response
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['response_id', 'expand']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_responsemanagement_response" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'response_id' is set
if ('response_id' not in params) or (params['response_id'] is None):
raise ValueError("Missing the required parameter `response_id` when calling `get_responsemanagement_response`")
resource_path = '/api/v2/responsemanagement/responses/{responseId}'.replace('{format}', 'json')
path_params = {}
if 'response_id' in params:
path_params['responseId'] = params['response_id']
query_params = {}
if 'expand' in params:
query_params['expand'] = params['expand']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Response',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_responsemanagement_responses(self, library_id, **kwargs):
"""
Gets a list of existing responses.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_responsemanagement_responses(library_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str library_id: Library ID (required)
:param int page_number: Page number
:param int page_size: Page size
:param str expand: Expand instructions for the return value.
:return: ResponseEntityListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['library_id', 'page_number', 'page_size', 'expand']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_responsemanagement_responses" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'library_id' is set
if ('library_id' not in params) or (params['library_id'] is None):
raise ValueError("Missing the required parameter `library_id` when calling `get_responsemanagement_responses`")
resource_path = '/api/v2/responsemanagement/responses'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'library_id' in params:
query_params['libraryId'] = params['library_id']
if 'page_number' in params:
query_params['pageNumber'] = params['page_number']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'expand' in params:
query_params['expand'] = params['expand']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseEntityListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_responsemanagement_libraries(self, body, **kwargs):
    """
    Create a response library.

    Synchronous by default; pass a `callback` function to perform the HTTP
    request asynchronously, in which case the request thread is returned
    instead of the parsed response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.post_responsemanagement_libraries(body, callback=callback_function)

    :param Library body: Library (required)
    :param callback function: The callback function for asynchronous request. (optional)
    :return: Library, or the request thread when called asynchronously
    """
    # Reject keyword arguments this endpoint does not understand.
    for key in kwargs:
        if key not in ('body', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_responsemanagement_libraries" % key
            )
    # `body` is required and may not be None.
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `post_responsemanagement_libraries`")

    resource_path = '/api/v2/responsemanagement/libraries'.replace('{format}', 'json')

    # Only send an Accept header when content negotiation yields one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        resource_path, 'POST',
        {},   # no path parameters
        {},   # no query parameters
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='Library',
        auth_settings=['PureCloud OAuth'],
        callback=kwargs.get('callback'))
def post_responsemanagement_responses(self, body, **kwargs):
    """
    Create a response.

    Synchronous by default; pass a `callback` function to perform the HTTP
    request asynchronously, in which case the request thread is returned
    instead of the parsed response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.post_responsemanagement_responses(body, callback=callback_function)

    :param Response body: Response (required)
    :param str expand: Expand instructions for the return value.
    :param callback function: The callback function for asynchronous request. (optional)
    :return: Response, or the request thread when called asynchronously
    """
    # Reject keyword arguments this endpoint does not understand.
    for key in kwargs:
        if key not in ('body', 'expand', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_responsemanagement_responses" % key
            )
    # `body` is required and may not be None.
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `post_responsemanagement_responses`")

    resource_path = '/api/v2/responsemanagement/responses'.replace('{format}', 'json')

    # `expand` is the only (optional) query parameter.
    query_params = {}
    if 'expand' in kwargs:
        query_params['expand'] = kwargs['expand']

    # Only send an Accept header when content negotiation yields one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        resource_path, 'POST',
        {},   # no path parameters
        query_params,
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='Response',
        auth_settings=['PureCloud OAuth'],
        callback=kwargs.get('callback'))
def post_responsemanagement_responses_query(self, body, **kwargs):
    """
    Query responses

    Synchronous by default; pass a `callback` function to perform the HTTP
    request asynchronously, in which case the request thread is returned
    instead of the parsed response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.post_responsemanagement_responses_query(body, callback=callback_function)

    :param ResponseQueryRequest body: Response (required)
    :param callback function: The callback function for asynchronous request. (optional)
    :return: ResponseQueryResults, or the request thread when called asynchronously
    """
    # Reject keyword arguments this endpoint does not understand.
    for key in kwargs:
        if key not in ('body', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_responsemanagement_responses_query" % key
            )
    # `body` is required and may not be None.
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `post_responsemanagement_responses_query`")

    resource_path = '/api/v2/responsemanagement/responses/query'.replace('{format}', 'json')

    # Only send an Accept header when content negotiation yields one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        resource_path, 'POST',
        {},   # no path parameters
        {},   # no query parameters
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='ResponseQueryResults',
        auth_settings=['PureCloud OAuth'],
        callback=kwargs.get('callback'))
def put_responsemanagement_library(self, library_id, body, **kwargs):
    """
    Update an existing response library.
    Fields that can be updated: name. The most recent version is required for updates.

    Synchronous by default; pass a `callback` function to perform the HTTP
    request asynchronously, in which case the request thread is returned
    instead of the parsed response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.put_responsemanagement_library(library_id, body, callback=callback_function)

    :param str library_id: Library ID (required)
    :param Library body: Library (required)
    :param callback function: The callback function for asynchronous request. (optional)
    :return: Library, or the request thread when called asynchronously
    """
    # Reject keyword arguments this endpoint does not understand.
    for key in kwargs:
        if key not in ('library_id', 'body', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method put_responsemanagement_library" % key
            )
    # Both parameters are required and may not be None.
    if library_id is None:
        raise ValueError("Missing the required parameter `library_id` when calling `put_responsemanagement_library`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `put_responsemanagement_library`")

    resource_path = '/api/v2/responsemanagement/libraries/{libraryId}'.replace('{format}', 'json')
    path_params = {'libraryId': library_id}

    # Only send an Accept header when content negotiation yields one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        resource_path, 'PUT',
        path_params,
        {},   # no query parameters
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='Library',
        auth_settings=['PureCloud OAuth'],
        callback=kwargs.get('callback'))
def put_responsemanagement_response(self, response_id, body, **kwargs):
    """
    Update an existing response.
    Fields that can be updated: name, libraries, and texts. The most recent version is required for updates.

    Synchronous by default; pass a `callback` function to perform the HTTP
    request asynchronously, in which case the request thread is returned
    instead of the parsed response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.put_responsemanagement_response(response_id, body, callback=callback_function)

    :param str response_id: Response ID (required)
    :param Response body: Response (required)
    :param str expand: Expand instructions for the return value.
    :param callback function: The callback function for asynchronous request. (optional)
    :return: Response, or the request thread when called asynchronously
    """
    # Reject keyword arguments this endpoint does not understand.
    for key in kwargs:
        if key not in ('response_id', 'body', 'expand', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method put_responsemanagement_response" % key
            )
    # Both parameters are required and may not be None.
    if response_id is None:
        raise ValueError("Missing the required parameter `response_id` when calling `put_responsemanagement_response`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `put_responsemanagement_response`")

    resource_path = '/api/v2/responsemanagement/responses/{responseId}'.replace('{format}', 'json')
    path_params = {'responseId': response_id}

    # `expand` is the only (optional) query parameter.
    query_params = {}
    if 'expand' in kwargs:
        query_params['expand'] = kwargs['expand']

    # Only send an Accept header when content negotiation yields one.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        resource_path, 'PUT',
        path_params,
        query_params,
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='Response',
        auth_settings=['PureCloud OAuth'],
        callback=kwargs.get('callback'))
| 39.01065
| 172
| 0.553821
| 3,517
| 36,631
| 5.581746
| 0.065965
| 0.040344
| 0.02384
| 0.021293
| 0.914064
| 0.903927
| 0.884519
| 0.880852
| 0.87749
| 0.875045
| 0
| 0.000946
| 0.365128
| 36,631
| 938
| 173
| 39.052239
| 0.84318
| 0.26595
| 0
| 0.846626
| 0
| 0
| 0.188894
| 0.054066
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02454
| false
| 0
| 0.014315
| 0
| 0.063395
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
689b298d5e0d4bc0730c0526a9b9adc0cc11db33
| 12,058
|
py
|
Python
|
tests/unit/intersection/test_policy_shard.py
|
etta-trust/PolicyGlass
|
72157189a9af3172e6efbdcc2050969796cfa99f
|
[
"MIT"
] | 49
|
2021-12-21T23:15:55.000Z
|
2022-03-28T09:38:30.000Z
|
tests/unit/intersection/test_policy_shard.py
|
etta-trust/PolicyGlass
|
72157189a9af3172e6efbdcc2050969796cfa99f
|
[
"MIT"
] | 3
|
2021-12-23T22:02:02.000Z
|
2022-01-10T14:16:24.000Z
|
tests/unit/intersection/test_policy_shard.py
|
etta-trust/PolicyGlass
|
72157189a9af3172e6efbdcc2050969796cfa99f
|
[
"MIT"
] | 1
|
2022-02-22T11:03:27.000Z
|
2022-02-22T11:03:27.000Z
|
import pytest
from policyglass import PolicyShard
from policyglass.action import Action, EffectiveAction
from policyglass.condition import Condition, EffectiveCondition
from policyglass.principal import EffectivePrincipal, Principal
from policyglass.resource import EffectiveResource, Resource
def test_bad_intersection():
    """Intersecting a PolicyShard with a non-PolicyShard raises ValueError."""
    shard = PolicyShard(
        effect="Allow",
        effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
        effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
        effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
    )
    with pytest.raises(ValueError) as ex:
        shard.intersection(Action("S3:*"))
    assert "Cannot intersect PolicyShard with Action" in str(ex.value)
# Table of intersection scenarios, consumed by test_intersection below.
# Each entry maps "first" and "second" (the two PolicyShards intersected)
# to "result" (the expected value of first.intersection(second);
# a result of None means the shards are expected not to intersect).
INTERSECTION_SCENARIOS = {
    # Second shard's action is narrower than the first's.
    "test_subset": {
        "first": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
        ),
        "second": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:GetObject"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
        ),
        "result": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:GetObject"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
        ),
    },
    # Identical shards intersect to themselves.
    "test_exactly_equal": {
        "first": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
        ),
        "second": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
        ),
        "result": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
        ),
    },
    # Shards whose conditions share no keys are expected not to intersect.
    "test_disjoint_conditions": {
        "first": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
            effective_condition=EffectiveCondition(
                frozenset({Condition(key="aws:PrincipalOrgId", operator="StringNotEquals", values=["o-123456"])})
            ),
        ),
        "second": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
            effective_condition=EffectiveCondition(
                frozenset(
                    {
                        Condition(key="s3:x-amz-server-side-encryption", operator="StringEquals", values=["AES256"]),
                    }
                )
            ),
        ),
        "result": None,
    },
    # An unconditional shard intersected with a conditional one drops the condition.
    "test_matching_equal_one_with_one_without_condition": {
        "first": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
        ),
        "second": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
            effective_condition=EffectiveCondition(
                frozenset(
                    {
                        Condition(key="aws:PrincipalOrgId", operator="StringNotEquals", values=["o-123456"]),
                    }
                )
            ),
        ),
        "result": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
        ),
    },
    # First shard has fewer conditions (wider) but an action exclusion;
    # the expected result keeps first's exclusion and condition set.
    "test_matching_subset_conditions_larger_first": {
        "first": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset({Action("s3:PutObject")})),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
            effective_condition=EffectiveCondition(
                frozenset({Condition(key="aws:PrincipalOrgId", operator="StringNotEquals", values=["o-123456"])})
            ),
        ),
        "second": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
            effective_condition=EffectiveCondition(
                frozenset(
                    {
                        Condition(key="aws:PrincipalOrgId", operator="StringNotEquals", values=["o-123456"]),
                        Condition(key="s3:x-amz-server-side-encryption", operator="StringEquals", values=["AES256"]),
                    }
                )
            ),
        ),
        "result": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset({Action("s3:PutObject")})),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
            effective_condition=EffectiveCondition(
                frozenset({Condition(key="aws:PrincipalOrgId", operator="StringNotEquals", values=["o-123456"])})
            ),
        ),
    },
    # Mirror of the previous scenario with the operands swapped;
    # the expected result carries second's exclusion and both conditions.
    "test_matching_subset_conditions_smaller_first": {
        "first": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
            effective_condition=EffectiveCondition(
                frozenset(
                    {
                        Condition(key="aws:PrincipalOrgId", operator="StringNotEquals", values=["o-123456"]),
                        Condition(key="s3:x-amz-server-side-encryption", operator="StringEquals", values=["AES256"]),
                    }
                )
            ),
        ),
        "second": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset({Action("s3:PutObject")})),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
            effective_condition=EffectiveCondition(
                frozenset({Condition(key="aws:PrincipalOrgId", operator="StringNotEquals", values=["o-123456"])})
            ),
        ),
        "result": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset({Action("s3:PutObject")})),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
            effective_condition=EffectiveCondition(
                frozenset(
                    {
                        Condition(key="s3:x-amz-server-side-encryption", operator="StringEquals", values=["AES256"]),
                        Condition(key="aws:PrincipalOrgId", operator="StringNotEquals", values=["o-123456"]),
                    }
                )
            ),
        ),
    },
    # Allow intersected with a conditional Deny: the expected result is the
    # Allow narrowed by the Deny's resource exclusions, without the condition.
    "test_allow_without_condition_vs_deny_with_condition": {
        "first": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(inclusion=Resource("*"), exclusions=frozenset()),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
        ),
        "second": PolicyShard(
            effect="Deny",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(
                inclusion=Resource("*"), exclusions=frozenset({Resource("arn:aws:s3:::examplebucket/*")})
            ),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
            effective_condition=EffectiveCondition(
                frozenset(
                    {Condition(key="s3:x-amz-server-side-encryption", operator="StringNotEquals", values=["AES256"])}
                )
            ),
        ),
        "result": PolicyShard(
            effect="Allow",
            effective_action=EffectiveAction(inclusion=Action("s3:*"), exclusions=frozenset()),
            effective_resource=EffectiveResource(
                inclusion=Resource("*"), exclusions=frozenset({Resource("arn:aws:s3:::examplebucket/*")})
            ),
            effective_principal=EffectivePrincipal(inclusion=Principal(type="AWS", value="*"), exclusions=frozenset()),
        ),
    },
}
@pytest.mark.parametrize("_, scenario", INTERSECTION_SCENARIOS.items())
def test_intersection(_, scenario):
    """Each scenario's first.intersection(second) must equal its expected result."""
    expected = scenario["result"]
    assert scenario["first"].intersection(scenario["second"]) == expected
| 53.830357
| 119
| 0.627799
| 918
| 12,058
| 8.127451
| 0.082789
| 0.160434
| 0.172631
| 0.109771
| 0.900281
| 0.900281
| 0.900281
| 0.900281
| 0.900281
| 0.900281
| 0
| 0.010362
| 0.23163
| 12,058
| 223
| 120
| 54.071749
| 0.794927
| 0
| 0
| 0.767442
| 0
| 0
| 0.116603
| 0.035246
| 0
| 0
| 0
| 0
| 0.009302
| 1
| 0.009302
| false
| 0
| 0.027907
| 0
| 0.037209
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
689cd018be661cc101bfa18518d113bc1752905d
| 6,831
|
py
|
Python
|
release/stubs.min/System/Windows/Forms/__init___parts/ScrollBarRenderer.py
|
YKato521/ironpython-stubs
|
b1f7c580de48528490b3ee5791b04898be95a9ae
|
[
"MIT"
] | null | null | null |
release/stubs.min/System/Windows/Forms/__init___parts/ScrollBarRenderer.py
|
YKato521/ironpython-stubs
|
b1f7c580de48528490b3ee5791b04898be95a9ae
|
[
"MIT"
] | null | null | null |
release/stubs.min/System/Windows/Forms/__init___parts/ScrollBarRenderer.py
|
YKato521/ironpython-stubs
|
b1f7c580de48528490b3ee5791b04898be95a9ae
|
[
"MIT"
] | null | null | null |
# IronPython stub: every method body below is a placeholder (`pass`); the
# docstrings describe the .NET API this stub mirrors.
class ScrollBarRenderer(object):
    """ Provides methods used to render a scroll bar control with visual styles. This class cannot be inherited. """

    @staticmethod
    def DrawArrowButton(g, bounds, state):
        """
        DrawArrowButton(g: Graphics,bounds: Rectangle,state: ScrollBarArrowButtonState)

        Draws a scroll arrow with visual styles.

        g: The System.Drawing.Graphics used to draw the scroll arrow.
        bounds: The System.Drawing.Rectangle that specifies the bounds of the scroll arrow.
        state: One of the System.Windows.Forms.VisualStyles.ScrollBarArrowButtonState values that specifies the
        visual state of the scroll arrow.
        """
        pass

    @staticmethod
    def DrawHorizontalThumb(g, bounds, state):
        """
        DrawHorizontalThumb(g: Graphics,bounds: Rectangle,state: ScrollBarState)

        Draws a horizontal scroll box (also known as the thumb) with visual styles.

        g: The System.Drawing.Graphics used to draw the scroll box.
        bounds: The System.Drawing.Rectangle that specifies the bounds of the scroll box.
        state: One of the System.Windows.Forms.VisualStyles.ScrollBarState values that specifies the visual
        state of the scroll box.
        """
        pass

    @staticmethod
    def DrawHorizontalThumbGrip(g, bounds, state):
        """
        DrawHorizontalThumbGrip(g: Graphics,bounds: Rectangle,state: ScrollBarState)

        Draws a grip on a horizontal scroll box (also known as the thumb) with visual styles.

        g: The System.Drawing.Graphics used to draw the scroll box grip.
        bounds: The System.Drawing.Rectangle that specifies the bounds of the scroll box grip.
        state: One of the System.Windows.Forms.VisualStyles.ScrollBarState values that specifies the visual
        state of the scroll box grip.
        """
        pass

    @staticmethod
    def DrawLeftHorizontalTrack(g, bounds, state):
        """
        DrawLeftHorizontalTrack(g: Graphics,bounds: Rectangle,state: ScrollBarState)

        Draws a horizontal scroll bar track with visual styles.

        g: The System.Drawing.Graphics used to draw the scroll bar track.
        bounds: The System.Drawing.Rectangle that specifies the bounds of the scroll bar track.
        state: One of the System.Windows.Forms.VisualStyles.ScrollBarState values that specifies the visual
        state of the scroll bar track.
        """
        pass

    @staticmethod
    def DrawLowerVerticalTrack(g, bounds, state):
        """
        DrawLowerVerticalTrack(g: Graphics,bounds: Rectangle,state: ScrollBarState)

        Draws a vertical scroll bar track with visual styles.

        g: The System.Drawing.Graphics used to draw the scroll bar track.
        bounds: The System.Drawing.Rectangle that specifies the bounds of the scroll bar track.
        state: One of the System.Windows.Forms.VisualStyles.ScrollBarState values that specifies the visual
        state of the scroll bar track.
        """
        pass

    @staticmethod
    def DrawRightHorizontalTrack(g, bounds, state):
        """
        DrawRightHorizontalTrack(g: Graphics,bounds: Rectangle,state: ScrollBarState)

        Draws a horizontal scroll bar track with visual styles.

        g: The System.Drawing.Graphics used to draw the scroll bar track.
        bounds: The System.Drawing.Rectangle that specifies the bounds of the scroll bar track.
        state: One of the System.Windows.Forms.VisualStyles.ScrollBarState values that specifies the visual
        state of the scroll bar track.
        """
        pass

    @staticmethod
    def DrawSizeBox(g, bounds, state):
        """
        DrawSizeBox(g: Graphics,bounds: Rectangle,state: ScrollBarSizeBoxState)

        Draws a scroll bar sizing handle with visual styles.

        g: The System.Drawing.Graphics used to draw the sizing handle.
        bounds: The System.Drawing.Rectangle that specifies the bounds of the sizing handle.
        state: One of the System.Windows.Forms.VisualStyles.ScrollBarSizeBoxState values that specifies the
        visual state of the sizing handle.
        """
        pass

    @staticmethod
    def DrawUpperVerticalTrack(g, bounds, state):
        """
        DrawUpperVerticalTrack(g: Graphics,bounds: Rectangle,state: ScrollBarState)

        Draws a vertical scroll bar track with visual styles.

        g: The System.Drawing.Graphics used to draw the scroll bar track.
        bounds: The System.Drawing.Rectangle that specifies the bounds of the scroll bar track.
        state: One of the System.Windows.Forms.VisualStyles.ScrollBarState values that specifies the visual
        state of the scroll bar track.
        """
        pass

    @staticmethod
    def DrawVerticalThumb(g, bounds, state):
        """
        DrawVerticalThumb(g: Graphics,bounds: Rectangle,state: ScrollBarState)

        Draws a vertical scroll box (also known as the thumb) with visual styles.

        g: The System.Drawing.Graphics used to draw the scroll box.
        bounds: The System.Drawing.Rectangle that specifies the bounds of the scroll box.
        state: One of the System.Windows.Forms.VisualStyles.ScrollBarState values that specifies the visual
        state of the scroll box.
        """
        pass

    @staticmethod
    def DrawVerticalThumbGrip(g, bounds, state):
        """
        DrawVerticalThumbGrip(g: Graphics,bounds: Rectangle,state: ScrollBarState)

        Draws a grip on a vertical scroll box (also known as the thumb) with visual styles.

        g: The System.Drawing.Graphics used to draw the scroll box grip.
        bounds: The System.Drawing.Rectangle that specifies the bounds of the scroll box grip.
        state: One of the System.Windows.Forms.VisualStyles.ScrollBarState values that specifies the visual
        state of the scroll box grip.
        """
        pass

    @staticmethod
    def GetSizeBoxSize(g, state):
        """
        GetSizeBoxSize(g: Graphics,state: ScrollBarState) -> Size

        Returns the size of the sizing handle.

        g: The System.Drawing.Graphics this operation will use.
        state: One of the System.Windows.Forms.VisualStyles.ScrollBarState values that specifies the visual
        state of the sizing handle.
        Returns: A System.Drawing.Size that specifies the size of the sizing handle.
        """
        pass

    @staticmethod
    def GetThumbGripSize(g, state):
        """
        GetThumbGripSize(g: Graphics,state: ScrollBarState) -> Size

        Returns the size of the scroll box grip.

        g: The System.Drawing.Graphics this operation will use.
        state: One of the System.Windows.Forms.VisualStyles.ScrollBarState values that specifies the visual
        state of the scroll box grip.
        Returns: A System.Drawing.Size that specifies the size of the scroll box grip.
        """
        pass

    # NOTE(review): stub constant hard-coded to True; the real .NET value
    # depends on whether the OS has visual styles enabled — confirm if relied on.
    IsSupported = True
| 28.344398
| 117
| 0.695506
| 843
| 6,831
| 5.635824
| 0.088968
| 0.039992
| 0.080825
| 0.042938
| 0.805515
| 0.793307
| 0.789939
| 0.773942
| 0.755841
| 0.74658
| 0
| 0
| 0.242131
| 6,831
| 240
| 118
| 28.4625
| 0.917713
| 0.761089
| 0
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.315789
| false
| 0.315789
| 0
| 0
| 0.368421
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
9bee414d84d4876d2e0c8aa66671b7c039a65d5b
| 14,930
|
py
|
Python
|
install/app_store/tk-framework-widget/v0.2.6/python/thumbnail_widget/ui/resources_rc.py
|
JoanAzpeitia/lp_sg
|
e0ee79555e419dd2ae3a5f31e5515b3f40b22a62
|
[
"MIT"
] | null | null | null |
install/app_store/tk-framework-widget/v0.2.6/python/thumbnail_widget/ui/resources_rc.py
|
JoanAzpeitia/lp_sg
|
e0ee79555e419dd2ae3a5f31e5515b3f40b22a62
|
[
"MIT"
] | null | null | null |
install/app_store/tk-framework-widget/v0.2.6/python/thumbnail_widget/ui/resources_rc.py
|
JoanAzpeitia/lp_sg
|
e0ee79555e419dd2ae3a5f31e5515b3f40b22a62
|
[
"MIT"
] | 1
|
2020-02-15T10:42:56.000Z
|
2020-02-15T10:42:56.000Z
|
# -*- coding: utf-8 -*-
# Resource object code
#
# by: The Resource Compiler for PySide (Qt v4.8.7)
#
# WARNING! All changes made in this file will be lost!
from tank.platform.qt import QtCore
qt_resource_data = "\x00\x00\x08 \x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x91\x00\x00\x00\x91\x08\x04\x00\x00\x00i\xd1\x92\xa8\x00\x00\x00\x04gAMA\x00\x00\xb1\x8f\x0b\xfca\x05\x00\x00\x00 cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x00\x02bKGD\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xe1\x03\x0e\x10\x12\x0bE\xb3eO\x00\x00\x07\x13IDATx\xda\xed\x9dMlTE\x1c\xc0\x7f\xaf\xddm\xa1\xc5 P\x0a\xc5\xa4\x82T\x11\x0c\x15\x90D\x0dx(\x22\x1e\xb8\x98\x18\x221A\x8d\x1fED\xb8\xec\x1e\xda\x83\x1c\xea\x81\x0dv\x13BP8h\xd0\xa8'\x88$\xc6\x0f$\x1aH\xe4`\x0f\x1a\xfc\xe2\xcb\xa2\x06\x82\xe5c\x1bR>J\xbb\xdb\xf6y(\x85\xee\x9b\xb7;\xdb\xdd73\xdb\xed\xfcN\xbc\xd9\xb7\xb3\xff\xf7c\xe6?\xef\xcd\x9b\xf7\xea\xb8X\xb2Sf:\x80\xe2\xc7*\x92b\x15I\xb1\x8a\xa4XER\xac\x22)V\x91\x14\xabH\x8aU$\xc5*\x92b\x15I\xb1\x8a\xa4XER\xac\x22)V\x91\x14\xabHJ\xc8[p.\xff\xba\xeaif\x05\xf7I\xb4\xf7q\x92c\xec\xa5\xdf\xf4\xa1g;\x90\xd18\xdeY\xc7\xbc\x15\xbd\xc2\x07L\xcey\xef\x7fh\xe6{\xd3*2\x91\xae(\xa8\x8e\xf6<\xfb\xc6 
\x08\xe6q\x88-\xa6U\xe4F0\x8aj\xf9h\xcc\xdf)g\x17{\xc4\x8e^|\x04\xa3\xe8=\xa6\xe6\xf1-\x977\xf9\x8e\xe9\xa6\x15\xc8\x08B\xd1\x0a6\x90\xcf\x8d\x14\x07\x97Ut\xb0\xc0\xb4\x84\xec\x14\xae\xa8\x9c\xf7qp\xf2\xfa\xae\x034\xf0\x13kLk\xc8F\xe1\x8a\xde\xe2\xd1\x02k\xb8\x97o\x8a9u\x17:\xe8\xd7r&\xaf<$\xb2\x97-\x0c\x98\xd61L\xb0\x83\xfe\x8e\x80\x04\x15q\xea.L\xd1\x0a^\xca+Q\x8b\x14q\xea.DQ!\x89Z\xa4hSw!\x8a\x0aO\xd4\x22E\x98\xba\xf3O\xd7\xc1%j\x11\xc3\xa9;\xa8t\x1dT\xa2\x16)\xb2\xd4\x9d\xaf\xa2\xe0\x12\xb5H\x91\xa5\xee\xfc\x14\x05\x9b\xa8E\x8a*u\xe7\x96\x8b\x1c\x16\xb0\x84zfR\xcddB\xcc\xe2\x19-\xd1\x0dr\x90d`\xb5\xf5\xd1G?\xd78\xc9\x1f\x9c\xce\x96\xeb\xc6:\xa5\xb6\x90\x8d\xbc\xc0l-J\xf4\x91\xe44\xc7\xf8\x82\xa3~\xaa\xc6\xa2h\x0a;h\xa6\xdc\xf4\xf1(\xc0\xbd\x9d&.\xb1\x93=\xf4\xa4\x7f\x98\xbb\xa2\xb9|\xc9\xe2;\x95\x95.\x096q`tA\xae\x83\xfel\x8e\xb2\x18J^\x10\xd4\xb0\x9f\xdd\x99\x8f3\xd3\xc4h\x05_q\xbf\xe9\xd8\xb5\xe1\xb2\x19x\xdb\xff\xc3L\xad(\xc2c\xa6\xe3\xd6\x88\x83\xcbf\xde\xc8\xf0\xa1o.\xaa\xa3\x93*\xd3qk\xa7\x8f\xe5\xfc\x09\xb9\xe5\xa2\xd7'\xa0 
\x98\xc4\x87~>\xfc\x14\x95\xf1\x9a\xe9h\x8d\xe0\xf2\x04\xeb\xc5b?E\xcb'P\xa2\x1e\x8d\x03D\xc5b?E\xabM\xc7j\x90\xa5\xac\xf2\x16\xf9)z\xdat\x9cFi\xf6\x16\xf8)Zj:J\xa3\xac\xf6:\x11\x15\xcda\x9a\xe9(\x8d2\xc3\x9b\x89EE\x8f\x98\x8e\xd18\x0d\xe9\x9b\xa2\xa2\xb9\xa6#4NM\xfa\xa6_G\x9b\xe8\xdc\x93\xbe)*\x9ae:B\xe3x\x96\x92\x89\x8a\xaaMGh\x1c\xcf\xec\x87\xa8\xa8\xd2t\x84\xc6\xf1\xcc\x1c\x89\x8an\x9a\x8e\xd08RE\xb6\x15y\x10g\x1d\xc7\xb2\xee5\x18\xe2#\xff\x18\xec\xed\xeeHtL\xaa\xa9m\x9a2\x7f\xa4\xcc\x8d\x98\x9e\x19\x16\x15Uh\xfc\xf58@\xef\xb9\xaa]\xc3\x9b\xe5\xd4R\x0b\x09N\x8d\xecp\xb2n!`V\x949EqH&*bd\x9d\xbd[\xd8E\x14~\xabj\x04\x88\x18\xf0\x83\x9f\x22\x0d+\xa1\xdd\x98\x13\x1e\xec-\xdf\x96\xeb\xffFc/\xd1\xce\xf9\x0d`D\x93\x09Eq'Lt\xac\xf7/\x1b\xce\x12\xfd\xafi\x0en\xcai\xd1\xe4\xe66\xe2\x88\xa6\xfa\xdek<\x99\xf0\x9b\xdb\xcb\x859G\x88:\xe1\xbb\xe9]\x0f\xba\x15\xc5/\x1f\xa9\x88\x15TC\x14\xf4J*\xcb\xa1$8\xe2\xc7[k\xbf.\xb8\x96h2\xa1S\x92\xceV\x14?\x11[\x92\x0a\xa2\xa2\x8a\x98\x9b\xd2'I_+\x8aww,J\x04U\x99\xd3\x0ana\x1d6gt)\x8a\x0f\xf4\xcc\xd8\x1fh\x8dQ'\xac$R\x01-\x8a\xdc\x18\x84\xde\x0d\xba\xd6\x131=\x9dM\x8b\x22'\x9c\xef0\x9f\x8dE\x097\xa5\xa3\xb3iP\xa4\xee0\x9cV\x1d\x9dM\x83\x225mh\x98\xc1^\xf5\xedH\x14\x12\xf0%\xb5\x1b\x1b\x0ad\xa0\xf7\xa7|\x9b\xfav$^\x91\x05\xac\xc8\x09;\xca\xda\x90\x1e\xf4\x9e]+\xe0\xefOT\x8fk\x8a;\x9a\xfa\xd7\xb3>\xf0\xbb\xea_P\xdc\x8a\x9c\xf8\x09M\xe7\xc0\xeaP\x9e\xae\x83\xbb\xe8\xc8\xc4\x8d\xb3j\xebW\xaeH=\xaa\xb3Q\x09(j\xecU[\x7f\x09(R\x8dU$E\xb1\xa2\x81\x9e\xc2\xeb0\x8d\xe2\x13\xc5\x90\xaa\xe7g5b;\x9a\x14\xabH\xca8\xbb\x223A\x09\xb4\xa2\xeeuj\xeb/\x01E3\x1eW{\xa7_yG\x1bhS\xfd\x0b\xaaQ\xdd\x8a\x22!\xc5O\xb6\xfd\xaa|\xd6q\xdcw\xb4\xc66W\xf1\x82\x1a\x0d#Z\x9f\xd2Cp\xc2\xaa\xffO\xd5\xb7\xa2\xc8\xa4:u\xe1\xdf\xdc\xaa\xae\xee\x11\xb4\x9c\x17\x0d\xbc\xa3\xaa\xe6\xeaz\xf5\xeb\xd6t\xe4\xa2\x88\xaa+\xb5\xa4\x96\xf5jZZ\x91\x9b\x1a\xda\xae\xa2\xde\x8a\x1a\x1dk\x1f\xb5(rZ\xca\xc2W_\x0c\xbc\xdavW\xe1M\xcc\xbb\xe8\x1a\xf4#\xd3\x96\x9d\x0a4m\xbb\xdbA\xcf\xc2P}\x97\xb1\x
91\x87\x03\xec\x14\xbd[\x9d\xb0\xae\x05\xc6\x1a\xaf\xf4\xdd\x14\xed\xc1\xd4tim\x95\x86\x91l\x04\x8dg\xd7NK0\x92\xfa[f5\xe9\x5c\xa2\xaeu\xbe\xc8i\x01\xda/\x16\xf6\x12\xb9\xf6J-\xe3\xd8]t\xbf\xd8=\x02\xb3\xe9_V\x99\xefm\xecv\xb4?\xe4`b\xd61RYC\xfb\xe5\xb5c\xfd\x9a\xbb\x9dv7\xa5\xff)\x103\x7f\x1e \x02\xb5\xd0tn\x7f}Gn_\x18h\x0bU\x91\xc2\xc8#W\xca\x97`e$\x02\xf5\xb0n(U\xd6\x9am\xb7\xc4s5+\xa1\xdc\x90\x1e\xf0S\xd4E\xb5\xb6\xb6\x15\x01'6<\xca\x0d\xf4\x5c\xe9\xb8rl\xe4\xfe|\xe7\xfc\x99+\xa7.\x06\xa8\x19\xdeK\xa7\x13\xcf\x8f\x892\xea\xf4\xbe\xc7q\xe4\x0c9D\x1duwVx4\x18{@\x0f\x18J\xdf\xf4\xebh\xe6\xe6\x1d\xcdi\x19\xcd\xad\xf4M{\x1fM\xc4\xb3\x18GT4\x94cE\xa5\x8bTQpo\x06\x1e\xaf\xdcH\xdf\x14\x15\xdd\xca\xb1\xa2\xd2\xa53}SL\xd7\xd7LGh\x14\x97\x01<\xcbK\xc5Vt\xd1t\x94Fq8\xee}\x03\xb6\xa8\xe8/\xd3Q\x1a\xe6\x07o\x81\xa8\xe8\x17\xd31\x1a\xe6\xb0\xb7\xa0\xccw\x17\xf5\xcf%\x14+g8\xea-\x12\x15\x9d\xe7\xf8x\xbb\xaf\x1f \xbb\xc5\xe6\xe1wv\xbd\xcbt\x9c\xc6\xb8\xce\xc7b\xa1\x9f\xa2\xcf\xf9\xd7t\xac\x06p\x816\xae\x8b\x1f\xf8)J\xb2\x91\x89\x97\x8f\x1c\xbe\xf5\x7f\x96\xc4\xff2\xf60;q&\x98\xa4.^\xf6?\xe2\xcc\xef\xde?8\xa1$]\xe0Y\xae\xf8\x7f\x94I\xd1\x10\xeb\xd97!F6\x178\xc1\x93d|:2\xf3|Q\x92W\xd9\xc0\x85;\xd5\x94*\x0e\x07X\xc9\xf9\xcc;d\x9fR\xfb\x8c\x87\xd8\xc4\xcf%\xdc\x9aN\xb3\x86u\x5c\xcd\xb6Kn\x7f\xd9j\x1e\xaby\x8a\x07i\xf0\xbe\xc5w\x1c\xd3\xc5\x8f|\xca\xa1B\xffl\x93\xc8T\x1a\x98N\x98\x10a\xc6\xdb\x0c\xe5p\x8fIq\x83n.r)\xf3\x8e\x12E\x16/vz_\x8aU$\xc5*\x92b\x15I\xb1\x8a\xa4XER\xac\x22)V\x91\x14\xabH\x8aU$\xc5*\x92b\x15I\xb1\x8a\xa4XER\xfe\x07\xfd\xc0h\xdb\x90\xd1\xcd\xab\x00\x00\x00%tEXtdate:create\x002017-03-14T16:18:11-07:00!\x81\x11\xf3\x00\x00\x00%tEXtdate:modify\x002017-03-14T16:18:11-07:00P\xdc\xa9O\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x0b\x13\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x91\x00\x00\x00\x91\x08\x04\x00\x00\x00i\xd1\x92\xa8\x00\x00\x00\x04gAMA\x00\x00\xb1\x8f\x0b\xfca\x05\x00\x00\x00 
cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x00\x02bKGD\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xe1\x03\x0e\x10\x12\x0bE\xb3eO\x00\x00\x0a\x06IDATx\xda\xed\xddy\x90\x15\xc5\x1d\xc0\xf1\xcf[XYW.\x8f\xe8\xae\x80FE@\x8dG\xbc\xcaZ\x04D$*\x18\x8f\xc4P\xa2\xf1*\x0c\xa6biU\xb0 \xc6*M@\x84R\xb2\xa1\xbc\xcbhi\x88\xa5`\x0aM\xa2\xa2 R*\xdehTRx\xe2\x05\x88+B\xe4\x0c \xcbB\xfe\xd8\xe7\xb1o\xde\xee,\xbb\xafgVx\xdf\xbfv\xfb\xcd\xeb\xfe\xcd\xf7\xf5\xf4\xcct\xf7\xf4d\xb6*\xd24%i\x07\xd0\xf6)*\x8a\xa5\xa8(\x96\xa2\xa2X\x8a\x8ab)*\x8a\xa5\xa8(\x96\xa2\xa2X\x8a\x8ab)*\x8a\xa5\xa8(\x96\xa2\xa2X\x8a\x8ab)*\x8a\xa5\xa8(\x96\xa2\xa2X\xda\xe7&,ny^\x9d]h\x88}\xed\xd4\xe4Vu>\xf0\xba[|\x91\xf6\xae7\xce>\x0d\xfe\xcb\xe4\xf6:\xb6XQ\x95\x07uo\xf6\xd6k\x5c\xed\xb6\xb4U4F\x18E\xbd\xcd\xd3\xd9\xabn\xf4\x96MMn\xd9NO\x17;\x1b\xf7\xf8\xb5\xda\xb4u$\xa5\xa8\xc4+\x8e6\xc7\xe9\xd67\xf3\x1bg\x9b\xa2\xdc\x5c?\xb7\x22m!q\x8a\x0a\xd3\x5c\xff\xca\xd1\x968\xa3\xd9\x82\x98\xae\x9f\xa5\xfa\x9b\xe7\x90\xb4\x85\xc4Q\x08E\xbb\x9b\x80\xdf\xfa\xdf6}\xebu\xc7\x98g?/\x1a\x92\xb6\x84\xa6)\x84\xa2\x89v3\xdbC\xdb\xfc\xbd\x1a\x03L\xd3\xd9\xa3F\xa5\xad\xa1)Z\xaf\xe8\x18#lry\x8b\xbe\xbb\xd1p\xd7\xca\xa8v\xb7\xd2\xb4U4Fk\x15\x95\xb8]\x89?{\xaf\xc59\x5cg\x98\xf5Fx\xca\x1ei\xcbhl\x17[G}C=\xbeUy\xb4\xf1\xa6\xbbu\x8a\xea\x1b\xeaQ\xdb\xd8PGi\xd3Mw\xeb\x14\xd57\xd4\xd3\x0b\x10G\x1bn\xba[\xa3\xa85\x0du\x946\xdbt\xb7\x5cQ\xeb\x1b\xea(m\xb2\xe9n\xb9\xa2B4\xd4Q\xda`\xd3\xddRE\x85j\xa8\xa3\xb4\xb9\xa6\xbb\xa5\x8a\x0a\xd7PGicMws\xef\xf4\xbb;\xca~*tT\xae\xbdR\xc3lvXA\xdb\xa1\x5c\xae1V\xc6\x1c\xcb\x0a\xb5\xa7\xbe\xb2\xd1F\xeb}`\x81\xb7\x9a\xba\xe5nx\xa7\xdf^\x1c?p\xa9s\x1d\x14I\xaf\x0e*\x88\xeb\xbcc\x8aA\x81r\xdf\xe2c\xaf\xfa\xa7\x19\xd6\xc5m\xdat-\xca\x18\xe5Z\x9d\xb1\xdak>\xb6\xd4\x1ak\xd5\xaa\xb3\xd6\xa3\xea\x82*\x82\x1f\xaa*`n\xa5\xca\x94\xe9\xe4 
?\xd2'\xdb}\xbc\xc1=&Y\xd4p\xc3\xe6w\xa9\xed\xe2>g\xd9\xe2\x01wx\xd9\x96\xe0B\x92\xa4\xbd^\x06\xfb\x99~2j\x8d3\xf1\xbb?xs\x15\x95\x9aa\xb0\xcf\xfc\xc2\x8bi\xefO@\x0e\xf6;\xe7\xcb\x98\xeb4k\xf3+j\xfc\x8cv\xab\xc1\x96\xe8\xbb]\x0b\xe2m\x17\x1ad\x89\xfe\x1eW\x9e\x7f\x93\xc6\x14\x0d4\xd2z?\xf5I\xda\xfb\x90\x00O\xebg\x91\xe3\x1b\x1b\x91\xc9\xaf(\xe3\x16\x8c7?\xed\xe8\x13b\x91\xa1\xd6\xbb\xc8y\xf9>\xcc\xafh\x90C,R\x9dv\xe4\x09\xf2\x96\xcbq\xb3=\xa3\x1f\xe5W4\x12w\xc7\x8c\x87mo\xdcc\xa6\xdd\x5c\x1b\xfd \xdf\x19\xad\x83\x95\xca\xf4\xb04\xed\xa8\x13\xe6 o\xd9\xa0\x87/\xe3\xcfh}\xed\xec\xcd\x1dN\x10\xef\x98\xa1\xdcor\x93\xf3):\x09O\xa5\x1do*\xfcI}#\xd3\x80|\x8a\x8e\xc2\xf3iG\x9b\x0a\xcfZ\xa6\x87>\x0d\x13\xf3):\x14\xffI;\xda\x94x\x1eG6L\x8a*\xda]\xa5\xb5\xb97v;\x0c\xef\xa3g\xc3\xa4\xa8\xa2\xfd\xb1\xd0\x8e\xfa\xb0c\x0d\xb9\xfd\xe6QE=\xb0$\xedHSc-:7L\x8a*\xaa\xc0giG\x9a\x1a\xeb\xb1s\xc3\xa4\xa8\xa2]\xb12\xedHS\xa3N\xa4'6\xaa\xa8\x13\xd6\xa4\x1dijlA\xa6aR\xb4\xef\xba\x1d\x09t\xb9\xe6#c/\x9du\xc4:k,k+\xa7\x8c\xa8\xa2\x9d$=\x1b\xfb@'\xe8\xefp=\x1b\xb4\x02\xeb}h\xbe\xb9\x9e\xb10\xc1X2\xd1\xa4\xa8\xa2R\xe2G\x05\x0aD\xa5\x0b\x0cw\xf87\xffo\xf5_\x9f\xa3\xc2\xee\xca\x1d\xeaP\xbf\xc4|S\xfdMM\x22\x11\xe5\xa9\xb9\xf9\x0f\xb4\xcd\x09\x04s\xa01\xce\xd7\x01\x9b\xbd`\x86\xb9\x96Z\xf6\xcd$\xe3R{\xe9\xa6\xbf\xa1\xfa:\xdc\xe1\xc6\xba\xcf\x8d\x89\xd6\xa7&\x14\x95\x10|\xb4\xa3\x93?\xb8B)\x1e1\xd5L\xab\x22[\xd4\xfa\xd4\xa7^1IW\xa7\x18\xeet\x97\xb8\xd0\xcd\xc6~\xdb\x09\x9f\x14\xd1V\xa7]pE'y\xcf\x95J=\xe5Xg\x98\x96G\xd0wYe\x9a3\x1c\xeb)\xa5\xae\xf4\x9e\x93\xd2W\x94\x09\xaa\xa8\xc4\xf5f\xa94\xcf\x89\x06{\xb5\xd9\xdf{\xd5`'\x9a\xa7\xd2,\xd7'{:)\xc9\x9b\x12JQ\x99\xe9\xaeVb\xb2*Oo\xf3\xb7\x9fVe\xb2\x12W\x9b\xae,ME\x19\x02]\x91t4\xcbY\xber\x91Q-\xbc\xf2\xaa3\xcaE\xber\x96Y:n\x7f\x8aJ=\xac\xbf\x1a\x03LiU>S\x0cP\xa3\xbf\x87\x93\x9a\xee\x97\xdc\x81v\xaf\xc1V\x1a\xe0\x95V\xe7\xf4\x8a\x01V\x1a\xec\xde\xf0z\xbe\x16\xd2\x900\xb5\xe82\xe7\xa9sN\x81\xael\x16:G\x9d\xf3\xa2]\xf1!HF\xd1a\xaa1\xda\x93\x05\xcb\xf1I\xa3Q
\xed\xb0\x90r\xeaIBQ\xc6]:\x98brAs\x9dl\x8a2w\xe5\xbb\xab*,I(\xba\xd8\xb1V\x14l~\xf6\xb7\x5cn\x85c]\x1cJ\xcd\xd7\x84W\xd4\xc1xL\x0cp\xe3\xb0\xd6D\x8c\xd7!\x94\x9cz\xc2_\xa7\x9e\xaf\xd2\xa7n\x0f\x92\xf7\xed>U\xe9\xfc\xb0;\x10\xbe\x16]\x89\xb16\x06\x89~\xa3q\xd9\x12\x02\x12Z\xd11\xfa\xf8\xc2_\x83\xc5\x7f\xaf\xe5\xfa8:X\xfe\x1aST8\x86\xe3\x89\x80\xfdO\x9b=\x8es\x83\xe5\xaf\xb1\xb6\xa8p\xb5h(\x1e\x0b\xb9\x03\x1e\xcb\x96\x12\x8c\xb0\x07Z\x85^j\x0bx\xc1\x98\x8f'\xd5\xea\xa5\x22\x5c\x01a\xcfh}\xf1\x5c\xe0!\xa75\x9e\xa3\xa0S\xd8s\x08[\x8bzcA\xb8\xe0\xb3, w\xc2K!\x09[\x8b\x0e\xc4\xe7AK\x90-\xe1\xc0p\xd9\x87\xadE{\x93\xc0\xe0NM\xb6\xa4@\x84\xadE\x1d%U\x8b\x02\xf6A\x86\xadE\xe5X\x1e.\xf8,\xcb\xb3%\x05\x22l-\xda\x88\xaeAK\x90-!\xcc\x0d\x0eB\xd7\xa2u\x84\xbcb\xc9R)\xe8\x10{\xd8Z\xb4<\xbb\x03a\xa9\x10\xf4p\x0e\xab\xe8\x03I\xd5\xa2\x0f\xc2e\x1fVQ\x9e\xf9\xa7\x018 [R \xc2\xb6E/cP\xcc\x22\x86\xade'\x83\xb2%\x05\x22\xf4\x81\xf6\x99\x8e\x06\x04-c\x80\x8e>K\xf6@+l\x7f\xd1\x938-\x5c\xf8\xd9\xdc\x83\xf6%\x84\xee/\x9a*\x09ESC\x16\x10\xba{\x7f\x8ee\xf6wj\xb0\xfcO\xb5\xbfe\xe6\x84\xdc\x85\xd0\x8a\xea\xdc\x8a\x09\x81\x06\x043&\xe0\xd6\xb03|\xc3\x0f\x12\xddf\x9d#\x0c\x0b\x92\xf70GX\x17z\xad\xda\xf0\x83D+Uc\x5c3V'\xd9V\xda\x1b\x87\xea\xd0\xcf\x1a$1%\xee\x06\x8b\xf42\xba\xe0\xf9\x8e\xd6\xcb\x227\x84\x0e?\x09E\x1b\x5c\x86\xf1\x05^\xb2i\x88\xf1\xb8\xcc\x86\xd0\xe1'3\xb1r\x86\x9b\x94x@\xef\x82\xe5\xd8\xc7\x03J\xdcdF\xf8\xe0\x93\x9a{:\xc6<]<R\xa0\xde\xa3\xae\xfe\xa5\x8by\xc6$\x11zR\xb3\xd46\x19\xea}\xbd<Q\x80;\xff\x0aO\xe8\xe5}C\x93Yl!\xb9\x19\xcc+\x9cl\xb1\xe3\xbc\xd6\xca\x11\xf8\xa3\xbd\xe68\x8b\x9d\x9c\xd4b\xe2IN\xf2\xfeD\x95\x05\xbay\xae\x15c\xf0\xe7zN7\x0bT%\xb7&N\xb2\x8fU-\xd5\xcfLe\xeewg\x0b\x0e\xb8\x0aw\xba_\x99\x99\xfa%\xb9\x16E\xd2\xef\x01Ye\x881j\x8d\xf4\xa1\x89vm\xf6\xf7v5\xd1\x87F\xaa5\xc6\x90\x98\xa7F\xbe\xe7\x8a\xd8j\x92#=\xa3\xdcU>rM3z%{\xba\xc6G\xaeR\xee\x19G\x9a\x94\xf4\xd3\x8e\xe9\xbcMf\x81\x81\xce\xf6\x86\xae\xc6Y\xe8]\xd5\x06\xe6\x99\x8b_j\xa0j\xefZh\x9c\xae\xdep\xb6\x81\x09\xc
c\x10\x88\x10\xbdsZ\xe6\xa3D\x9e |\xc8C\x06\x1bi\xa8\xdez\x1be\x8b\x15j|\xae\x06\x95*T\xda#\xfb\x03n0\xc3_\xccN\xccI\xec\xe3\xc3\x15\xf6K\xe4\xa9F\x98m\xb6N\x86:\xc1\x00\xbd\xeci\xcf\xef<\x04\xca\x16\xefz\xd63f$\xfc\x98^\xce\x81\x9c\xef\xfe;\xa3{\x82\x15z\xadi\xa6\xa1\xcc\xfe\xf6\xd3%\xfb\x84\xf5j\x1f\xfb(\xe4\x18k#\xecL\xee\xd2\xaa\x85\xef\xa2h)\x1b\xbd\xed\xed\xb4\x83P.\xa2(\xda\x5co\x95V#\xde\x16\xd8\x99\xdc\xbe\x83\xa8\x8cM\x04\x1e\xf9j\xcbt\x15\x99\x1f\x10U\xb4\x81$\x1f\xablc\xf4\x16\x19\xfc\x8e*Z\xc36\x5c\xf5no\x1c\x82w\x1a&E\x15\xd5_\x97\xec\x98tq\xa8M\xb9\x8b\xa4E\x15-\xc4\xc1i\xc7\x9a\x12'h\xe7\xe5\xf83\xda\x1b8.\xedXSb8f\xe5&\xe6[\x1dt\x89\xee~\xec\xcd\xb4\xe3M\x9c\xbd}\x82}\xd5\xc4\xaf\x0e\xfa\x08.I;\xde\x14\xb8T\xa9\xe9\xd1\xfb\xd3|\xb5\xa8~\xad\xd5\x03\x12\x98\x0e\xdc\x96\xd8\xc7|]Uy\xa99\xab\xa6\xbf\xe3\x1f\xca\xdd\x91v\xcc\x89\xd2\xdeT]=\xec\xa5\xe8G\xf9o5\xae\xb0\xda\x99\xaeH;\xee\x04\x99\xa0\xca\x22#\xf2}\x94_\xd1R\x97\xdaj\xb2\xe1iG\x9e\x08%n2\xdaf\xc3\xf3w\xf86v\xc3\xfa\xa0\xdf+q\xbf\x89\xb9\xab\x1cnw\x94\x9b\xe6\x0a\x1b\x9d\x93\xef \x83v\x7f\xccIX\xfd\xf5\x1f/X\xe5'\xfa\xbbP\x9d\x85\xe1G\xceS\xe2L\x8f\xaa\xb2\xca\x103\xbfM\xec\xd2`\x93\xa6\xdf&\xd3\xd7M\x8e\xc2W\xe6\x9a\xe35\x1fZ\xbc\xdd\xbc2\xa5\xc2)F8\x1e\xffvA\xc3\x9e\xaam{As\xc6\x99.q\xb2v\xd9\xff7\xf9\xc4\x12\x9b\xd4\xdal\xeb\xf7\xac_\xa9\xfe\xc7mo\x17\xbb\xea\x96\x1d\xc7\xfb\xd2\xd5\xee\xca\xfd\xd9[\xf2\x0e\xeb=\x0c4\xd0Az\xea\x16~\x8d\x8e\x84Xm\x9ei\xfe\x9e\xef\xe9\x91\xd6\xbd\xe6\xbb\xcc\x01*\x95j\xafTIJ\xab\x88\xb6\x94\x8c\x0c\xea\xac\xf7\xa5\xe5\x167>\x1e\x17\xa3\xa8H.\xdf\xaf\xd6$\x15\x8a\x8ab)*\x8a\xa5\xa8(\x96\xa2\xa2X\x8a\x8ab)*\x8a\xa5\xa8(\x96\xa2\xa2X\x8a\x8ab)*\x8a\xa5\xa8(\x96\xa2\xa2X\x8a\x8ab\xf9?\x9bhs\x8d\x8d5L\x0d\x00\x00\x00%tEXtdate:create\x002017-03-14T16:18:11-07:00!\x81\x11\xf3\x00\x00\x00%tEXtdate:modify\x002017-03-14T16:18:11-07:00P\xdc\xa9O\x00\x00\x00\x00IEND\xaeB`\x82"
qt_resource_name = "\x00\x03\x00\x00x\xc3\x00r\x00e\x00s\x00\x0d\x06\xe8\xc6\x07\x00c\x00a\x00m\x00e\x00r\x00a\x00_\x00h\x00l\x00.\x00p\x00n\x00g\x00\x0a\x0c\x91g'\x00c\x00a\x00m\x00e\x00r\x00a\x00.\x00p\x00n\x00g"
qt_resource_struct = "\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00,\x00\x00\x00\x00\x00\x01\x00\x00\x08$"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 710.952381
| 14,013
| 0.744943
| 3,356
| 14,930
| 3.302443
| 0.279201
| 0.05522
| 0.037355
| 0.016241
| 0.145448
| 0.145448
| 0.145448
| 0.140305
| 0.138952
| 0.133809
| 0
| 0.240833
| 0.006296
| 14,930
| 20
| 14,014
| 746.5
| 0.506201
| 0.00998
| 0
| 0
| 0
| 0.333333
| 0.974888
| 0.972655
| 0
| 0
| 0.000541
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
503e831a005682fd8851d0d93f9be8ac5bec42c3
| 37
|
py
|
Python
|
python/miniconda/vendored/vendor/noarch/zipp-3.4.1-pyhd3eb1b0_0/info/test/run_test.py
|
kvedurmu/paketo-samples
|
525b49f14883a6aa54959de3232430f0fdc1e66e
|
[
"Apache-2.0"
] | null | null | null |
python/miniconda/vendored/vendor/noarch/zipp-3.4.1-pyhd3eb1b0_0/info/test/run_test.py
|
kvedurmu/paketo-samples
|
525b49f14883a6aa54959de3232430f0fdc1e66e
|
[
"Apache-2.0"
] | 19
|
2021-03-10T21:30:56.000Z
|
2022-02-27T06:45:03.000Z
|
python/miniconda/vendored/vendor/noarch/zipp-3.4.1-pyhd3eb1b0_0/info/test/run_test.py
|
kvedurmu/paketo-samples
|
525b49f14883a6aa54959de3232430f0fdc1e66e
|
[
"Apache-2.0"
] | null | null | null |
print("import: 'zipp'")
import zipp
| 9.25
| 23
| 0.675676
| 5
| 37
| 5
| 0.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 37
| 3
| 24
| 12.333333
| 0.78125
| 0
| 0
| 0
| 0
| 0
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
acb6589eb75d57bbc11814f75785ed6003a87f6f
| 448
|
py
|
Python
|
pettingzoo/mpe/__init__.py
|
AnanthHari/PettingZoo
|
c147c2992a067fd529570db0bea6a0324f01ee6e
|
[
"MIT"
] | null | null | null |
pettingzoo/mpe/__init__.py
|
AnanthHari/PettingZoo
|
c147c2992a067fd529570db0bea6a0324f01ee6e
|
[
"MIT"
] | null | null | null |
pettingzoo/mpe/__init__.py
|
AnanthHari/PettingZoo
|
c147c2992a067fd529570db0bea6a0324f01ee6e
|
[
"MIT"
] | null | null | null |
from . import simple_adversary as simple_adversary_v0
from . import simple_crypto as simple_crypto_v0
from . import simple_push as simple_push_v0
from . import simple_reference as simple_reference_v0
from . import simple_speaker_listener as simple_speaker_listener_v0
from . import simple_spread as simple_spread_v0
from . import simple_tag as simple_tag_v0
from . import simple_world_comm as simple_world_comm_v0
from . import simple as simple_v0
| 44.8
| 67
| 0.859375
| 74
| 448
| 4.810811
| 0.202703
| 0.252809
| 0.404494
| 0.404494
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022843
| 0.120536
| 448
| 9
| 68
| 49.777778
| 0.880711
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
acd0d46588bf77150a9aa7454a571975a5134930
| 3,074
|
py
|
Python
|
test/test_main.py
|
terib0l/starlette-validation-uploadfile
|
681454b3e5745969739ed2845d57e07843cb3b78
|
[
"MIT"
] | null | null | null |
test/test_main.py
|
terib0l/starlette-validation-uploadfile
|
681454b3e5745969739ed2845d57e07843cb3b78
|
[
"MIT"
] | null | null | null |
test/test_main.py
|
terib0l/starlette-validation-uploadfile
|
681454b3e5745969739ed2845d57e07843cb3b78
|
[
"MIT"
] | null | null | null |
import os
from fastapi.testclient import TestClient
from main import app
client = TestClient(app)
def test_Uploaded_File_Goes_Through():
"""
Would upload file to "upload_file_first"
Upload File is jpeg, and size less than 12000 bytes
"""
size = os.path.getsize("./img/proper.jpeg")
with open("./img/proper.jpeg", "rb") as img:
response = client.post(
"/upload/first",
files={"file": ("proper.jpeg", img, "image/jpeg")}
)
assert response.status_code == 200
assert response.json() == {
"filename": "test.jpeg",
"content_type": "image/jpeg",
"file_size": size
}
def test_Uploaded_File_Is_Too_Large_Size():
"""
Would upload file to "upload_file_first"
Upload File is jpeg, and size more than 12000 bytes
"""
with open("./img/too_large.jpeg", "rb") as img:
response = client.post(
"/upload/first",
files={"file": ("too_large.jpeg", img, "image/jpeg")}
)
assert response.status_code == 413
assert response.json() == "Request Entity Too Large"
def test_Uploaded_File_Is_Unsupported_File_Type():
"""
Would upload file to "upload_file_first"
Upload File is png, and size less than 12000 bytes
"""
with open("./img/unsupported_type.png", "rb") as img:
response = client.post(
"/upload/first",
files={"file": ("unsupported_type.png", img, "image/png")}
)
response = client.post("/upload/first")
assert response.status_code == 415
assert response.json() == "Unsupported Media Type"
def test_When_No_Specified_In_App_Path_Uploaded_File_Goes_Through_Even_In_The_Case_Of_Too_Large_Size():
"""
Would upload file to "upload_file_second"
Upload File is jpeg, and size more than 12000 bytes
"""
size = os.path.getsize("./img/too_large.jpeg")
with open("./img/too_large.jpeg", "rb") as img:
response = client.post(
"/upload/second",
files={"file": ("too_large.jpeg", img, "image/jpeg")}
)
assert response.status_code == 200
assert response.json() == {
"filename": "test.jpeg",
"content_type": "image/jpeg",
"file_size": size
}
def test_When_No_Specified_In_App_Path_Uploaded_File_Goes_Through_Even_In_The_Case_Of_Unsupported_File_Type():
"""
Would upload file to "upload_file_second"
Upload File is png, and size less than 12000 bytes
"""
size = os.path.getsize("./img/unsupported_type.png")
with open("./img/unsupported_type.png", "rb") as img:
size =
response = client.post(
"/upload/second",
files={"file": ("unsupported_type.png", img, "image/png")}
)
assert response.status_code == 200
assert response.json() == {
"filename": "test.jpeg",
"content_type": "image/png",
"file_size": size
}
| 32.702128
| 110
| 0.593038
| 375
| 3,074
| 4.64
| 0.173333
| 0.086207
| 0.062069
| 0.082759
| 0.827586
| 0.790805
| 0.790805
| 0.770115
| 0.720115
| 0.647126
| 0
| 0.018059
| 0.27944
| 3,074
| 93
| 111
| 33.053763
| 0.767494
| 0
| 0
| 0.52459
| 0
| 0
| 0.237868
| 0.031026
| 0
| 0
| 0
| 0
| 0.163934
| 0
| null | null | 0
| 0.04918
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a8463400ddd78f6ab425116c76e147351c8b4129
| 46
|
py
|
Python
|
src/tests/data/__init__.py
|
Arseny-Tokmancev/channels-watchbot
|
102edc07c9d8c306f47b6a5b8318fa0ba56534f0
|
[
"MIT"
] | 1
|
2020-11-10T22:50:14.000Z
|
2020-11-10T22:50:14.000Z
|
src/tests/data/__init__.py
|
Arseny-Tokmancev/channels-watchbot
|
102edc07c9d8c306f47b6a5b8318fa0ba56534f0
|
[
"MIT"
] | null | null | null |
src/tests/data/__init__.py
|
Arseny-Tokmancev/channels-watchbot
|
102edc07c9d8c306f47b6a5b8318fa0ba56534f0
|
[
"MIT"
] | 1
|
2022-01-31T19:23:03.000Z
|
2022-01-31T19:23:03.000Z
|
from . import chat
def run():
chat.run()
| 9.2
| 18
| 0.586957
| 7
| 46
| 3.857143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 46
| 4
| 19
| 11.5
| 0.794118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a84a94b9c5f361d47b3cf7798466d5a8d2b1c5a4
| 48
|
py
|
Python
|
samples/src/main/resources/datasets/python/27.py
|
sritchie/kotlingrad
|
8165ed1cd77220a5347c58cded4c6f2bcf22ee30
|
[
"Apache-2.0"
] | 11
|
2020-12-19T01:19:44.000Z
|
2021-12-25T20:43:33.000Z
|
src/main/resources/datasets/python/27.py
|
breandan/katholic
|
081c39f3acc73ff41f5865563debe78a36e1038f
|
[
"Apache-2.0"
] | null | null | null |
src/main/resources/datasets/python/27.py
|
breandan/katholic
|
081c39f3acc73ff41f5865563debe78a36e1038f
|
[
"Apache-2.0"
] | 2
|
2021-01-25T07:59:20.000Z
|
2021-08-07T07:13:49.000Z
|
def bitwise4(w, x, y, z):
(w | x) ^ (y & z)
| 16
| 25
| 0.395833
| 10
| 48
| 1.9
| 0.6
| 0.210526
| 0.315789
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.333333
| 48
| 2
| 26
| 24
| 0.5625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 1
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a8674a32d2d36c134e18891848fbd76ca6d73cb7
| 1,632
|
py
|
Python
|
ygo_cards/migrations/0012_auto_20150428_2003.py
|
monovertex/ygorganizer
|
987a8de36b3864906ed499886e4c1a712d214310
|
[
"MIT"
] | 11
|
2015-08-20T15:29:33.000Z
|
2021-08-19T23:48:14.000Z
|
ygo_cards/migrations/0012_auto_20150428_2003.py
|
monovertex/ygorganizer
|
987a8de36b3864906ed499886e4c1a712d214310
|
[
"MIT"
] | 29
|
2015-08-20T13:25:59.000Z
|
2015-08-24T13:28:52.000Z
|
ygo_cards/migrations/0012_auto_20150428_2003.py
|
monovertex/ygorganizer
|
987a8de36b3864906ed499886e4c1a712d214310
|
[
"MIT"
] | 4
|
2015-10-25T07:04:45.000Z
|
2020-05-17T01:09:22.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('ygo_cards', '0011_auto_20150428_1928'),
]
operations = [
migrations.AddField(
model_name='cardversion',
name='price_shift',
field=models.FloatField(null=True, blank=True),
),
migrations.AddField(
model_name='cardversion',
name='price_shift_180',
field=models.FloatField(null=True, blank=True),
),
migrations.AddField(
model_name='cardversion',
name='price_shift_21',
field=models.FloatField(null=True, blank=True),
),
migrations.AddField(
model_name='cardversion',
name='price_shift_3',
field=models.FloatField(null=True, blank=True),
),
migrations.AddField(
model_name='cardversion',
name='price_shift_30',
field=models.FloatField(null=True, blank=True),
),
migrations.AddField(
model_name='cardversion',
name='price_shift_365',
field=models.FloatField(null=True, blank=True),
),
migrations.AddField(
model_name='cardversion',
name='price_shift_7',
field=models.FloatField(null=True, blank=True),
),
migrations.AddField(
model_name='cardversion',
name='price_shift_90',
field=models.FloatField(null=True, blank=True),
),
]
| 29.672727
| 59
| 0.566176
| 153
| 1,632
| 5.830065
| 0.261438
| 0.161435
| 0.206278
| 0.242152
| 0.807175
| 0.807175
| 0.807175
| 0.764574
| 0.706278
| 0.706278
| 0
| 0.027853
| 0.318015
| 1,632
| 54
| 60
| 30.222222
| 0.773585
| 0.012868
| 0
| 0.666667
| 0
| 0
| 0.142324
| 0.014295
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a88d335d555297b700e811bd200955f6e5ae282e
| 32,600
|
py
|
Python
|
python/federatedml/protobuf/generated/lr_model_param_pb2.py
|
jat001/FATE
|
b402362fb82869651ba6122f4ec1cf5499a644da
|
[
"Apache-2.0"
] | null | null | null |
python/federatedml/protobuf/generated/lr_model_param_pb2.py
|
jat001/FATE
|
b402362fb82869651ba6122f4ec1cf5499a644da
|
[
"Apache-2.0"
] | null | null | null |
python/federatedml/protobuf/generated/lr_model_param_pb2.py
|
jat001/FATE
|
b402362fb82869651ba6122f4ec1cf5499a644da
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: lr-model-param.proto
import sys
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='lr-model-param.proto',
package='com.webank.ai.fate.core.mlmodel.buffer',
syntax='proto3',
serialized_options=_b('B\021LRModelParamProto'),
serialized_pb=_b('\n\x14lr-model-param.proto\x12&com.webank.ai.fate.core.mlmodel.buffer\"\x85\x05\n\x0cLRModelParam\x12\r\n\x05iters\x18\x01 \x01(\x05\x12\x14\n\x0closs_history\x18\x02 \x03(\x01\x12\x14\n\x0cis_converged\x18\x03 \x01(\x08\x12P\n\x06weight\x18\x04 \x03(\x0b\x32@.com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.WeightEntry\x12\x11\n\tintercept\x18\x05 \x01(\x01\x12\x0e\n\x06header\x18\x06 \x03(\t\x12S\n\x12one_vs_rest_result\x18\x07 \x01(\x0b\x32\x37.com.webank.ai.fate.core.mlmodel.buffer.OneVsRestResult\x12\x18\n\x10need_one_vs_rest\x18\x08 \x01(\x08\x12\x16\n\x0e\x62\x65st_iteration\x18\t \x01(\x05\x12\x63\n\x10\x65ncrypted_weight\x18\n \x03(\x0b\x32I.com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.EncryptedWeightEntry\x12>\n\x06\x63ipher\x18\x0b \x01(\x0b\x32..com.webank.ai.fate.core.mlmodel.buffer.Cipher\x1a-\n\x0bWeightEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1aj\n\x14\x45ncryptedWeightEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0b\x32\x32.com.webank.ai.fate.core.mlmodel.buffer.CipherText:\x02\x38\x01\"\x93\x04\n\x0bSingleModel\x12\r\n\x05iters\x18\x01 \x01(\x05\x12\x14\n\x0closs_history\x18\x02 \x03(\x01\x12\x14\n\x0cis_converged\x18\x03 \x01(\x08\x12O\n\x06weight\x18\x04 \x03(\x0b\x32?.com.webank.ai.fate.core.mlmodel.buffer.SingleModel.WeightEntry\x12\x11\n\tintercept\x18\x05 \x01(\x01\x12\x0e\n\x06header\x18\x06 \x03(\t\x12\x16\n\x0e\x62\x65st_iteration\x18\x07 \x01(\x05\x12\x62\n\x10\x65ncrypted_weight\x18\x08 \x03(\x0b\x32H.com.webank.ai.fate.core.mlmodel.buffer.SingleModel.EncryptedWeightEntry\x12>\n\x06\x63ipher\x18\t \x01(\x0b\x32..com.webank.ai.fate.core.mlmodel.buffer.Cipher\x1a-\n\x0bWeightEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1aj\n\x14\x45ncryptedWeightEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 
\x01(\x0b\x32\x32.com.webank.ai.fate.core.mlmodel.buffer.CipherText:\x02\x38\x01\"}\n\x0fOneVsRestResult\x12M\n\x10\x63ompleted_models\x18\x01 \x03(\x0b\x32\x33.com.webank.ai.fate.core.mlmodel.buffer.SingleModel\x12\x1b\n\x13one_vs_rest_classes\x18\x02 \x03(\t\"\xa4\x01\n\x06\x43ipher\x12K\n\npublic_key\x18\x01 \x01(\x0b\x32\x37.com.webank.ai.fate.core.mlmodel.buffer.CipherPublicKey\x12M\n\x0bprivate_key\x18\x02 \x01(\x0b\x32\x38.com.webank.ai.fate.core.mlmodel.buffer.CipherPrivateKey\"\x1c\n\x0f\x43ipherPublicKey\x12\t\n\x01n\x18\x01 \x01(\t\"(\n\x10\x43ipherPrivateKey\x12\t\n\x01p\x18\x01 \x01(\t\x12\t\n\x01q\x18\x02 \x01(\t\"\x97\x01\n\nCipherText\x12K\n\npublic_key\x18\x01 \x01(\x0b\x32\x37.com.webank.ai.fate.core.mlmodel.buffer.CipherPublicKey\x12\x13\n\x0b\x63ipher_text\x18\x02 \x01(\t\x12\x10\n\x08\x65xponent\x18\x03 \x01(\t\x12\x15\n\ris_obfuscator\x18\x04 \x01(\x08\x42\x13\x42\x11LRModelParamProtob\x06proto3')
)
_LRMODELPARAM_WEIGHTENTRY = _descriptor.Descriptor(
name='WeightEntry',
full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.WeightEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.WeightEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.WeightEntry.value', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=557,
serialized_end=602,
)
_LRMODELPARAM_ENCRYPTEDWEIGHTENTRY = _descriptor.Descriptor(
name='EncryptedWeightEntry',
full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.EncryptedWeightEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key',
full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.EncryptedWeightEntry.key',
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value',
full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.EncryptedWeightEntry.value',
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[],
serialized_start=604,
serialized_end=710,
)
_LRMODELPARAM = _descriptor.Descriptor(
name='LRModelParam',
full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='iters', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.iters', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='loss_history', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.loss_history', index=1,
number=2, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='is_converged', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.is_converged', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='weight', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.weight', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='intercept', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.intercept', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='header', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.header', index=5,
number=6, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='one_vs_rest_result', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.one_vs_rest_result', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='need_one_vs_rest', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.need_one_vs_rest', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='best_iteration', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.best_iteration', index=8,
number=9, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='encrypted_weight', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.encrypted_weight', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cipher', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.cipher', index=10,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_LRMODELPARAM_WEIGHTENTRY, _LRMODELPARAM_ENCRYPTEDWEIGHTENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=65,
serialized_end=710,
)
_SINGLEMODEL_WEIGHTENTRY = _descriptor.Descriptor(
name='WeightEntry',
full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.WeightEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.WeightEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.WeightEntry.value', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=557,
serialized_end=602,
)
_SINGLEMODEL_ENCRYPTEDWEIGHTENTRY = _descriptor.Descriptor(
name='EncryptedWeightEntry',
full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.EncryptedWeightEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key',
full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.EncryptedWeightEntry.key',
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode('utf-8'),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value',
full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.EncryptedWeightEntry.value',
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[],
serialized_start=604,
serialized_end=710,
)
_SINGLEMODEL = _descriptor.Descriptor(
name='SingleModel',
full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='iters', full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.iters', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='loss_history', full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.loss_history', index=1,
number=2, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='is_converged', full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.is_converged', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='weight', full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.weight', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='intercept', full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.intercept', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='header', full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.header', index=5,
number=6, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='best_iteration', full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.best_iteration', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='encrypted_weight', full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.encrypted_weight', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cipher', full_name='com.webank.ai.fate.core.mlmodel.buffer.SingleModel.cipher', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_SINGLEMODEL_WEIGHTENTRY, _SINGLEMODEL_ENCRYPTEDWEIGHTENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=713,
serialized_end=1244,
)
_ONEVSRESTRESULT = _descriptor.Descriptor(
name='OneVsRestResult',
full_name='com.webank.ai.fate.core.mlmodel.buffer.OneVsRestResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='completed_models',
full_name='com.webank.ai.fate.core.mlmodel.buffer.OneVsRestResult.completed_models',
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='one_vs_rest_classes',
full_name='com.webank.ai.fate.core.mlmodel.buffer.OneVsRestResult.one_vs_rest_classes',
index=1,
number=2,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[],
serialized_start=1246,
serialized_end=1371,
)
_CIPHER = _descriptor.Descriptor(
name='Cipher',
full_name='com.webank.ai.fate.core.mlmodel.buffer.Cipher',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='public_key', full_name='com.webank.ai.fate.core.mlmodel.buffer.Cipher.public_key', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='private_key', full_name='com.webank.ai.fate.core.mlmodel.buffer.Cipher.private_key', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1374,
serialized_end=1538,
)
_CIPHERPUBLICKEY = _descriptor.Descriptor(
name='CipherPublicKey',
full_name='com.webank.ai.fate.core.mlmodel.buffer.CipherPublicKey',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='n', full_name='com.webank.ai.fate.core.mlmodel.buffer.CipherPublicKey.n', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1540,
serialized_end=1568,
)
_CIPHERPRIVATEKEY = _descriptor.Descriptor(
name='CipherPrivateKey',
full_name='com.webank.ai.fate.core.mlmodel.buffer.CipherPrivateKey',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='p', full_name='com.webank.ai.fate.core.mlmodel.buffer.CipherPrivateKey.p', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='q', full_name='com.webank.ai.fate.core.mlmodel.buffer.CipherPrivateKey.q', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1570,
serialized_end=1610,
)
_CIPHERTEXT = _descriptor.Descriptor(
name='CipherText',
full_name='com.webank.ai.fate.core.mlmodel.buffer.CipherText',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='public_key', full_name='com.webank.ai.fate.core.mlmodel.buffer.CipherText.public_key', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cipher_text', full_name='com.webank.ai.fate.core.mlmodel.buffer.CipherText.cipher_text', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exponent', full_name='com.webank.ai.fate.core.mlmodel.buffer.CipherText.exponent', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='is_obfuscator', full_name='com.webank.ai.fate.core.mlmodel.buffer.CipherText.is_obfuscator', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1613,
serialized_end=1764,
)
_LRMODELPARAM_WEIGHTENTRY.containing_type = _LRMODELPARAM
_LRMODELPARAM_ENCRYPTEDWEIGHTENTRY.fields_by_name['value'].message_type = _CIPHERTEXT
_LRMODELPARAM_ENCRYPTEDWEIGHTENTRY.containing_type = _LRMODELPARAM
_LRMODELPARAM.fields_by_name['weight'].message_type = _LRMODELPARAM_WEIGHTENTRY
_LRMODELPARAM.fields_by_name['one_vs_rest_result'].message_type = _ONEVSRESTRESULT
_LRMODELPARAM.fields_by_name['encrypted_weight'].message_type = _LRMODELPARAM_ENCRYPTEDWEIGHTENTRY
_LRMODELPARAM.fields_by_name['cipher'].message_type = _CIPHER
_SINGLEMODEL_WEIGHTENTRY.containing_type = _SINGLEMODEL
_SINGLEMODEL_ENCRYPTEDWEIGHTENTRY.fields_by_name['value'].message_type = _CIPHERTEXT
_SINGLEMODEL_ENCRYPTEDWEIGHTENTRY.containing_type = _SINGLEMODEL
_SINGLEMODEL.fields_by_name['weight'].message_type = _SINGLEMODEL_WEIGHTENTRY
_SINGLEMODEL.fields_by_name['encrypted_weight'].message_type = _SINGLEMODEL_ENCRYPTEDWEIGHTENTRY
_SINGLEMODEL.fields_by_name['cipher'].message_type = _CIPHER
_ONEVSRESTRESULT.fields_by_name['completed_models'].message_type = _SINGLEMODEL
_CIPHER.fields_by_name['public_key'].message_type = _CIPHERPUBLICKEY
_CIPHER.fields_by_name['private_key'].message_type = _CIPHERPRIVATEKEY
_CIPHERTEXT.fields_by_name['public_key'].message_type = _CIPHERPUBLICKEY
DESCRIPTOR.message_types_by_name['LRModelParam'] = _LRMODELPARAM
DESCRIPTOR.message_types_by_name['SingleModel'] = _SINGLEMODEL
DESCRIPTOR.message_types_by_name['OneVsRestResult'] = _ONEVSRESTRESULT
DESCRIPTOR.message_types_by_name['Cipher'] = _CIPHER
DESCRIPTOR.message_types_by_name['CipherPublicKey'] = _CIPHERPUBLICKEY
DESCRIPTOR.message_types_by_name['CipherPrivateKey'] = _CIPHERPRIVATEKEY
DESCRIPTOR.message_types_by_name['CipherText'] = _CIPHERTEXT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
LRModelParam = _reflection.GeneratedProtocolMessageType('LRModelParam', (_message.Message,), dict(
WeightEntry=_reflection.GeneratedProtocolMessageType('WeightEntry', (_message.Message,), dict(
DESCRIPTOR=_LRMODELPARAM_WEIGHTENTRY,
__module__='lr_model_param_pb2'
# @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.WeightEntry)
)),
EncryptedWeightEntry=_reflection.GeneratedProtocolMessageType('EncryptedWeightEntry', (_message.Message,), dict(
DESCRIPTOR=_LRMODELPARAM_ENCRYPTEDWEIGHTENTRY,
__module__='lr_model_param_pb2'
# @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.LRModelParam.EncryptedWeightEntry)
)),
DESCRIPTOR=_LRMODELPARAM,
__module__='lr_model_param_pb2'
# @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.LRModelParam)
))
_sym_db.RegisterMessage(LRModelParam)
_sym_db.RegisterMessage(LRModelParam.WeightEntry)
_sym_db.RegisterMessage(LRModelParam.EncryptedWeightEntry)
SingleModel = _reflection.GeneratedProtocolMessageType('SingleModel', (_message.Message,), dict(
WeightEntry=_reflection.GeneratedProtocolMessageType('WeightEntry', (_message.Message,), dict(
DESCRIPTOR=_SINGLEMODEL_WEIGHTENTRY,
__module__='lr_model_param_pb2'
# @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.SingleModel.WeightEntry)
)),
EncryptedWeightEntry=_reflection.GeneratedProtocolMessageType('EncryptedWeightEntry', (_message.Message,), dict(
DESCRIPTOR=_SINGLEMODEL_ENCRYPTEDWEIGHTENTRY,
__module__='lr_model_param_pb2'
# @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.SingleModel.EncryptedWeightEntry)
)),
DESCRIPTOR=_SINGLEMODEL,
__module__='lr_model_param_pb2'
# @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.SingleModel)
))
_sym_db.RegisterMessage(SingleModel)
_sym_db.RegisterMessage(SingleModel.WeightEntry)
_sym_db.RegisterMessage(SingleModel.EncryptedWeightEntry)
OneVsRestResult = _reflection.GeneratedProtocolMessageType('OneVsRestResult', (_message.Message,), dict(
DESCRIPTOR=_ONEVSRESTRESULT,
__module__='lr_model_param_pb2'
# @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.OneVsRestResult)
))
_sym_db.RegisterMessage(OneVsRestResult)
Cipher = _reflection.GeneratedProtocolMessageType('Cipher', (_message.Message,), dict(
DESCRIPTOR=_CIPHER,
__module__='lr_model_param_pb2'
# @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.Cipher)
))
_sym_db.RegisterMessage(Cipher)
CipherPublicKey = _reflection.GeneratedProtocolMessageType('CipherPublicKey', (_message.Message,), dict(
DESCRIPTOR=_CIPHERPUBLICKEY,
__module__='lr_model_param_pb2'
# @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.CipherPublicKey)
))
_sym_db.RegisterMessage(CipherPublicKey)
CipherPrivateKey = _reflection.GeneratedProtocolMessageType('CipherPrivateKey', (_message.Message,), dict(
DESCRIPTOR=_CIPHERPRIVATEKEY,
__module__='lr_model_param_pb2'
# @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.CipherPrivateKey)
))
_sym_db.RegisterMessage(CipherPrivateKey)
CipherText = _reflection.GeneratedProtocolMessageType('CipherText', (_message.Message,), dict(
DESCRIPTOR=_CIPHERTEXT,
__module__='lr_model_param_pb2'
# @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.CipherText)
))
_sym_db.RegisterMessage(CipherText)
DESCRIPTOR._options = None
_LRMODELPARAM_WEIGHTENTRY._options = None
_LRMODELPARAM_ENCRYPTEDWEIGHTENTRY._options = None
_SINGLEMODEL_WEIGHTENTRY._options = None
_SINGLEMODEL_ENCRYPTEDWEIGHTENTRY._options = None
# @@protoc_insertion_point(module_scope)
| 45.4039
| 2,848
| 0.691288
| 3,865
| 32,600
| 5.568693
| 0.061578
| 0.047577
| 0.038842
| 0.052967
| 0.819681
| 0.79566
| 0.787994
| 0.778237
| 0.757887
| 0.738187
| 0
| 0.036082
| 0.189816
| 32,600
| 717
| 2,849
| 45.467225
| 0.778813
| 0.037883
| 0
| 0.730827
| 1
| 0.001504
| 0.229775
| 0.189932
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.007519
| 0
| 0.007519
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
763d68843c64ee2d1f5603709f43ec51dcce4d9c
| 12,719
|
py
|
Python
|
tests/test_0020-support-unsigned-indexes.py
|
sjperkins/awkward-1.0
|
75dbd5d06a012ff9d1da56f898b747cea2b1d2a9
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_0020-support-unsigned-indexes.py
|
sjperkins/awkward-1.0
|
75dbd5d06a012ff9d1da56f898b747cea2b1d2a9
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_0020-support-unsigned-indexes.py
|
sjperkins/awkward-1.0
|
75dbd5d06a012ff9d1da56f898b747cea2b1d2a9
|
[
"BSD-3-Clause"
] | null | null | null |
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/master/LICENSE
from __future__ import absolute_import
import sys
import os
import json
import pytest
import numpy
import awkward1
py27 = (sys.version_info[0] < 3)
def test_index():
array_i1 = numpy.array([numpy.iinfo("i1").min, -1, 0, 1, numpy.iinfo("i1").max], dtype="i1")
array_u1 = numpy.array([numpy.iinfo("u1").min, -1, 0, 1, numpy.iinfo("u1").max], dtype="u1")
array_li2 = numpy.array([numpy.iinfo("<i2").min, -1, 0, 1, numpy.iinfo("<i2").max], dtype="<i2")
array_lu2 = numpy.array([numpy.iinfo("<u2").min, -1, 0, 1, numpy.iinfo("<u2").max], dtype="<u2")
array_li4 = numpy.array([numpy.iinfo("<i4").min, -1, 0, 1, numpy.iinfo("<i4").max], dtype="<i4")
array_lu4 = numpy.array([numpy.iinfo("<u4").min, -1, 0, 1, numpy.iinfo("<u4").max], dtype="<u4")
array_li8 = numpy.array([numpy.iinfo("<i8").min, -1, 0, 1, numpy.iinfo("<i8").max], dtype="<i8")
array_lu8 = numpy.array([numpy.iinfo("<u8").min, -1, 0, 1, numpy.iinfo("<u8").max], dtype="<u8")
array_bi2 = numpy.array([numpy.iinfo(">i2").min, -1, 0, 1, numpy.iinfo(">i2").max], dtype=">i2")
array_bu2 = numpy.array([numpy.iinfo(">u2").min, -1, 0, 1, numpy.iinfo(">u2").max], dtype=">u2")
array_bi4 = numpy.array([numpy.iinfo(">i4").min, -1, 0, 1, numpy.iinfo(">i4").max], dtype=">i4")
array_bu4 = numpy.array([numpy.iinfo(">u4").min, -1, 0, 1, numpy.iinfo(">u4").max], dtype=">u4")
array_bi8 = numpy.array([numpy.iinfo(">i8").min, -1, 0, 1, numpy.iinfo(">i8").max], dtype=">i8")
array_bu8 = numpy.array([numpy.iinfo(">u8").min, -1, 0, 1, numpy.iinfo(">u8").max], dtype=">u8")
index_i1 = awkward1.layout.Index8(array_i1)
index_u1 = awkward1.layout.IndexU8(array_u1)
index_li2 = awkward1.layout.Index32(array_li2)
index_lu2 = awkward1.layout.Index32(array_lu2)
index_li4 = awkward1.layout.Index32(array_li4)
index_lu4 = awkward1.layout.IndexU32(array_lu4)
index_li8 = awkward1.layout.Index64(array_li8)
index_lu8 = awkward1.layout.Index64(array_lu8)
index_bi2 = awkward1.layout.Index32(array_bi2)
index_bu2 = awkward1.layout.Index32(array_bu2)
index_bi4 = awkward1.layout.Index32(array_bi4)
index_bu4 = awkward1.layout.IndexU32(array_bu4)
index_bi8 = awkward1.layout.Index64(array_bi8)
index_bu8 = awkward1.layout.Index64(array_bu8)
assert index_i1[2] == 0
assert index_u1[2] == 0
assert index_li2[2] == 0
assert index_lu2[2] == 0
assert index_li4[2] == 0
assert index_lu4[2] == 0
assert index_li8[2] == 0
assert index_lu8[2] == 0
assert index_bi2[2] == 0
assert index_bu2[2] == 0
assert index_bi4[2] == 0
assert index_bu4[2] == 0
assert index_bi8[2] == 0
assert index_bu8[2] == 0
array_i1[2] = 10
array_u1[2] = 10
array_li2[2] = 10
array_lu2[2] = 10
array_li4[2] = 10
array_lu4[2] = 10
array_li8[2] = 10
array_lu8[2] = 10
array_bi2[2] = 10
array_bu2[2] = 10
array_bi4[2] = 10
array_bu4[2] = 10
array_bi8[2] = 10
array_bu8[2] = 10
assert index_i1[2] == 10
assert index_u1[2] == 10
assert index_li2[2] == 0
assert index_lu2[2] == 0
assert index_li4[2] == 10
assert index_lu4[2] == 10
assert index_li8[2] == 10
assert index_lu8[2] == 0
assert index_bi2[2] == 0
assert index_bu2[2] == 0
assert index_bi4[2] == 0
assert index_bu4[2] == 0
assert index_bi8[2] == 0
assert index_bu8[2] == 0
content = awkward1.layout.NumpyArray(numpy.array([1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9]))
starts1 = awkward1.layout.IndexU32(numpy.array([0, 3, 3, 5, 6], numpy.uint32))
stops1 = awkward1.layout.IndexU32(numpy.array([3, 3, 5, 6, 9], numpy.uint32))
offsets1 = awkward1.layout.IndexU32(numpy.array([0, 3, 3, 5, 6, 9], numpy.uint32))
starts2 = awkward1.layout.IndexU32(numpy.array([0, 2, 3, 3], numpy.uint32))
stops2 = awkward1.layout.IndexU32(numpy.array([2, 3, 3, 5], numpy.uint32))
offsets2 = awkward1.layout.IndexU32(numpy.array([0, 2, 3, 3, 5], numpy.uint32))
def test_listarray_basic():
    """Basic construction and slicing of a U32 ListArray, flat and nested."""
    flat = awkward1.layout.ListArrayU32(starts1, stops1, content)
    nested = awkward1.layout.ListArrayU32(starts2, stops2, flat)
    assert awkward1.to_list(flat) == [[1.1, 2.2, 3.3], [], [4.4, 5.5], [6.6], [7.7, 8.8, 9.9]]
    assert awkward1.to_list(flat[2]) == [4.4, 5.5]
    assert awkward1.to_list(flat[1:-1]) == [[], [4.4, 5.5], [6.6]]
    assert awkward1.to_list(nested) == [[[1.1, 2.2, 3.3], []], [[4.4, 5.5]], [], [[6.6], [7.7, 8.8, 9.9]]]
    assert awkward1.to_list(nested[1]) == [[4.4, 5.5]]
    assert awkward1.to_list(nested[1:-1]) == [[[4.4, 5.5]], []]
def test_listoffsetarray_basic():
    """Basic construction and slicing of a U32 ListOffsetArray, flat and nested."""
    flat = awkward1.layout.ListOffsetArrayU32(offsets1, content)
    nested = awkward1.layout.ListOffsetArrayU32(offsets2, flat)
    assert awkward1.to_list(flat) == [[1.1, 2.2, 3.3], [], [4.4, 5.5], [6.6], [7.7, 8.8, 9.9]]
    assert awkward1.to_list(flat[2]) == [4.4, 5.5]
    assert awkward1.to_list(flat[1:-1]) == [[], [4.4, 5.5], [6.6]]
    assert awkward1.to_list(nested) == [[[1.1, 2.2, 3.3], []], [[4.4, 5.5]], [], [[6.6], [7.7, 8.8, 9.9]]]
    assert awkward1.to_list(nested[1]) == [[4.4, 5.5]]
    assert awkward1.to_list(nested[1:-1]) == [[[4.4, 5.5]], []]
def test_listarray_at():
    """Integer (at-style) indexing into a U32 ListArray."""
    flat = awkward1.layout.ListArrayU32(starts1, stops1, content)
    # Nested construction must not raise, even though it is not indexed here.
    nested = awkward1.layout.ListArrayU32(starts2, stops2, flat)
    assert awkward1.to_list(flat[2]) == [4.4, 5.5]
    assert awkward1.to_list(flat[2,]) == [4.4, 5.5]
    assert awkward1.to_list(flat[2, 1:]) == [5.5]
    assert awkward1.to_list(flat[2:, 0]) == [4.4, 6.6, 7.7]
    assert awkward1.to_list(flat[2:, -1]) == [5.5, 6.6, 9.9]
def test_listoffsetarray_at():
    """Integer (at-style) indexing into a U32 ListOffsetArray."""
    flat = awkward1.layout.ListOffsetArrayU32(offsets1, content)
    # Nested construction must not raise, even though it is not indexed here.
    nested = awkward1.layout.ListOffsetArrayU32(offsets2, flat)
    assert awkward1.to_list(flat[2,]) == [4.4, 5.5]
    assert awkward1.to_list(flat[2, 1:]) == [5.5]
    assert awkward1.to_list(flat[2:, 0]) == [4.4, 6.6, 7.7]
    assert awkward1.to_list(flat[2:, -1]) == [5.5, 6.6, 9.9]
def test_listarray_slice():
    """Plain and tuple-wrapped slices agree on a U32 ListArray."""
    flat = awkward1.layout.ListArrayU32(starts1, stops1, content)
    nested = awkward1.layout.ListArrayU32(starts2, stops2, flat)
    assert awkward1.to_list(flat[1:-1]) == [[], [4.4, 5.5], [6.6]]
    assert awkward1.to_list(flat[1:-1,]) == [[], [4.4, 5.5], [6.6]]
    assert awkward1.to_list(nested[1:-1]) == [[[4.4, 5.5]], []]
    assert awkward1.to_list(nested[1:-1,]) == [[[4.4, 5.5]], []]
def test_listoffsetarray_slice():
    """Plain and tuple-wrapped slices agree on a U32 ListOffsetArray."""
    flat = awkward1.layout.ListOffsetArrayU32(offsets1, content)
    nested = awkward1.layout.ListOffsetArrayU32(offsets2, flat)
    assert awkward1.to_list(flat[1:-1]) == [[], [4.4, 5.5], [6.6]]
    assert awkward1.to_list(flat[1:-1,]) == [[], [4.4, 5.5], [6.6]]
    assert awkward1.to_list(nested[1:-1]) == [[[4.4, 5.5]], []]
    assert awkward1.to_list(nested[1:-1,]) == [[[4.4, 5.5]], []]
def test_listarray_slice_slice():
    """An outer slice followed by an inner slice on a U32 ListArray."""
    flat = awkward1.layout.ListArrayU32(starts1, stops1, content)
    # Nested construction must not raise, even though it is not indexed here.
    nested = awkward1.layout.ListArrayU32(starts2, stops2, flat)
    assert awkward1.to_list(flat[2:]) == [[4.4, 5.5], [6.6], [7.7, 8.8, 9.9]]
    assert awkward1.to_list(flat[2:, 1:]) == [[5.5], [], [8.8, 9.9]]
    assert awkward1.to_list(flat[2:, :-1]) == [[4.4], [], [7.7, 8.8]]
def test_listoffsetarray_slice_slice():
    """An outer slice followed by an inner slice on a U32 ListOffsetArray."""
    flat = awkward1.layout.ListOffsetArrayU32(offsets1, content)
    # Nested construction must not raise, even though it is not indexed here.
    nested = awkward1.layout.ListOffsetArrayU32(offsets2, flat)
    assert awkward1.to_list(flat[2:]) == [[4.4, 5.5], [6.6], [7.7, 8.8, 9.9]]
    assert awkward1.to_list(flat[2:, 1:]) == [[5.5], [], [8.8, 9.9]]
    assert awkward1.to_list(flat[2:, :-1]) == [[4.4], [], [7.7, 8.8]]
def test_listarray_ellipsis():
    """Ellipsis pushes the trailing slice to the deepest level of a ListArray."""
    flat = awkward1.layout.ListArrayU32(starts1, stops1, content)
    nested = awkward1.layout.ListArrayU32(starts2, stops2, flat)
    if not py27:
        # Ellipsis indexing is not exercised on Python 2.7.
        assert awkward1.to_list(flat[Ellipsis, 1:]) == [[2.2, 3.3], [], [5.5], [], [8.8, 9.9]]
        assert awkward1.to_list(nested[Ellipsis, 1:]) == [[[2.2, 3.3], []], [[5.5]], [], [[], [8.8, 9.9]]]
def test_listoffsetarray_ellipsis():
    """Ellipsis pushes the trailing slice to the deepest level of a ListOffsetArray."""
    flat = awkward1.layout.ListOffsetArrayU32(offsets1, content)
    nested = awkward1.layout.ListOffsetArrayU32(offsets2, flat)
    if not py27:
        # Ellipsis indexing is not exercised on Python 2.7.
        assert awkward1.to_list(flat[Ellipsis, 1:]) == [[2.2, 3.3], [], [5.5], [], [8.8, 9.9]]
        assert awkward1.to_list(nested[Ellipsis, 1:]) == [[[2.2, 3.3], []], [[5.5]], [], [[], [8.8, 9.9]]]
def test_listarray_array_slice():
    """Integer-array selection combined with slices on a nested ListArray."""
    flat = awkward1.layout.ListArrayU32(starts1, stops1, content)
    nested = awkward1.layout.ListArrayU32(starts2, stops2, flat)
    picked = [[[1.1, 2.2, 3.3], []], [[1.1, 2.2, 3.3], []], [[4.4, 5.5]], [[4.4, 5.5]], [[4.4, 5.5]], [[1.1, 2.2, 3.3], []]]
    # A trailing ":" after the fancy index must be a no-op.
    assert awkward1.to_list(nested[[0, 0, 1, 1, 1, 0]]) == picked
    assert awkward1.to_list(nested[[0, 0, 1, 1, 1, 0], :]) == picked
    assert awkward1.to_list(nested[[0, 0, 1, 1, 1, 0], :, 1:]) == [[[2.2, 3.3], []], [[2.2, 3.3], []], [[5.5]], [[5.5]], [[5.5]], [[2.2, 3.3], []]]
def test_listoffsetarray_array_slice():
    """Integer-array selection combined with slices on a nested ListOffsetArray."""
    flat = awkward1.layout.ListOffsetArrayU32(offsets1, content)
    nested = awkward1.layout.ListOffsetArrayU32(offsets2, flat)
    picked = [[[1.1, 2.2, 3.3], []], [[1.1, 2.2, 3.3], []], [[4.4, 5.5]], [[4.4, 5.5]], [[4.4, 5.5]], [[1.1, 2.2, 3.3], []]]
    # A trailing ":" after the fancy index must be a no-op.
    assert awkward1.to_list(nested[[0, 0, 1, 1, 1, 0]]) == picked
    assert awkward1.to_list(nested[[0, 0, 1, 1, 1, 0], :]) == picked
    assert awkward1.to_list(nested[[0, 0, 1, 1, 1, 0], :, 1:]) == [[[2.2, 3.3], []], [[2.2, 3.3], []], [[5.5]], [[5.5]], [[5.5]], [[2.2, 3.3], []]]
def test_listarray_array():
    """Fancy (integer-array) indexing, checked against NumPy on a 3-D fixture."""
    flat = awkward1.layout.ListArrayU32(starts1, stops1, content)
    nested = awkward1.layout.ListArrayU32(starts2, stops2, flat)
    assert awkward1.to_list(flat[numpy.array([2, 0, 0, 1, -1])]) == [[4.4, 5.5], [1.1, 2.2, 3.3], [1.1, 2.2, 3.3], [], [7.7, 8.8, 9.9]]
    assert awkward1.to_list(flat[numpy.array([2, 0, 0, -1]), numpy.array([1, 1, 0, 0])]) == [5.5, 2.2, 1.1, 7.7]
    # Deeper fixture: nine (value, value*10) pairs regrouped into three lists.
    pairs = [[0, 0], [1, 10], [2, 20], [3, 30], [4, 40], [5, 50], [6, 60], [7, 70], [8, 80]]
    content_deep = awkward1.layout.NumpyArray(numpy.array(pairs))
    starts_deep = awkward1.layout.IndexU32(numpy.array([0, 3, 6]))
    stops_deep = awkward1.layout.IndexU32(numpy.array([3, 6, 9]))
    deep = awkward1.layout.ListArrayU32(starts_deep, stops_deep, content_deep)
    expected = [[[0, 0], [1, 10], [2, 20]], [[3, 30], [4, 40], [5, 50]], [[6, 60], [7, 70], [8, 80]]]
    assert awkward1.to_list(deep) == expected
    # awkward must agree with NumPy for array/array and array/slice selectors.
    selector = (numpy.array([2, 0, 0, -1]), numpy.array([1, 1, 0, 0]), numpy.array([0, 1, 0, 1]))
    assert numpy.array(expected)[selector].tolist() == awkward1.to_list(deep[selector])
    selector = (numpy.array([2, 0, 0, -1]), numpy.array([1, 1, 0, 0]), slice(1, None))
    assert numpy.array(expected)[selector].tolist() == awkward1.to_list(deep[selector])
def test_listoffsetarray_array():
    """Fancy (integer-array) indexing on a ListOffsetArray, checked against NumPy."""
    flat = awkward1.layout.ListOffsetArrayU32(offsets1, content)
    nested = awkward1.layout.ListOffsetArrayU32(offsets2, flat)
    assert awkward1.to_list(flat[numpy.array([2, 0, 0, 1, -1])]) == [[4.4, 5.5], [1.1, 2.2, 3.3], [1.1, 2.2, 3.3], [], [7.7, 8.8, 9.9]]
    assert awkward1.to_list(flat[numpy.array([2, 0, 0, -1]), numpy.array([1, 1, 0, 0])]) == [5.5, 2.2, 1.1, 7.7]
    # NOTE(review): the deep portion duplicates test_listarray_array and still
    # uses ListArrayU32 (not ListOffsetArrayU32) -- kept as in the original.
    pairs = [[0, 0], [1, 10], [2, 20], [3, 30], [4, 40], [5, 50], [6, 60], [7, 70], [8, 80]]
    content_deep = awkward1.layout.NumpyArray(numpy.array(pairs))
    starts_deep = awkward1.layout.IndexU32(numpy.array([0, 3, 6]))
    stops_deep = awkward1.layout.IndexU32(numpy.array([3, 6, 9]))
    deep = awkward1.layout.ListArrayU32(starts_deep, stops_deep, content_deep)
    expected = [[[0, 0], [1, 10], [2, 20]], [[3, 30], [4, 40], [5, 50]], [[6, 60], [7, 70], [8, 80]]]
    assert awkward1.to_list(deep) == expected
    selector = (numpy.array([2, 0, 0, -1]), numpy.array([1, 1, 0, 0]), numpy.array([0, 1, 0, 1]))
    assert numpy.array(expected)[selector].tolist() == awkward1.to_list(deep[selector])
    selector = (numpy.array([2, 0, 0, -1]), numpy.array([1, 1, 0, 0]), slice(1, None))
    assert numpy.array(expected)[selector].tolist() == awkward1.to_list(deep[selector])
| 55.3
| 173
| 0.593443
| 2,143
| 12,719
| 3.429771
| 0.048997
| 0.016871
| 0.104762
| 0.138776
| 0.82068
| 0.802449
| 0.790068
| 0.786259
| 0.786259
| 0.776463
| 0
| 0.14889
| 0.16039
| 12,719
| 229
| 174
| 55.541485
| 0.539376
| 0.00684
| 0
| 0.583756
| 0
| 0
| 0.009501
| 0
| 0
| 0
| 0
| 0
| 0.42132
| 1
| 0.076142
| false
| 0
| 0.035533
| 0
| 0.111675
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
767933a3cbc4c4860d84dbb36a3ae605a156b0cb
| 9,566
|
py
|
Python
|
src/condor_tensorflow/metrics.py
|
GarrettJenkinson/condor_tensorflow
|
db715a2db6a5c0dbf610f5ad82cec16e2ab3d3d8
|
[
"Apache-2.0"
] | 9
|
2021-10-31T16:39:35.000Z
|
2022-02-19T17:51:07.000Z
|
src/condor_tensorflow/metrics.py
|
GarrettJenkinson/condor_tensorflow
|
db715a2db6a5c0dbf610f5ad82cec16e2ab3d3d8
|
[
"Apache-2.0"
] | 4
|
2022-01-01T19:52:55.000Z
|
2022-02-16T00:38:40.000Z
|
src/condor_tensorflow/metrics.py
|
GarrettJenkinson/condor_tensorflow
|
db715a2db6a5c0dbf610f5ad82cec16e2ab3d3d8
|
[
"Apache-2.0"
] | 4
|
2021-10-31T17:50:29.000Z
|
2022-02-11T02:54:47.000Z
|
import tensorflow as tf
from tensorflow.keras import backend as K
class OrdinalMeanAbsoluteError(tf.keras.metrics.Metric):
    """Mean absolute error between true and predicted ordinal ranks.

    Labels are expected in CONDOR (cumulative binary) encoding; predictions
    are the cumulative logits produced by a CondorOrdinal output layer.
    """

    def __init__(self, name="mean_absolute_error_labels",
                 **kwargs):
        """Creates an `OrdinalMeanAbsoluteError` instance."""
        super().__init__(name=name, **kwargs)
        # Running sum of absolute errors and the number of samples (or the
        # total sample weight) seen so far.
        self.maes = self.add_weight(name='maes', initializer='zeros')
        self.count = self.add_weight(name='count', initializer='zeros')

    def update_state(self, y_true, y_pred, sample_weight=None):
        """Accumulates mean absolute error for ordinal labels.

        Args:
          y_true: CONDOR-encoded (cumulative binary) labels.
          y_pred: Cumulative logits from a CondorOrdinal layer.
          sample_weight (optional): per-sample weights.
        """
        # Predict the label as in Cao et al.: cumulative probabilities are
        # the running products of the sigmoids of the cumulative logits.
        cum_probs = tf.math.cumprod(tf.math.sigmoid(y_pred), axis=1)
        # Predicted rank = number of cumulative thresholds passed.
        # (Vectorized replacement for the former per-row tf.map_fn cast.)
        labels_v2 = tf.reduce_sum(tf.cast(cum_probs > 0.5, tf.float32), axis=1)
        # Decode the true rank: the number of 1s in the cumulative encoding.
        y_true = tf.cast(tf.reduce_sum(y_true, axis=1), y_pred.dtype)
        # remove all dimensions of size 1 (e.g., from [[1], [2]], to [1, 2])
        y_true = tf.squeeze(y_true)
        if sample_weight is not None:
            values = tf.abs(y_true - labels_v2)
            sample_weight = tf.cast(tf.squeeze(sample_weight), y_pred.dtype)
            sample_weight = tf.broadcast_to(sample_weight, values.shape)
            values = tf.multiply(values, sample_weight)
            self.maes.assign_add(tf.reduce_sum(values))
            self.count.assign_add(tf.reduce_sum(sample_weight))
        else:
            self.maes.assign_add(tf.reduce_sum(tf.abs(y_true - labels_v2)))
            self.count.assign_add(tf.cast(tf.size(y_true), tf.float32))

    def result(self):
        """Returns the mean absolute error accumulated so far (0 if empty)."""
        return tf.math.divide_no_nan(self.maes, self.count)

    def reset_state(self):
        """Resets all of the metric state variables at the start of each epoch."""
        self.maes.assign(0.0)
        self.count.assign(0.0)

    def get_config(self):
        """Returns the serializable config of the metric."""
        config = {}
        base_config = super().get_config()
        return {**base_config, **config}
class SparseOrdinalMeanAbsoluteError(OrdinalMeanAbsoluteError):
    """Mean absolute error for sparse (plain integer-rank) ordinal labels."""

    def __init__(self, name="mean_absolute_error_labels",
                 **kwargs):
        """Creates a `SparseOrdinalMeanAbsoluteError` instance."""
        super().__init__(name=name, **kwargs)

    def update_state(self, y_true, y_pred, sample_weight=None):
        """Accumulates mean absolute error for sparse ordinal labels.

        Args:
          y_true: Integer rank labels (not cumulatively encoded).
          y_pred: Cumulative logits from a CondorOrdinal layer.
          sample_weight (optional): per-sample weights.
        """
        # Predict the label as in Cao et al.: cumulative probabilities are
        # the running products of the sigmoids of the cumulative logits.
        cum_probs = tf.math.cumprod(tf.math.sigmoid(y_pred), axis=1)
        # Predicted rank = number of cumulative thresholds passed.
        # (Vectorized replacement for the former per-row tf.map_fn cast.)
        labels_v2 = tf.reduce_sum(tf.cast(cum_probs > 0.5, tf.float32), axis=1)
        # Sparse labels are already plain ranks; only the dtype must match.
        y_true = tf.cast(y_true, y_pred.dtype)
        # remove all dimensions of size 1 (e.g., from [[1], [2]], to [1, 2])
        y_true = tf.squeeze(y_true)
        if sample_weight is not None:
            values = tf.abs(y_true - labels_v2)
            sample_weight = tf.cast(tf.squeeze(sample_weight), y_pred.dtype)
            sample_weight = tf.broadcast_to(sample_weight, values.shape)
            values = tf.multiply(values, sample_weight)
            self.maes.assign_add(tf.reduce_sum(values))
            self.count.assign_add(tf.reduce_sum(sample_weight))
        else:
            self.maes.assign_add(tf.reduce_sum(tf.abs(y_true - labels_v2)))
            self.count.assign_add(tf.cast(tf.size(y_true), tf.float32))
class OrdinalAccuracy(tf.keras.metrics.Metric):
    """Accuracy for ordinal labels, with an allowed rank-distance tolerance.

    A prediction counts as correct when |true rank - predicted rank| is at
    most `tolerance`; tolerance 0 is exact-match accuracy.
    """

    def __init__(self, name=None,
                 tolerance=0,
                 **kwargs):
        """Creates an `OrdinalAccuracy` instance."""
        if name is not None:
            super().__init__(name=name, **kwargs)
        else:
            # Encode the tolerance in the default name, e.g. "ordinal_accuracy_tol1".
            super().__init__(name="ordinal_accuracy_tol" + str(tolerance),
                             **kwargs)
        # Running sum of hits and the number of samples (or total weight) seen.
        self.accs = self.add_weight(name='accs', initializer='zeros')
        self.count = self.add_weight(name='count', initializer='zeros')
        self.tolerance = tolerance

    def update_state(self, y_true, y_pred, sample_weight=None):
        """Accumulates accuracy for ordinal labels.

        Args:
          y_true: CONDOR-encoded (cumulative binary) labels.
          y_pred: Cumulative logits from a CondorOrdinal layer.
          sample_weight (optional): per-sample weights.
        """
        # Predict the label as in Cao et al.: cumulative probabilities are
        # the running products of the sigmoids of the cumulative logits.
        cum_probs = tf.math.cumprod(tf.math.sigmoid(y_pred), axis=1)
        # Predicted rank = number of cumulative thresholds passed.
        # (Vectorized replacement for the former per-row tf.map_fn cast.)
        labels_v2 = tf.reduce_sum(tf.cast(cum_probs > 0.5, tf.float32), axis=1)
        # Decode the true rank: the number of 1s in the cumulative encoding.
        y_true = tf.cast(tf.reduce_sum(y_true, axis=1), y_pred.dtype)
        # remove all dimensions of size 1 (e.g., from [[1], [2]], to [1, 2])
        y_true = tf.squeeze(y_true)
        if sample_weight is not None:
            # Hit = |error| <= tolerance, cast to 0/1.
            values = tf.cast(tf.less_equal(
                tf.abs(y_true - labels_v2),
                tf.cast(self.tolerance, y_pred.dtype)), y_pred.dtype)
            sample_weight = tf.cast(tf.squeeze(sample_weight), y_pred.dtype)
            sample_weight = tf.broadcast_to(sample_weight, values.shape)
            values = tf.multiply(values, sample_weight)
            self.accs.assign_add(tf.reduce_sum(values))
            self.count.assign_add(tf.reduce_sum(sample_weight))
        else:
            self.accs.assign_add(tf.reduce_sum(tf.cast(tf.less_equal(
                tf.abs(y_true - labels_v2),
                tf.cast(self.tolerance, y_pred.dtype)), y_pred.dtype)))
            self.count.assign_add(tf.cast(tf.size(y_true), tf.float32))

    def result(self):
        """Returns the accuracy accumulated so far (0 if empty)."""
        return tf.math.divide_no_nan(self.accs, self.count)

    def reset_state(self):
        """Resets all of the metric state variables at the start of each epoch."""
        self.accs.assign(0.0)
        self.count.assign(0.0)

    def get_config(self):
        """Returns the serializable config of the metric."""
        config = {'tolerance': self.tolerance}
        base_config = super().get_config()
        return {**base_config, **config}
class SparseOrdinalAccuracy(OrdinalAccuracy):
    """Accuracy for sparse (plain integer-rank) ordinal labels, with an
    allowed rank-distance tolerance (see `OrdinalAccuracy`)."""

    def update_state(self, y_true, y_pred, sample_weight=None):
        """Accumulates accuracy for sparse ordinal labels.

        Args:
          y_true: Integer rank labels (not cumulatively encoded).
          y_pred: Cumulative logits from a CondorOrdinal layer.
          sample_weight (optional): per-sample weights.
        """
        # Predict the label as in Cao et al.: cumulative probabilities are
        # the running products of the sigmoids of the cumulative logits.
        cum_probs = tf.math.cumprod(tf.math.sigmoid(y_pred), axis=1)
        # Predicted rank = number of cumulative thresholds passed.
        # (Vectorized replacement for the former per-row tf.map_fn cast.)
        labels_v2 = tf.reduce_sum(tf.cast(cum_probs > 0.5, tf.float32), axis=1)
        # Sparse labels are already plain ranks; only the dtype must match.
        y_true = tf.cast(y_true, y_pred.dtype)
        # remove all dimensions of size 1 (e.g., from [[1], [2]], to [1, 2])
        y_true = tf.squeeze(y_true)
        if sample_weight is not None:
            # Hit = |error| <= tolerance, cast to 0/1.
            values = tf.cast(tf.less_equal(
                tf.abs(y_true - labels_v2),
                tf.cast(self.tolerance, y_pred.dtype)), y_pred.dtype)
            sample_weight = tf.cast(tf.squeeze(sample_weight), y_pred.dtype)
            sample_weight = tf.broadcast_to(sample_weight, values.shape)
            values = tf.multiply(values, sample_weight)
            self.accs.assign_add(tf.reduce_sum(values))
            self.count.assign_add(tf.reduce_sum(sample_weight))
        else:
            self.accs.assign_add(tf.reduce_sum(tf.cast(tf.less_equal(
                tf.abs(y_true - labels_v2),
                tf.cast(self.tolerance, y_pred.dtype)), y_pred.dtype)))
            self.count.assign_add(tf.cast(tf.size(y_true), tf.float32))
| 38.728745
| 82
| 0.614363
| 1,267
| 9,566
| 4.449092
| 0.113654
| 0.031932
| 0.035125
| 0.036189
| 0.921412
| 0.910236
| 0.910236
| 0.910236
| 0.910236
| 0.910236
| 0
| 0.010863
| 0.278277
| 9,566
| 246
| 83
| 38.886179
| 0.80562
| 0.275141
| 0
| 0.856115
| 0
| 0
| 0.017863
| 0.007805
| 0
| 0
| 0
| 0
| 0
| 1
| 0.093525
| false
| 0
| 0.014388
| 0.014388
| 0.165468
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7694daebda6c0f2912909d3b0f726a4c5cc3022b
| 10,869
|
py
|
Python
|
Hogweed.py
|
damianStrojek/Python-GameOfLife
|
edd707745ab6b933d2aea0ef5ec8c92cc60e939f
|
[
"FSFAP"
] | 4
|
2021-06-02T10:05:01.000Z
|
2022-03-19T14:01:31.000Z
|
Hogweed.py
|
damianStrojek/Python-GameOfLife
|
edd707745ab6b933d2aea0ef5ec8c92cc60e939f
|
[
"FSFAP"
] | null | null | null |
Hogweed.py
|
damianStrojek/Python-GameOfLife
|
edd707745ab6b933d2aea0ef5ec8c92cc60e939f
|
[
"FSFAP"
] | null | null | null |
# OOP PG WETI PROJECT NR 2
# Damian Strojek s184407 2021 IT/CS
# @ Copyright 2021, Damian Strojek, All rights reserved.
import pygame, os
from random import randrange
from Plant import Plant
class Hogweed(Plant):
    """Hogweed: a poisonous plant that kills adjacent animals each turn.

    Every turn it "burns down" any animal (except Cybersheep) on a
    neighbouring field and, with 15% probability, sews a new Hogweed on a
    random unoccupied field.
    """

    def __init__(self, _currentWorld, _positionX, _positionY):
        super(Hogweed, self).__init__(_currentWorld, _positionX, _positionY)
        self.strength = 10

    def getName(self):
        return "Hogweed"

    def getImage(self):
        # Hex worlds (type 1) use the hexagonal icon, square worlds the square one.
        if self.currentWorld.getWorldType() == 1:
            self.image = pygame.image.load(os.path.join('icons', 'hogweedhex.jpg'))
        else:
            self.image = pygame.image.load(os.path.join('icons', 'hogweed.png'))
        self.image = pygame.transform.scale(
            self.image,
            (self.currentWorld.getIconWidth(), self.currentWorld.getIconHeight()))
        return self.image

    def clone(self, newPosition):
        # Place a new Hogweed on the map and register it with the world.
        self.currentWorld.myMap[newPosition[0]][newPosition[1]] = \
            Hogweed(self.currentWorld, newPosition[0], newPosition[1])
        self.currentWorld.addToAdd(self.currentWorld.myMap[newPosition[0]][newPosition[1]])

    def _burnNeighbour(self, deltaX, deltaY):
        """Burn down the animal (if any, and not a Cybersheep) at the given offset."""
        # Imported locally (as in the original action()) to avoid circular imports.
        from Dirt import Dirt
        from Cybersheep import Cybersheep
        from Animal import Animal
        neighbourX = self.getX() + deltaX
        neighbourY = self.getY() + deltaY
        # Skip offsets that fall outside the map.
        if neighbourX < 0 or neighbourY < 0 or \
                neighbourX > self.currentWorld.getWorldWidth() - 1 or \
                neighbourY > self.currentWorld.getWorldHeight() - 1:
            return
        neighbour = self.currentWorld.myMap[neighbourX][neighbourY]
        if isinstance(neighbour, Dirt):
            return
        if isinstance(neighbour, Animal) and not isinstance(neighbour, Cybersheep):
            # Log with 1-based coordinates, consistently for every direction.
            # BUGFIX: the old per-direction copies printed the wrong coordinates
            # in a few branches (hex RIGHT used y-1; DOWN LEFT printed its own
            # position as (x-1,y-1)).
            self.currentWorld.log(self.getName() + " at (" + str(self.getX() + 1) + ","
                                  + str(self.getY() + 1) + ") burns down "
                                  + neighbour.getName()
                                  + " at (" + str(neighbourX + 1) + ","
                                  + str(neighbourY + 1) + ").")
            neighbour.died()
            neighbour.setDirtOnMap()

    def action(self):
        """One turn: burn every neighbouring animal, then possibly sew."""
        self.age += 1
        if self.currentWorld.getWorldType() == 0:
            # Square world: 4-neighbourhood (up, right, down, left).
            neighbourhood = [(0, -1), (1, 0), (0, 1), (-1, 0)]
        elif self.currentWorld.getWorldType() == 1:
            # Hex world: 6-neighbourhood.
            neighbourhood = [(-1, -1), (1, -1), (1, 0), (1, 1), (-1, 1), (-1, 0)]
        else:
            neighbourhood = []
        for deltaX, deltaY in neighbourhood:
            # BUGFIX: the old hex "UP RIGHT" branch tested cell [x][y-1] but
            # burned [x+1][y-1]; the shared helper uses one cell throughout.
            self._burnNeighbour(deltaX, deltaY)
        # SEWING, 15% chance to sew.
        # BUGFIX: randrange(1, 100) yields 1..99 (~14.1% for > 85); use 1..100.
        randomTick = randrange(1, 101, 1)
        if randomTick > 85:
            self.currentWorld.log(self.getName() + " at (" + str(self.getX() + 1) + ","
                                  + str(self.getY() + 1) + ") is sewing.")
            newPosition = self.findNewUnoccupiedField()
            if newPosition[0] is None or newPosition[1] is None:
                # BUGFIX: the failure message was missing its closing ")".
                self.currentWorld.log(self.getName() + " at (" + str(self.getX() + 1) + ","
                                      + str(self.getY() + 1) + ") sewing failed.")
                return
            self.clone(newPosition)
| 60.72067
| 132
| 0.523967
| 1,194
| 10,869
| 4.757956
| 0.078727
| 0.126738
| 0.229185
| 0.264038
| 0.837177
| 0.83436
| 0.83436
| 0.818518
| 0.812709
| 0.790706
| 0
| 0.022165
| 0.290183
| 10,869
| 179
| 133
| 60.72067
| 0.714193
| 0.018953
| 0
| 0.416058
| 0
| 0
| 0.033521
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036496
| false
| 0
| 0.043796
| 0.007299
| 0.109489
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.